Fixed Merkle proof generation for aux chains

This commit is contained in:
SChernykh 2023-12-10 19:24:05 +01:00
parent 38417b0fd5
commit 658d756120
9 changed files with 58 additions and 91 deletions

View file

@@ -1307,32 +1307,50 @@ std::vector<AuxChainData> BlockTemplate::get_aux_chains(const uint32_t template_
return m_poolBlockTemplate->m_auxChains;
}
bool BlockTemplate::get_aux_proof(const uint32_t template_id, const hash& h, std::vector<hash>& proof) const
bool BlockTemplate::get_aux_proof(const uint32_t template_id, uint32_t extra_nonce, const hash& h, std::vector<hash>& proof) const
{
ReadLock lock(m_lock);
if (template_id != m_templateId) {
const BlockTemplate* old = m_oldTemplates[template_id % array_size(&BlockTemplate::m_oldTemplates)];
if (old && (template_id == old->m_templateId)) {
return old->get_aux_proof(template_id, h, proof);
return old->get_aux_proof(template_id, extra_nonce, h, proof);
}
return false;
}
std::vector<std::pair<bool, hash>> t;
if (!get_merkle_proof(m_poolBlockTemplate->m_merkleTree, h, t)) {
bool found = false;
const hash sidechain_id = calc_sidechain_hash(extra_nonce);
const uint32_t n_aux_chains = static_cast<uint32_t>(m_poolBlockTemplate->m_auxChains.size() + 1);
std::vector<hash> hashes(n_aux_chains);
for (const AuxChainData& aux_data : m_poolBlockTemplate->m_auxChains) {
const uint32_t aux_slot = get_aux_slot(aux_data.unique_id, m_poolBlockTemplate->m_auxNonce, n_aux_chains);
hashes[aux_slot] = aux_data.data;
if (aux_data.data == h) {
found = true;
}
}
const uint32_t aux_slot = get_aux_slot(m_sidechain->consensus_hash(), m_poolBlockTemplate->m_auxNonce, n_aux_chains);
hashes[aux_slot] = sidechain_id;
if (sidechain_id == h) {
found = true;
}
if (!found) {
return false;
}
proof.clear();
proof.reserve(proof.size());
std::vector<std::vector<hash>> tree;
merkle_hash_full_tree(hashes, tree);
for (const auto& k : t) {
proof.emplace_back(k.second);
}
return true;
return get_merkle_proof(tree, h, proof);
}
std::vector<uint8_t> BlockTemplate::get_block_template_blob(uint32_t template_id, uint32_t sidechain_extra_nonce, size_t& nonce_offset, size_t& extra_nonce_offset, size_t& merkle_root_offset, hash& merge_mining_root) const
@@ -1465,16 +1483,10 @@ void BlockTemplate::init_merge_mining_merkle_proof()
}
}
merkle_hash_full_tree(hashes, m_poolBlockTemplate->m_merkleTree);
std::vector<std::vector<hash>> tree;
merkle_hash_full_tree(hashes, tree);
std::vector<std::pair<bool, hash>> proof;
get_merkle_proof(m_poolBlockTemplate->m_merkleTree, m_poolBlockTemplate->m_sidechainId, proof);
m_poolBlockTemplate->m_merkleProof.reserve(proof.size());
for (const auto& p : proof) {
m_poolBlockTemplate->m_merkleProof.push_back(p.second);
}
get_merkle_proof(tree, m_poolBlockTemplate->m_sidechainId, m_poolBlockTemplate->m_merkleProof);
}
} // namespace p2pool

View file

@@ -49,7 +49,7 @@ public:
uint32_t get_hashing_blobs(uint32_t extra_nonce_start, uint32_t count, std::vector<uint8_t>& blobs, uint64_t& height, difficulty_type& difficulty, difficulty_type& aux_diff, difficulty_type& sidechain_difficulty, hash& seed_hash, size_t& nonce_offset, uint32_t& template_id) const;
std::vector<AuxChainData> get_aux_chains(const uint32_t template_id) const;
bool get_aux_proof(const uint32_t template_id, const hash& h, std::vector<hash>& proof) const;
bool get_aux_proof(const uint32_t template_id, uint32_t extra_nonce, const hash& h, std::vector<hash>& proof) const;
std::vector<uint8_t> get_block_template_blob(uint32_t template_id, uint32_t sidechain_extra_nonce, size_t& nonce_offset, size_t& extra_nonce_offset, size_t& merkle_root_offset, hash& merge_mining_root) const;

View file

@@ -313,8 +313,8 @@ void MergeMiningClient::merge_mining_submit_solution(const std::vector<uint8_t>&
bool MergeMiningClient::parse_merge_mining_submit_solution(const char* data, size_t size)
{
auto err = [](const char* msg) {
LOGWARN(1, "merge_mining_submit_solution RPC call failed: " << msg);
auto err = [this](const char* msg) {
LOGWARN(1, "merge_mining_submit_solution to " << m_host << ':' << m_port << " failed: " << msg);
return false;
};
@@ -339,7 +339,7 @@ bool MergeMiningClient::parse_merge_mining_submit_solution(const char* data, siz
}
const char* status = result["status"].GetString();
LOGINFO(0, log::LightGreen() << "merge_mining_submit_solution: " << status);
LOGINFO(0, log::LightGreen() << "merge_mining_submit_solution to " << m_host << ':' << m_port << ": " << status);
// Get new mining job
on_timer();

View file

@@ -117,7 +117,7 @@ void merkle_hash_full_tree(const std::vector<hash>& hashes, std::vector<std::vec
}
}
bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h, std::vector<std::pair<bool, hash>>& proof)
bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h, std::vector<hash>& proof)
{
if (tree.empty()) {
return false;
@@ -142,7 +142,7 @@ bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h,
return true;
}
else if (count == 2) {
proof.emplace_back(index != 0, hashes[index ^ 1]);
proof.emplace_back(hashes[index ^ 1]);
}
else {
size_t cnt = 1;
@@ -157,7 +157,7 @@ bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h,
if (j >= count) {
return false;
}
proof.emplace_back((index & 1) != 0, hashes[j]);
proof.emplace_back(hashes[j]);
index = (index >> 1) + k;
}
@@ -168,32 +168,13 @@ bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h,
if ((i >= n) || (j >= tree[i].size())) {
return false;
}
proof.emplace_back((index & 1) != 0, tree[i][j]);
proof.emplace_back(tree[i][j]);
}
}
return true;
}
bool verify_merkle_proof(hash h, const std::vector<std::pair<bool, hash>>& proof, const hash& root)
{
hash tmp[2];
for (size_t i = 0, n = proof.size(); i < n; ++i) {
if (proof[i].first) {
tmp[0] = proof[i].second;
tmp[1] = h;
}
else {
tmp[0] = h;
tmp[1] = proof[i].second;
}
keccak(tmp[0].h, HASH_SIZE * 2, h.h);
}
return (h == root);
}
hash get_root_from_proof(hash h, const std::vector<hash>& proof, size_t index, size_t count)
{
if (count == 1) {

View file

@@ -22,8 +22,7 @@ namespace p2pool {
void merkle_hash(const std::vector<hash>& hashes, hash& root);
void merkle_hash_full_tree(const std::vector<hash>& hashes, std::vector<std::vector<hash>>& tree);
bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h, std::vector<std::pair<bool, hash>>& proof);
bool verify_merkle_proof(hash h, const std::vector<std::pair<bool, hash>>& proof, const hash& root);
bool get_merkle_proof(const std::vector<std::vector<hash>>& tree, const hash& h, std::vector<hash>& proof);
hash get_root_from_proof(hash h, const std::vector<hash>& proof, size_t index, size_t count);
bool verify_merkle_proof(hash h, const std::vector<hash>& proof, size_t index, size_t count, const hash& root);

View file

@@ -348,29 +348,6 @@ void p2pool::handle_miner_data(MinerData& data)
update_aux_data(hash());
// TODO: remove after testing
#if 0
{
data.aux_chains.clear();
data.aux_chains.resize(10);
std::vector<hash> tmp(11);
uint8_t id[] = "aux0";
uint8_t aux_data[] = "data0";
for (int i = 0; i < 10; ++i, ++id[sizeof(id) - 2], ++aux_data[sizeof(aux_data) - 2]) {
keccak(id, sizeof(id) - 1, tmp[i].h);
data.aux_chains[i].unique_id = tmp[i];
keccak(aux_data, sizeof(aux_data) - 1, data.aux_chains[i].data.h);
}
tmp[10] = m_sideChain->consensus_hash();
find_aux_nonce(tmp, data.aux_nonce);
}
#endif
data.tx_backlog.clear();
data.time_received = std::chrono::high_resolution_clock::now();
{
@@ -651,12 +628,25 @@ void p2pool::submit_aux_block(const hash& chain_id, uint32_t template_id, uint32
for (MergeMiningClient* c : m_mergeMiningClients) {
if (chain_id == c->aux_id()) {
std::vector<hash> proof;
if (m_blockTemplate->get_aux_proof(template_id, c->aux_data(), proof)) {
const hash aux_hash = c->aux_data();
if (m_blockTemplate->get_aux_proof(template_id, extra_nonce, aux_hash, proof)) {
if (pool_block_debug()) {
const MinerData data = miner_data();
const uint32_t n_aux_chains = static_cast<uint32_t>(data.aux_chains.size() + 1);
const uint32_t index = get_aux_slot(c->aux_id(), data.aux_nonce, n_aux_chains);
if (!verify_merkle_proof(aux_hash, proof, index, n_aux_chains, merge_mining_root)) {
LOGERR(0, "submit_aux_block: verify_merkle_proof failed for chain_id " << chain_id);
}
}
c->merge_mining_submit_solution(blob, proof);
}
else {
LOGWARN(3, "submit_aux_block: failed to get merkle proof");
LOGWARN(3, "submit_aux_block: failed to get merkle proof for chain_id " << chain_id);
}
return;
}
}

View file

@@ -101,7 +101,6 @@ PoolBlock& PoolBlock::operator=(const PoolBlock& b)
m_sidechainHeight = b.m_sidechainHeight;
m_difficulty = b.m_difficulty;
m_cumulativeDifficulty = b.m_cumulativeDifficulty;
m_merkleTree = b.m_merkleTree;
m_merkleProof = b.m_merkleProof;
memcpy(m_sidechainExtraBuf, b.m_sidechainExtraBuf, sizeof(m_sidechainExtraBuf));
m_sidechainId = b.m_sidechainId;
@@ -296,9 +295,6 @@ void PoolBlock::reset_offchain_data()
m_auxChains.shrink_to_fit();
m_auxNonce = 0;
m_merkleTree.clear();
m_merkleTree.shrink_to_fit();
}
bool PoolBlock::get_pow_hash(RandomX_Hasher_Base* hasher, uint64_t height, const hash& seed_hash, hash& pow_hash, bool force_light_mode)

View file

@@ -151,8 +151,6 @@ struct PoolBlock
std::vector<AuxChainData> m_auxChains;
uint32_t m_auxNonce;
std::vector<std::vector<hash>> m_merkleTree;
std::vector<uint8_t> serialize_mainchain_data(size_t* header_size = nullptr, size_t* miner_tx_size = nullptr, int* outputs_offset = nullptr, int* outputs_blob_size = nullptr, const uint32_t* nonce = nullptr, const uint32_t* extra_nonce = nullptr) const;
std::vector<uint8_t> serialize_sidechain_data() const;

View file

@@ -73,19 +73,10 @@ TEST(merkle, tree)
for (size_t i = 0, n = hashes.size(); i < n; ++i) {
const hash& h = hashes[i];
std::vector<std::pair<bool, hash>> proof;
std::vector<hash> proof;
ASSERT_TRUE(get_merkle_proof(tree, h, proof));
ASSERT_TRUE(verify_merkle_proof(h, proof, root));
std::vector<hash> proof2;
proof2.reserve(proof.size());
for (const auto& p : proof) {
proof2.emplace_back(p.second);
}
ASSERT_TRUE(verify_merkle_proof(h, proof2, i, n, root));
ASSERT_TRUE(verify_merkle_proof(h, proof, i, n, root));
}
};