Added more integrity checks

SChernykh 2021-09-04 09:10:44 +02:00
parent c75bc83fc1
commit 1554a44f5c
2 changed files with 46 additions and 1 deletion

src/block_template.cpp

@@ -911,7 +911,21 @@ uint32_t BlockTemplate::get_hashing_blobs(uint32_t extra_nonce_start, uint32_t c
 	for (uint32_t i = 0; i < count; ++i) {
 		uint8_t blob[128];
-		blob_size = get_hashing_blob_nolock(extra_nonce_start + i, blob);
+		const uint32_t n = get_hashing_blob_nolock(extra_nonce_start + i, blob);
+
+		if (n > sizeof(blob)) {
+			LOGERR(1, "internal error: get_hashing_blob_nolock returned too large blob size " << n << ", expected <= " << sizeof(blob));
+		}
+		else if (n < 76) {
+			LOGERR(1, "internal error: get_hashing_blob_nolock returned too small blob size " << n << ", expected >= 76");
+		}
+
+		if (blob_size == 0) {
+			blob_size = n;
+		}
+		else if (n != blob_size) {
+			LOGERR(1, "internal error: get_hashing_blob_nolock returned different blob size " << n << ", expected " << blob_size);
+		}
 		blobs.insert(blobs.end(), blob, blob + blob_size);
 	}
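
The 76-byte minimum used here, and the offset 43 used in the second hunk below, line up with the usual layout of a Monero hashing blob (serialized block header, then the Merkle root, then a transaction count). A sketch of that assumed layout follows; it is not code from this repo, the field widths are typical values, and the timestamp varint is taken at its common 5-byte width:

	#include <cstddef>

	// Assumed layout of a Monero hashing blob (illustration only, not from this repo).
	// The offsets line up with the constants used in this commit: the Merkle root
	// starts at byte 43 and the smallest valid blob is 76 bytes.
	namespace blob_layout {
		constexpr size_t major_version = 0;  // varint, 1 byte for current block versions
		constexpr size_t minor_version = 1;  // varint, 1 byte
		constexpr size_t timestamp     = 2;  // varint, typically 5 bytes for current timestamps
		constexpr size_t prev_id       = 7;  // previous block hash, 32 bytes
		constexpr size_t nonce         = 39; // 4 bytes
		constexpr size_t merkle_root   = 43; // Merkle root over the transaction hashes, 32 bytes
		constexpr size_t tx_count      = 75; // varint, at least 1 byte
		constexpr size_t min_blob_size = 76; // everything above plus one varint byte
	}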

src/stratum_server.cpp

@@ -111,6 +111,37 @@ void StratumServer::on_block(const BlockTemplate& block)
 	m_extraNonce.exchange(blobs_data->m_numClientsExpected);
 	blobs_data->m_blobSize = block.get_hashing_blobs(0, blobs_data->m_numClientsExpected, blobs_data->m_blobs, blobs_data->m_height, difficulty, sidechain_difficulty, blobs_data->m_seedHash, nonce_offset, blobs_data->m_templateId);
+
+	// Integrity checks
+	if (blobs_data->m_blobSize < 76) {
+		LOGERR(1, "internal error: get_hashing_blobs returned too small blobs (" << blobs_data->m_blobSize << " bytes)");
+	}
+	else if (blobs_data->m_blobs.size() != blobs_data->m_blobSize * num_connections) {
+		LOGERR(1, "internal error: get_hashing_blobs returned wrong amount of data");
+	}
+	else if (num_connections > 1) {
+		std::vector<uint64_t> blob_hashes;
+		blob_hashes.reserve(num_connections);
+
+		const uint8_t* data = blobs_data->m_blobs.data();
+		const size_t size = blobs_data->m_blobSize;
+
+		// Get first 8 bytes of the Merkle root hash from each blob
+		for (size_t i = 0; i < num_connections; ++i) {
+			blob_hashes.emplace_back(*reinterpret_cast<const uint64_t*>(data + i * size + 43));
+		}
+
+		// Find duplicates
+		std::sort(blob_hashes.begin(), blob_hashes.end());
+
+		for (uint32_t i = 1; i < num_connections; ++i) {
+			if (blob_hashes[i - 1] == blob_hashes[i]) {
+				LOGERR(1, "internal error: get_hashing_blobs returned two identical blobs");
+				break;
+			}
+		}
+	}
+
 	blobs_data->m_target = std::max(difficulty.target(), sidechain_difficulty.target());
 	{
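
The duplicate scan above reads a uint64_t straight out of the blob buffer, and data + i * size + 43 carries no alignment guarantee on every platform. A standalone rewrite of the same sort-and-compare idea might copy the 8 bytes out with memcpy instead; a minimal sketch, with the function name and signature invented for illustration:

	#include <algorithm>
	#include <cstdint>
	#include <cstring>
	#include <vector>

	// Hypothetical standalone version of the duplicate check above: returns true
	// if any two blobs share the same first 8 bytes of the Merkle root.
	// std::memcpy is used instead of reinterpret_cast because the source address
	// is not guaranteed to be 8-byte aligned.
	bool has_duplicate_blobs(const uint8_t* data, size_t blob_size, size_t count)
	{
		constexpr size_t merkle_root_offset = 43;

		std::vector<uint64_t> prefixes;
		prefixes.reserve(count);

		for (size_t i = 0; i < count; ++i) {
			uint64_t prefix;
			std::memcpy(&prefix, data + i * blob_size + merkle_root_offset, sizeof(prefix));
			prefixes.emplace_back(prefix);
		}

		// Sorting makes duplicates adjacent, so one linear pass finds them
		std::sort(prefixes.begin(), prefixes.end());
		return std::adjacent_find(prefixes.begin(), prefixes.end()) != prefixes.end();
	}

Since each blob is built from a different extra nonce, which changes the miner transaction and therefore the Merkle root, two equal 8-byte prefixes almost certainly indicate a bug rather than a chance collision.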