Added global backends hashrate to "GET /2/backends" endpoint.

author  XMRig  2019-09-20 14:15:35 +07:00
parent  ed11c0a6da
commit  40e8bfe443
5 changed files with 51 additions and 23 deletions
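The endpoint handler itself is not part of this diff, but the intent is easier to see with a sketch of how a "GET /2/backends" reply could be assembled: each backend serializes itself through the toJSON(doc) signature shown in the hunks below, and each resulting object now carries a global "hashrate" array next to its per-thread data. Everything except that call (the function name, the template parameter, the container, the missing HTTP plumbing) is illustrative, not xmrig's actual API code.

#include <string>
#include <vector>

#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"

// Illustrative sketch only: build the JSON body for "GET /2/backends" by asking
// every backend to serialize itself. T stands in for the backend type; the only
// assumption taken from this diff is the toJSON(rapidjson::Document &) signature.
template <typename T>
std::string buildBackendsReply(const std::vector<T *> &backends)
{
    using namespace rapidjson;

    Document doc(kArrayType);
    auto &allocator = doc.GetAllocator();

    for (const T *backend : backends) {
        // Each backend object now includes a top-level "hashrate" array
        // (short/medium/large interval averages) added by this commit.
        doc.PushBack(backend->toJSON(doc), allocator);
    }

    StringBuffer buffer;
    Writer<StringBuffer> writer(buffer);
    doc.Accept(writer);

    return buffer.GetString();
}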

src/backend/common/Hashrate.cpp

@@ -23,10 +23,10 @@
  */


-#include <assert.h>
+#include <cassert>
 #include <cmath>
 #include <memory.h>
-#include <stdio.h>
+#include <cstdio>


 #include "backend/common/Hashrate.h"
@@ -133,8 +133,8 @@ double xmrig::Hashrate::calc(size_t threadId, size_t ms) const
         return nan("");
     }

-    const double hashes = static_cast<double>(lastestHashCnt - earliestHashCount);
-    const double time   = static_cast<double>(lastestStamp - earliestStamp) / 1000.0;
+    const auto hashes = static_cast<double>(lastestHashCnt - earliestHashCount);
+    const auto time   = static_cast<double>(lastestStamp - earliestStamp) / 1000.0;

     return hashes / time;
 }
@@ -175,3 +175,33 @@ rapidjson::Value xmrig::Hashrate::normalize(double d)
     return Value(floor(d * 100.0) / 100.0);
 }
+
+
+#ifdef XMRIG_FEATURE_API
+rapidjson::Value xmrig::Hashrate::toJSON(rapidjson::Document &doc) const
+{
+    using namespace rapidjson;
+    auto &allocator = doc.GetAllocator();
+
+    Value out(kArrayType);
+    out.PushBack(normalize(calc(ShortInterval)), allocator);
+    out.PushBack(normalize(calc(MediumInterval)), allocator);
+    out.PushBack(normalize(calc(LargeInterval)), allocator);
+
+    return out;
+}
+
+
+rapidjson::Value xmrig::Hashrate::toJSON(size_t threadId, rapidjson::Document &doc) const
+{
+    using namespace rapidjson;
+    auto &allocator = doc.GetAllocator();
+
+    Value out(kArrayType);
+    out.PushBack(normalize(calc(threadId, ShortInterval)), allocator);
+    out.PushBack(normalize(calc(threadId, MediumInterval)), allocator);
+    out.PushBack(normalize(calc(threadId, LargeInterval)), allocator);
+
+    return out;
+}
+#endif
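Both overloads return a three-element array of normalized averages over the Short, Medium and Large intervals (10 s and 60 s per the header hunk below; the large window's value is not shown in this excerpt), and they exist only when XMRIG_FEATURE_API is defined. A minimal usage sketch, assuming a live Hashrate instance owned by a backend and a hypothetical "thread-0" key:

#include <cstdio>

#include "backend/common/Hashrate.h"
#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"

// Sketch only: serialize the global and per-thread hashrate of one Hashrate
// instance. The surrounding function and member names are illustrative.
void printHashrateJson(const xmrig::Hashrate &hr)
{
    using namespace rapidjson;

    Document doc(kObjectType);
    auto &allocator = doc.GetAllocator();

    // Global average: [ShortInterval, MediumInterval, LargeInterval] windows.
    doc.AddMember("hashrate", hr.toJSON(doc), allocator);

    // Per-thread average for thread 0, same three-element layout.
    doc.AddMember("thread-0", hr.toJSON(0, doc), allocator);

    StringBuffer buffer;
    Writer<StringBuffer> writer(buffer);
    doc.Accept(writer);

    printf("%s\n", buffer.GetString());   // e.g. {"hashrate":[...],"thread-0":[...]}
}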

src/backend/common/Hashrate.h

@@ -26,10 +26,11 @@
 #define XMRIG_HASHRATE_H


-#include <stddef.h>
-#include <stdint.h>
+#include <cstddef>
+#include <cstdint>


+#include "base/tools/Object.h"
 #include "rapidjson/fwd.h"
@@ -39,6 +40,8 @@ namespace xmrig {
 class Hashrate
 {
 public:
+    XMRIG_DISABLE_COPY_MOVE_DEFAULT(Hashrate)
+
     enum Intervals {
         ShortInterval  = 10000,
         MediumInterval = 60000,
@@ -58,6 +61,11 @@ public:
     static const char *format(double h, char *buf, size_t size);
     static rapidjson::Value normalize(double d);

+#   ifdef XMRIG_FEATURE_API
+    rapidjson::Value toJSON(rapidjson::Document &doc) const;
+    rapidjson::Value toJSON(size_t threadId, rapidjson::Document &doc) const;
+#   endif
+
 private:
     constexpr static size_t kBucketSize = 2 << 11;
     constexpr static size_t kBucketMask = kBucketSize - 1;
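The trailing private constants hint at how samples are stored: kBucketSize is a power of two (2 << 11 = 4096), so kBucketMask can wrap a monotonically growing write position with a bitwise AND instead of a modulo. The storage code itself is not touched by this commit; the following is only a hypothetical illustration of that masking idea, with made-up names:

#include <cstddef>
#include <cstdint>

// Hypothetical sketch of power-of-two ring-buffer indexing as implied by
// kBucketSize / kBucketMask; the real Hashrate storage is not shown in this diff.
namespace {

constexpr size_t kBucketSize = 2 << 11;          // 4096 slots
constexpr size_t kBucketMask = kBucketSize - 1;  // 0x0FFF

struct Sample {
    uint64_t count;   // cumulative hash count
    uint64_t stamp;   // timestamp in ms
};

struct RingBuffer {
    Sample slots[kBucketSize] {};
    size_t top = 0;   // monotonically increasing write position

    void push(uint64_t count, uint64_t stamp)
    {
        slots[top & kBucketMask] = { count, stamp };  // wrap without modulo
        ++top;
    }
};

} // namespace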

src/backend/cpu/CpuBackend.cpp

@@ -405,8 +405,9 @@ rapidjson::Value xmrig::CpuBackend::toJSON(rapidjson::Document &doc) const
         return out;
     }

+    out.AddMember("hashrate", hashrate()->toJSON(doc), allocator);
+
     Value threads(kArrayType);
-    const Hashrate *hr = hashrate();

     size_t i = 0;
     for (const CpuLaunchData &data : d_ptr->threads) {
@@ -414,15 +415,9 @@ rapidjson::Value xmrig::CpuBackend::toJSON(rapidjson::Document &doc) const
         thread.AddMember("intensity", data.intensity, allocator);
         thread.AddMember("affinity", data.affinity, allocator);
         thread.AddMember("av", data.av(), allocator);
-
-        Value hashrate(kArrayType);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::ShortInterval)), allocator);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::MediumInterval)), allocator);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::LargeInterval)), allocator);
+        thread.AddMember("hashrate", hashrate()->toJSON(i, doc), allocator);

         i++;
-
-        thread.AddMember("hashrate", hashrate, allocator);
         threads.PushBack(thread, allocator);
     }

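For the consuming side, here is a sketch of pulling the new fields back out of a single backend object from the "GET /2/backends" reply. Only the member names visible in this diff ("hashrate", "threads") are assumed; the function name, the raw-body argument and the minimal error handling are illustrative.

#include <cstdio>

#include "rapidjson/document.h"

// Sketch: read the hashrate fields of one backend object. Values may be null
// when too few samples exist, hence the IsNumber() checks.
void dumpBackendHashrate(const char *json)
{
    using namespace rapidjson;

    Document doc;
    if (doc.Parse(json).HasParseError() || !doc.IsObject()) {
        return;
    }

    // Global backend hashrate: [short, medium, large] interval averages.
    if (doc.HasMember("hashrate") && doc["hashrate"].IsArray() && doc["hashrate"].Size() > 0) {
        const auto &hr = doc["hashrate"];
        if (hr[0].IsNumber()) {
            printf("backend 10s hashrate: %.1f h/s\n", hr[0].GetDouble());
        }
    }

    // Per-thread hashrate arrays have the same three-element layout.
    if (doc.HasMember("threads") && doc["threads"].IsArray()) {
        size_t i = 0;
        for (const auto &thread : doc["threads"].GetArray()) {
            if (thread.HasMember("hashrate") && thread["hashrate"].IsArray()
                && thread["hashrate"].Size() > 0 && thread["hashrate"][0].IsNumber()) {
                printf("thread %zu 10s hashrate: %.1f h/s\n", i, thread["hashrate"][0].GetDouble());
            }
            ++i;
        }
    }
}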

src/backend/opencl/OclBackend.cpp

@@ -391,22 +391,17 @@ rapidjson::Value xmrig::OclBackend::toJSON(rapidjson::Document &doc) const
         return out;
     }

+    out.AddMember("hashrate", hashrate()->toJSON(doc), allocator);
+
     Value threads(kArrayType);
-    const Hashrate *hr = hashrate();

     size_t i = 0;
     for (const OclLaunchData &data : d_ptr->threads) {
         Value thread = data.thread.toJSON(doc);
         thread.AddMember("affinity", data.affinity, allocator);
-
-        Value hashrate(kArrayType);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::ShortInterval)), allocator);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::MediumInterval)), allocator);
-        hashrate.PushBack(Hashrate::normalize(hr->calc(i, Hashrate::LargeInterval)), allocator);
+        thread.AddMember("hashrate", hashrate()->toJSON(i, doc), allocator);

         i++;
-
-        thread.AddMember("hashrate", hashrate, allocator);
         threads.PushBack(thread, allocator);
     }


src/backend/opencl/wrappers/OclLib.cpp

@@ -561,7 +561,7 @@ cl_int xmrig::OclLib::release(cl_program program) noexcept
         return CL_SUCCESS;
     }

-    LOG_REFS("%p %u ~program %s", program, getUint(program, CL_PROGRAM_REFERENCE_COUNT), getString(program, CL_PROGRAM_KERNEL_NAMES).data()); // FIXME
+    LOG_REFS("%p %u ~program %s", program, getUint(program, CL_PROGRAM_REFERENCE_COUNT), getString(program, CL_PROGRAM_KERNEL_NAMES).data());

     const cl_int ret = pReleaseProgram(program);
     if (ret != CL_SUCCESS) {
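The LOG_REFS line above prints the program's OpenCL reference count and kernel names before it is released. The getUint()/getString() helpers are not part of this diff; presumably they wrap clGetProgramInfo roughly as in the hedged sketch below, with made-up names (the real wrappers likely go through dynamically loaded function pointers, as the pReleaseProgram call suggests).

#include <string>
#include <vector>

#if defined(__APPLE__)
#   include <OpenCL/cl.h>
#else
#   include <CL/cl.h>
#endif

// Hypothetical equivalents of getUint()/getString() for cl_program queries.
static cl_uint getProgramUint(cl_program program, cl_program_info param)
{
    cl_uint value = 0;
    clGetProgramInfo(program, param, sizeof(value), &value, nullptr);
    return value;
}

static std::string getProgramString(cl_program program, cl_program_info param)
{
    size_t size = 0;
    clGetProgramInfo(program, param, 0, nullptr, &size);

    std::vector<char> buf(size + 1, '\0');
    clGetProgramInfo(program, param, size, buf.data(), nullptr);

    return buf.data();
}

// Usage mirroring the log line above:
//   getProgramUint(program, CL_PROGRAM_REFERENCE_COUNT)
//   getProgramString(program, CL_PROGRAM_KERNEL_NAMES)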