From ae10bef0ee93df723361fdf1c182b414d988e9c8 Mon Sep 17 00:00:00 2001
From: julian
Date: Sun, 15 Dec 2024 20:15:33 -0600
Subject: [PATCH] fix: updated spark data calls and caching

---
 lib/db/sqlite/firo_cache_coordinator.dart | 23 +++++++++++++++++------
 lib/db/sqlite/firo_cache_reader.dart      |  6 +++---
 lib/electrumx_rpc/electrumx_client.dart   | 10 +++++++---
 3 files changed, 27 insertions(+), 12 deletions(-)

diff --git a/lib/db/sqlite/firo_cache_coordinator.dart b/lib/db/sqlite/firo_cache_coordinator.dart
index 81da6134b..45fa4c62e 100644
--- a/lib/db/sqlite/firo_cache_coordinator.dart
+++ b/lib/db/sqlite/firo_cache_coordinator.dart
@@ -47,9 +47,18 @@ abstract class FiroCacheCoordinator {
     final setMetaSize =
         (await setMetaCacheFile.exists()) ? await setMetaCacheFile.length() : 0;
 
-    print("TAG SIZE: $tagsSize");
-    print("SET SIZE: $setSize");
-    print("SET META SIZE: $setMetaSize");
+    Logging.instance.log(
+      "Spark cache used tags size: $tagsSize",
+      level: LogLevel.Debug,
+    );
+    Logging.instance.log(
+      "Spark cache anon set size: $setSize",
+      level: LogLevel.Debug,
+    );
+    Logging.instance.log(
+      "Spark cache set meta size: $setMetaSize",
+      level: LogLevel.Debug,
+    );
 
     final int bytes = tagsSize + setSize + setMetaSize;
 
@@ -112,7 +121,7 @@
         startBlockHash: blockHash.toHexReversedFromBase64,
       );
     } else {
-      const sectorSize = 100; // TODO adjust this?
+      const sectorSize = 2000; // TODO adjust this?
       final prevMetaSize =
           await FiroCacheCoordinator.getSparkMetaSetSizeForGroupId(
         groupId,
@@ -139,10 +148,11 @@
         final start = (i * sectorSize) + prevSize;
         final data = await client.getSparkAnonymitySetBySector(
           coinGroupId: groupId,
-          latestBlock: meta.blockHash,
+          latestBlock: meta.blockHash.toHexReversedFromBase64,
           startIndex: start,
           endIndex: start + sectorSize,
         );
+        progressUpdated.call(start + sectorSize, meta.size);
 
         coins.addAll(data);
       }
@@ -150,10 +160,11 @@
       if (remainder > 0) {
         final data = await client.getSparkAnonymitySetBySector(
           coinGroupId: groupId,
-          latestBlock: meta.blockHash,
+          latestBlock: meta.blockHash.toHexReversedFromBase64,
           startIndex: meta.size - remainder,
           endIndex: meta.size,
         );
+        progressUpdated.call(meta.size, meta.size);
 
         coins.addAll(data);
       }
diff --git a/lib/db/sqlite/firo_cache_reader.dart b/lib/db/sqlite/firo_cache_reader.dart
index 11e6382f8..b27cd77e3 100644
--- a/lib/db/sqlite/firo_cache_reader.dart
+++ b/lib/db/sqlite/firo_cache_reader.dart
@@ -63,9 +63,9 @@ abstract class _Reader {
     required Database db,
   }) async {
     final query = """
-      SELECT ss.size
-      FROM PreviousMetaFetchResult ss
-      WHERE ss.groupId = $groupId;
+      SELECT size
+      FROM PreviousMetaFetchResult
+      WHERE coinGroupId = $groupId;
     """;
 
     return db.select("$query;");
diff --git a/lib/electrumx_rpc/electrumx_client.dart b/lib/electrumx_rpc/electrumx_client.dart
index f8196388a..6737e7b4b 100644
--- a/lib/electrumx_rpc/electrumx_client.dart
+++ b/lib/electrumx_rpc/electrumx_client.dart
@@ -1193,8 +1193,12 @@ class ElectrumXClient {
     required int coinGroupId,
   }) async {
     try {
-      const command =
-          "spark.getsparkanonyumitysetmeta"; // TODO verify this will be correct
+      const command = "spark.getsparkanonymitysetmeta";
+      Logging.instance.log(
+        "[${getElectrumAdapter()?.host}] => attempting to fetch $command...",
+        level: LogLevel.Info,
+      );
+
       final start = DateTime.now();
       final response = await request(
         requestID: requestID,
@@ -1238,7 +1242,7 @@
   }) async {
     try {
       const command =
-          "spark.getsparkanonyumitysetsector"; // TODO verify this will be correct
+          "spark.getsparkanonymitysetsector"; // TODO verify this will be correct
       final start = DateTime.now();
       final response = await request(
         requestID: requestID,