fix: updated spark data calls and caching

This commit is contained in:
julian 2024-12-15 20:15:33 -06:00 committed by julian-CStack
parent d6aec00b58
commit ae10bef0ee
3 changed files with 27 additions and 12 deletions

View file

@@ -47,9 +47,18 @@ abstract class FiroCacheCoordinator {
final setMetaSize = final setMetaSize =
(await setMetaCacheFile.exists()) ? await setMetaCacheFile.length() : 0; (await setMetaCacheFile.exists()) ? await setMetaCacheFile.length() : 0;
print("TAG SIZE: $tagsSize"); Logging.instance.log(
print("SET SIZE: $setSize"); "Spark cache used tags size: $tagsSize",
print("SET META SIZE: $setMetaSize"); level: LogLevel.Debug,
);
Logging.instance.log(
"Spark cache anon set size: $setSize",
level: LogLevel.Debug,
);
Logging.instance.log(
"Spark cache set meta size: $setMetaSize",
level: LogLevel.Debug,
);
final int bytes = tagsSize + setSize + setMetaSize; final int bytes = tagsSize + setSize + setMetaSize;
@@ -112,7 +121,7 @@ abstract class FiroCacheCoordinator {
startBlockHash: blockHash.toHexReversedFromBase64, startBlockHash: blockHash.toHexReversedFromBase64,
); );
} else { } else {
const sectorSize = 100; // TODO adjust this? const sectorSize = 2000; // TODO adjust this?
final prevMetaSize = final prevMetaSize =
await FiroCacheCoordinator.getSparkMetaSetSizeForGroupId( await FiroCacheCoordinator.getSparkMetaSetSizeForGroupId(
groupId, groupId,
@@ -139,10 +148,11 @@ abstract class FiroCacheCoordinator {
final start = (i * sectorSize) + prevSize; final start = (i * sectorSize) + prevSize;
final data = await client.getSparkAnonymitySetBySector( final data = await client.getSparkAnonymitySetBySector(
coinGroupId: groupId, coinGroupId: groupId,
latestBlock: meta.blockHash, latestBlock: meta.blockHash.toHexReversedFromBase64,
startIndex: start, startIndex: start,
endIndex: start + sectorSize, endIndex: start + sectorSize,
); );
progressUpdated.call(start + sectorSize, meta.size);
coins.addAll(data); coins.addAll(data);
} }
@@ -150,10 +160,11 @@ abstract class FiroCacheCoordinator {
if (remainder > 0) { if (remainder > 0) {
final data = await client.getSparkAnonymitySetBySector( final data = await client.getSparkAnonymitySetBySector(
coinGroupId: groupId, coinGroupId: groupId,
latestBlock: meta.blockHash, latestBlock: meta.blockHash.toHexReversedFromBase64,
startIndex: meta.size - remainder, startIndex: meta.size - remainder,
endIndex: meta.size, endIndex: meta.size,
); );
progressUpdated.call(meta.size, meta.size);
coins.addAll(data); coins.addAll(data);
} }

View file

@@ -63,9 +63,9 @@ abstract class _Reader {
required Database db, required Database db,
}) async { }) async {
final query = """ final query = """
SELECT ss.size SELECT size
FROM PreviousMetaFetchResult ss FROM PreviousMetaFetchResult
WHERE ss.groupId = $groupId; WHERE coinGroupId = $groupId;
"""; """;
return db.select("$query;"); return db.select("$query;");

View file

@@ -1193,8 +1193,12 @@ class ElectrumXClient {
required int coinGroupId, required int coinGroupId,
}) async { }) async {
try { try {
const command = const command = "spark.getsparkanonymitysetmeta";
"spark.getsparkanonyumitysetmeta"; // TODO verify this will be correct Logging.instance.log(
"[${getElectrumAdapter()?.host}] => attempting to fetch $command...",
level: LogLevel.Info,
);
final start = DateTime.now(); final start = DateTime.now();
final response = await request( final response = await request(
requestID: requestID, requestID: requestID,
@@ -1238,7 +1242,7 @@ class ElectrumXClient {
}) async { }) async {
try { try {
const command = const command =
"spark.getsparkanonyumitysetsector"; // TODO verify this will be correct "spark.getsparkanonymitysetsector"; // TODO verify this will be correct
final start = DateTime.now(); final start = DateTime.now();
final response = await request( final response = await request(
requestID: requestID, requestID: requestID,