add new used tags call that includes txids

julian 2024-06-10 11:24:05 -06:00
parent b87105773c
commit e53aa98363
5 changed files with 78 additions and 43 deletions
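In short, the wallet stops calling the tags-only endpoint and instead calls a new one that pairs every used-coin tag with the hash of the transaction that spent it, then threads that pair shape through the cache coordinator, the background cache worker, and the SQLite cache schema. A rough sketch of the two response shapes, as implied by the parsing code in the diff below (field values are placeholders, not real tags or txids):

// old call, spark.getusedcoinstags: a flat list of base64-encoded tags
final oldResponse = {
  "tags": ["dGFnMQ==", "dGFnMg=="],
};

// new call, spark.getusedcoinstagstxhashes: [tag, txid] pairs, both
// base64-encoded as returned by the server
final newResponse = {
  "tagsandtxids": [
    ["dGFnMQ==", "dHhpZDE="],
    ["dGFnMg==", "dHhpZDI="],
  ],
};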

View file

@@ -30,7 +30,7 @@ void _debugLog(Object? object) {
abstract class _FiroCache {
static const int _setCacheVersion = 1;
static const int _tagsCacheVersion = 1;
static const int _tagsCacheVersion = 2;
static const String sparkSetCacheFileName =
"spark_set_v$_setCacheVersion.sqlite3";
static const String sparkUsedTagsCacheFileName =
@@ -154,7 +154,8 @@ abstract class _FiroCache {
"""
CREATE TABLE SparkUsedCoinTags (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
tag TEXT NOT NULL UNIQUE
tag TEXT NOT NULL UNIQUE,
txid TEXT NOT NULL
);
""",
);
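Alongside the new txid column, the tags cache version constant is bumped from 1 to 2, which presumably rolls the used-tags cache over to a fresh database file rather than migrating the existing one in place. A minimal sketch of reading the stored txid back for a hashed tag, assuming the same package:sqlite3 API the cache already uses; the helper itself is illustrative and not part of this commit:

import 'package:sqlite3/sqlite3.dart';

/// Illustrative lookup: returns the txid recorded for [hashedTag],
/// or null if the tag has not been written to SparkUsedCoinTags yet.
String? txidForUsedTag(Database db, String hashedTag) {
  final rows = db.select(
    "SELECT txid FROM SparkUsedCoinTags WHERE tag = ?;",
    [hashedTag],
  );
  return rows.isEmpty ? null : rows.first["txid"] as String;
}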

View file

@@ -51,7 +51,7 @@ abstract class FiroCacheCoordinator {
ElectrumXClient client,
) async {
final count = await FiroCacheCoordinator.getUsedCoinTagsCount();
final unhashedTags = await client.getSparkUnhashedUsedCoinsTags(
final unhashedTags = await client.getSparkUnhashedUsedCoinsTagsWithTxHashes(
startNumber: count,
);
if (unhashedTags.isNotEmpty) {

View file

@@ -100,7 +100,7 @@ class _FiroCacheWorker {
case FCFuncName._updateSparkUsedTagsWith:
result = _updateSparkUsedTagsWith(
usedTagsCacheDb,
task.data as List<String>,
task.data as List<List<dynamic>>,
);
break;
}

View file

@@ -15,10 +15,12 @@ class FCResult {
/// returns true if successful, otherwise some exception
FCResult _updateSparkUsedTagsWith(
Database db,
List<String> tags,
List<List<dynamic>> tags,
) {
// hash the tags here since this function is called in a background isolate
final hashedTags = LibSpark.hashTags(base64Tags: tags);
final hashedTags = LibSpark.hashTags(
base64Tags: tags.map((e) => e[0] as String),
);
if (hashedTags.isEmpty) {
// nothing to add, return early
@@ -27,13 +29,13 @@ FCResult _updateSparkUsedTagsWith(
db.execute("BEGIN;");
try {
for (final tag in hashedTags) {
for (int i = 0; i < hashedTags.length; i++) {
db.execute(
"""
INSERT OR IGNORE INTO SparkUsedCoinTags (tag)
VALUES (?);
INSERT OR IGNORE INTO SparkUsedCoinTags (tag, txid)
VALUES (?, ?);
""",
[tag],
[hashedTags[i], (tags[i][1] as String).toHexReversedFromBase64],
);
}
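The insert loop now walks the hashed tags and the raw entries by index so that each hashed tag is stored next to the txid from the same entry. The txid string goes through a toHexReversedFromBase64 extension whose implementation is not part of this diff; presumably it base64-decodes the value, reverses the byte order, and hex-encodes the result, giving the byte orientation txids are usually displayed in. A minimal sketch of that assumed behaviour (not the extension's actual source):

import 'dart:convert';

/// Assumed equivalent of the toHexReversedFromBase64 extension used above:
/// decode base64, reverse the bytes, and hex-encode the result.
String hexReversedFromBase64(String base64Txid) {
  final bytes = base64Decode(base64Txid).reversed;
  return bytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join();
}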

View file

@@ -932,41 +932,41 @@ class ElectrumXClient {
}
}
// TODO: update when we get new call to include tx hashes in response
/// NOT USED. See [getSparkUnhashedUsedCoinsTagsWithTxHashes]
/// Takes [startNumber], if it is 0, we get the full set,
/// otherwise the used tags after that number
Future<List<String>> getSparkUnhashedUsedCoinsTags({
String? requestID,
required int startNumber,
}) async {
try {
final start = DateTime.now();
await _checkElectrumAdapter();
final Map<String, dynamic> response =
await (getElectrumAdapter() as FiroElectrumClient)
.getUsedCoinsTags(startNumber: startNumber);
// TODO: Add 2 minute timeout.
// Why 2 minutes?
Logging.instance.log(
"Fetching spark.getusedcoinstags finished",
level: LogLevel.Info,
);
final map = Map<String, dynamic>.from(response);
final tags = List<String>.from(map["tags"] as List);
Logging.instance.log(
"Finished ElectrumXClient.getSparkUnhashedUsedCoinsTags(startNumber"
"=$startNumber). # of tags fetched=${tags.length}, "
"Duration=${DateTime.now().difference(start)}",
level: LogLevel.Info,
);
return tags;
} catch (e) {
Logging.instance.log(e, level: LogLevel.Error);
rethrow;
}
}
// Future<List<String>> getSparkUnhashedUsedCoinsTags({
// String? requestID,
// required int startNumber,
// }) async {
// try {
// final start = DateTime.now();
// await _checkElectrumAdapter();
// final Map<String, dynamic> response =
// await (getElectrumAdapter() as FiroElectrumClient)
// .getUsedCoinsTags(startNumber: startNumber);
// // TODO: Add 2 minute timeout.
// // Why 2 minutes?
// Logging.instance.log(
// "Fetching spark.getusedcoinstags finished",
// level: LogLevel.Info,
// );
// final map = Map<String, dynamic>.from(response);
// final tags = List<String>.from(map["tags"] as List);
//
// Logging.instance.log(
// "Finished ElectrumXClient.getSparkUnhashedUsedCoinsTags(startNumber"
// "=$startNumber). # of tags fetched=${tags.length}, "
// "Duration=${DateTime.now().difference(start)}",
// level: LogLevel.Info,
// );
//
// return tags;
// } catch (e) {
// Logging.instance.log(e, level: LogLevel.Error);
// rethrow;
// }
// }
/// Takes a list of [sparkCoinHashes] and returns the set id and block height
/// for each coin
@@ -1085,6 +1085,38 @@ class ElectrumXClient {
}
}
/// Takes [startNumber]; if it is 0, the full set is returned,
/// otherwise only the used tags and txids after that number.
Future<List<List<dynamic>>> getSparkUnhashedUsedCoinsTagsWithTxHashes({
String? requestID,
required int startNumber,
}) async {
try {
final start = DateTime.now();
final response = await request(
requestID: requestID,
command: "spark.getusedcoinstagstxhashes",
args: [
"$startNumber",
],
);
final map = Map<String, dynamic>.from(response as Map);
final tags = List<List<dynamic>>.from(map["tagsandtxids"] as List);
Logging.instance.log(
"Finished ElectrumXClient.getSparkUnhashedUsedCoinsTagsWithTxHashes("
"startNumber=$startNumber). # of tags fetched=${tags.length}, "
"Duration=${DateTime.now().difference(start)}",
level: LogLevel.Info,
);
return tags;
} catch (e) {
Logging.instance.log(e, level: LogLevel.Error);
rethrow;
}
}
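A short usage sketch of the new call, following the shapes above; the wrapper function and variable names are illustrative, not taken from this commit:

Future<void> printNewUsedTags(ElectrumXClient client, int cachedCount) async {
  // startNumber of 0 would fetch the full set; passing the cached count
  // fetches only entries added since the last sync
  final entries = await client.getSparkUnhashedUsedCoinsTagsWithTxHashes(
    startNumber: cachedCount,
  );
  for (final entry in entries) {
    final base64Tag = entry[0] as String; // un-hashed tag, hashed later via LibSpark.hashTags
    final base64Txid = entry[1] as String; // spending txid, still base64-encoded here
    print("$base64Tag spent in $base64Txid");
  }
}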
// ===========================================================================
/// Get the current fee rate.