cleanup commands

Boog900 2024-12-30 02:13:03 +00:00
parent 647fd09ed4
commit c657b9980d
8 changed files with 61 additions and 30 deletions

Cargo.lock (generated)
View file

@@ -445,6 +445,7 @@ checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
 dependencies = [
  "anstyle",
  "clap_lex",
+ "strsim",
  "terminal_size",
 ]
@@ -3180,6 +3181,12 @@ dependencies = [
  "spin",
 ]

+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
 [[package]]
 name = "strum"
 version = "0.26.3"

View file

@@ -44,7 +44,7 @@ borsh = { workspace = true }
 bytemuck = { workspace = true }
 bytes = { workspace = true }
 cfg-if = { workspace = true }
-clap = { workspace = true, features = ["cargo", "help", "wrap_help"] }
+clap = { workspace = true, features = ["cargo", "help", "wrap_help", "usage", "error-context", "suggestions"] }
 chrono = { workspace = true }
 crypto-bigint = { workspace = true }
 crossbeam = { workspace = true }
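The three clap features added here are the ones that control error output: "usage" renders the usage line inside error messages, "error-context" reports which argument caused the failure, and "suggestions" adds "did you mean" hints for near-miss input. Below is a minimal, hypothetical sketch (not code from this commit) of the behaviour those features enable, using a stand-in parser and assuming clap's derive support, which the workspace dependency already provides for the code further down:

use clap::Parser;

/// Stand-in for cuprated's real `Command` enum.
#[derive(Debug, Parser)]
#[command(multicall = true)]
enum Demo {
    /// Print status information.
    Status,
}

fn main() {
    // With the "suggestions" feature enabled, a typo such as "statsu" fails
    // with an InvalidSubcommand error that also names the closest match.
    if let Err(err) = Demo::try_parse_from(["statsu"]) {
        eprintln!("{err}");
    }
}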

View file

@@ -47,7 +47,7 @@ buffer_bytes = 50_000_000
 ## The size of the queue of blocks which are waiting for a parent block to be downloaded (bytes).
 in_progress_queue_bytes = 50_000_000
 ## The target size of a batch of blocks (bytes), must not exceed 100MB.
-target_batch_bytes= 5_000_000
+target_batch_bytes = 15_000_000
 ## The amount of time between checking the pool of connected peers for free peers to download blocks.
 check_client_pool_interval = { secs = 30, nanos = 0 }
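The batch target triples from 5 MB to 15 MB while the comment keeps the documented 100 MB ceiling. A small, hypothetical sketch of enforcing that ceiling when the value is loaded; the constant and helper names are made up for illustration and this is not cuprated's config loader:

/// Upper bound taken from the config comment above (100 MB).
const MAX_TARGET_BATCH_BYTES: usize = 100_000_000;

/// Reject values above the documented limit.
fn validate_target_batch_bytes(bytes: usize) -> Result<usize, String> {
    if bytes > MAX_TARGET_BATCH_BYTES {
        Err(format!("target_batch_bytes ({bytes}) exceeds the 100 MB limit"))
    } else {
        Ok(bytes)
    }
}

fn main() {
    // The new default from this diff passes the check comfortably.
    assert_eq!(validate_target_batch_bytes(15_000_000), Ok(15_000_000));
}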

View file

@@ -3,7 +3,6 @@ use std::{collections::HashMap, sync::Arc};
 use futures::StreamExt;
 use monero_serai::block::Block;
 use tokio::sync::{mpsc, oneshot, Notify};
-use tokio_util::sync::CancellationToken;
 use tower::{Service, ServiceExt};
 use tracing::error;

View file

@@ -147,8 +147,6 @@ impl super::BlockchainManager {
     /// This function will panic if any internal service returns an unexpected error that we cannot
     /// recover from or if the incoming batch contains no blocks.
     async fn handle_incoming_block_batch_main_chain(&mut self, batch: BlockBatch) {
-        let start_height = batch.blocks.first().unwrap().0.number().unwrap();
-
         let batch_prep_res = self
             .block_verifier_service
             .ready()

View file

@@ -1,27 +1,25 @@
-use clap::{builder::TypedValueParser, Parser};
-use std::io;
-use std::io::Stdin;
-use std::iter::once;
-use std::thread::sleep;
-use std::time::Duration;
+use std::{io, thread::sleep, time::Duration};
+use clap::{builder::TypedValueParser, Parser, ValueEnum};
 use tokio::sync::mpsc;
 use tracing::level_filters::LevelFilter;
 // strip out usage
-const PARSER_TEMPLATE: &str = "\
-{all-args}
-";
-// strip out name/version
-const APPLET_TEMPLATE: &str = "\
-{about-with-newline}\n\
-{all-args}\
-";
+const PARSER_TEMPLATE: &str = "{all-args}";
 /// A command received from [`io::stdin`].
 #[derive(Debug, Parser)]
-#[command(multicall = true, subcommand_required = true, rename_all = "snake_case", help_template = PARSER_TEMPLATE, arg_required_else_help = true, disable_help_flag = true)]
+#[command(
+    multicall = true,
+    subcommand_required = true,
+    rename_all = "snake_case",
+    help_template = PARSER_TEMPLATE,
+    arg_required_else_help = true,
+    disable_help_flag = true
+)]
 pub enum Command {
     /// Change the log output.
-    #[command(arg_required_else_help = true, help_template = APPLET_TEMPLATE)]
+    #[command(arg_required_else_help = true)]
     SetLog {
         /// The minimum log level that will be displayed.
         #[arg(
@@ -29,13 +27,25 @@ pub enum Command {
             value_parser = clap::builder::PossibleValuesParser::new(["off", "trace", "debug", "info", "warn", "error"])
                 .map(|s| s.parse::<LevelFilter>().unwrap()),
         )]
-        level: LevelFilter,
+        level: Option<LevelFilter>,
+        /// The logging output target to change.
+        #[arg(value_enum, default_value_t = OutputTarget::Stdout)]
+        output_target: OutputTarget,
     },
     /// Print status information on `cuprated`.
-    #[command(help_template = APPLET_TEMPLATE)]
     Status,
 }
+/// The log output target.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
+pub enum OutputTarget {
+    /// The stdout logging output.
+    Stdout,
+    /// The file appender logging output.
+    File,
+}
 /// The [`Command`] listener loop.
 pub fn command_listener(incoming_commands: mpsc::Sender<Command>) -> ! {
     let mut stdin = io::stdin();
     let mut line = String::new();
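Because the parser is declared with multicall = true, there is no binary name to skip: the first whitespace-separated token of a line read from stdin is matched directly as the subcommand ("status", "set_log", ...). A rough sketch of feeding one line into the parser; the helper itself is illustrative and not part of the diff, but it relies on the Command enum defined above:

use clap::Parser;

/// Illustrative helper: parse a single stdin line into a [`Command`].
fn parse_line(line: &str) -> Result<Command, clap::Error> {
    // With multicall = true, "status" alone is a complete, valid invocation.
    Command::try_parse_from(line.split_whitespace())
}

command_listener can then forward the parsed Command over the mpsc::Sender it was given, or print the clap error and keep reading on failure.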

View file

@@ -50,7 +50,7 @@ impl Default for BlockDownloaderConfig {
             buffer_bytes: 50_000_000,
             in_progress_queue_bytes: 50_000_000,
             check_client_pool_interval: Duration::from_secs(30),
-            target_batch_bytes: 5_000_000,
+            target_batch_bytes: 15_000_000,
         }
     }
 }

View file

@@ -26,7 +26,12 @@ use cuprate_consensus_context::{
 };
 use cuprate_helper::time::secs_to_hms;

-use crate::{commands::Command, config::Config, constants::PANIC_CRITICAL_SERVICE_ERROR};
+use crate::{
+    commands::{Command, OutputTarget},
+    config::Config,
+    constants::PANIC_CRITICAL_SERVICE_ERROR,
+    logging::CupratedTracingFilter,
+};

 mod blockchain;
 mod commands;
@@ -135,6 +140,7 @@ fn main() {
 fn init_tokio_rt(config: &Config) -> tokio::runtime::Runtime {
     tokio::runtime::Builder::new_multi_thread()
         .worker_threads(config.tokio.threads)
+        .thread_name("cuprated-tokio")
         .enable_all()
         .build()
         .unwrap()
@@ -144,6 +150,7 @@ fn init_tokio_rt(config: &Config) -> tokio::runtime::Runtime {
 fn init_global_rayon_pool(config: &Config) {
     rayon::ThreadPoolBuilder::new()
         .num_threads(config.rayon.threads)
+        .thread_name(|index| format!("cuprated-rayon-{}", index))
         .build_global()
         .unwrap()
 }
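Both runtimes now label their worker threads, so panic messages, debuggers, and profilers show cuprated-tokio / cuprated-rayon-N instead of an anonymous default. A standalone sketch (illustrative only, not cuprated code) showing the rayon side of this via std::thread::current().name():

fn main() {
    // Mirrors the change above: every pool thread gets an explicit name.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(2)
        .thread_name(|index| format!("cuprated-rayon-{index}"))
        .build()
        .unwrap();

    pool.install(|| {
        // Prints e.g. Some("cuprated-rayon-0").
        println!("{:?}", std::thread::current().name());
    });
}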
@@ -155,11 +162,21 @@ async fn io_loop(
 ) -> ! {
     while let Some(command) = incoming_commands.recv().await {
         match command {
-            Command::SetLog { level } => {
-                logging::modify_stdout_output(|filter| {
-                    filter.level = level;
+            Command::SetLog {
+                level,
+                output_target,
+            } => {
+                let modify_output = |filter: &mut CupratedTracingFilter| {
+                    if let Some(level) = level {
+                        filter.level = level;
+                    }
                     println!("NEW LOG FILTER: {filter}");
-                });
+                };
+
+                match output_target {
+                    OutputTarget::File => logging::modify_file_output(modify_output),
+                    OutputTarget::Stdout => logging::modify_stdout_output(modify_output),
+                }
             }
             Command::Status => {
                 let BlockChainContextResponse::Context(blockchain_context) = context_service