From cf3a4f5f6e068073d2c5e2927aa1aa5a2899eef0 Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sun, 13 Apr 2025 13:38:08 -0300 Subject: [PATCH 1/7] feat: introduce and create a proof of concept for the tracing lib This commit introduces tracing, tracing_subscriber and tracing_appender libs. The objective is replace the current logging in-house module, for a more extensible architecture which supports the same features as the current logging method, but which is simpler to build, maintain, and well known in the community. Signed-off-by: Lorenzo Bertin Salvador --- Cargo.lock | 210 +++++++++++++++++++++++++++++++++----- Cargo.toml | 3 + src/main.rs | 11 ++ src/monitoring.rs | 22 ++++ src/monitoring/logging.rs | 104 +++++++++++++++++++ 5 files changed, 323 insertions(+), 27 deletions(-) create mode 100644 src/monitoring.rs create mode 100644 src/monitoring/logging.rs diff --git a/Cargo.lock b/Cargo.lock index 1e19419..b052a27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 4 +version = 3 [[package]] name = "addr2line" @@ -178,9 +178,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.17" +version = "1.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" +checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362" dependencies = [ "shlex", ] @@ -207,9 +207,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.35" +version = "4.5.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8aa86934b44c19c50f87cc2790e19f54f7a67aedb64101c2e1a2e5ecfb73944" +checksum = "2df961d8c8a0d08aa9945718ccf584145eee3f3aa06cddbeac12933781102e04" dependencies = [ "clap_builder", "clap_derive", @@ -217,9 +217,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.35" +version = "4.5.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2414dbb2dd0695280da6ea9261e327479e9d37b0630f6b53ba2a11c60c679fd9" +checksum = "132dbda40fb6753878316a489d5a1242a8ef2f0d9e47ba01c951ea8aa7d013a5" dependencies = [ "anstream", "anstyle", @@ -307,6 +307,21 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crossterm" version = "0.28.1" @@ -317,7 +332,7 @@ dependencies = [ "crossterm_winapi", "mio", "parking_lot", - "rustix", + "rustix 0.38.44", "signal-hook", "signal-hook-mio", "winapi", @@ -367,6 +382,15 @@ dependencies = [ "syn", ] +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", +] + [[package]] name = "derive-getters" version = "0.5.0" @@ -404,9 +428,9 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", "windows-sys 0.59.0", @@ -424,12 +448,12 @@ dependencies = [ [[package]] name = "flate2" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" +checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", - "miniz_oxide 0.8.5", + "miniz_oxide 0.8.8", ] [[package]] @@ -605,6 +629,12 @@ version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + [[package]] name = "lock_api" version = "0.4.12" @@ -653,9 +683,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] @@ -708,6 +738,22 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-traits" version = "0.2.19" @@ -732,6 +778,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "owo-colors" version = "3.5.0" @@ -785,6 +837,9 @@ dependencies = [ "serde-xml-rs", "serde_json", "thiserror 2.0.12", + "tracing", + "tracing-appender", + "tracing-subscriber", "ureq", "which", ] @@ -801,6 +856,12 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "predicates" version = "3.1.3" @@ -881,9 +942,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" +checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" dependencies = [ "bitflags", ] @@ -946,15 +1007,28 @@ dependencies = [ "bitflags", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustix" +version = "1.0.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.9.4", "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.23.25" +version = "0.23.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "822ee9188ac4ec04a2f0531e55d035fb2de73f18b41a63c70c2712503b6fb13c" +checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" dependencies = [ "log", "once_cell", @@ -1106,9 +1180,9 @@ checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" [[package]] name = "smallvec" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" [[package]] name = "static_assertions" @@ -1217,6 +1291,37 @@ dependencies = [ "once_cell", ] +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + [[package]] name = "tracing" version = "0.1.41" @@ -1224,9 +1329,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.69", + "time", + "tracing-subscriber", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tracing-core" version = "0.1.33" @@ -1247,15 +1376,42 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + [[package]] name = "tracing-subscriber" version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ + 
"nu-ansi-term", + "serde", + "serde_json", "sharded-slab", + "smallvec", "thread_local", "tracing-core", + "tracing-log", + "tracing-serde", ] [[package]] @@ -1422,13 +1578,13 @@ dependencies = [ [[package]] name = "which" -version = "7.0.2" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2774c861e1f072b3aadc02f8ba886c26ad6321567ecc294c935434cad06f1283" +checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" dependencies = [ "either", "env_home", - "rustix", + "rustix 1.0.5", "winsafe", ] @@ -1603,9 +1759,9 @@ checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" [[package]] name = "xml-rs" -version = "0.8.25" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4" +checksum = "a62ce76d9b56901b19a74f19431b0d8b3bc7ca4ad685a746dfd78ca8f4fc6bda" [[package]] name = "zeroize" diff --git a/Cargo.toml b/Cargo.toml index 23f9511..6390c97 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,9 @@ regex = "1.11.1" serde = { version = "1.0.219", features = ["derive"] } serde-xml-rs = "0.6.0" serde_json = "1.0.140" +tracing = "0.1" +tracing-appender = "0.2.3" +tracing-subscriber = { version = "0.3.19", features = ["fmt", "std", "json"] } thiserror = "2.0.12" clap = { version = "4.5.35", features = ["derive"] } chrono = "0.4.40" diff --git a/src/main.rs b/src/main.rs index 34612f6..d18e769 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,14 +5,22 @@ use app::{config::Config, logging::Logger}; use clap::Parser; use cli::Cli; use handler::run_app; +use monitoring::{init_monitoring, InitMonitoringProduct}; +use tracing::{event, Level}; mod app; mod cli; mod handler; +mod monitoring; mod ui; mod utils; fn main() -> color_eyre::Result<()> { + // file writer guards should be propagated to main() so the logging thread lives enough + let InitMonitoringProduct { + file_writer_guards: _file_writer_guards, + } = init_monitoring(); + let args = Cli::parse(); utils::install_hooks()?; @@ -32,6 +40,9 @@ fn main() -> color_eyre::Result<()> { utils::restore()?; Logger::info("patch-hub finished"); + // event! 
usage example as an alternative for Logger module + event!(Level::INFO, "patch-hub finished"); + Logger::flush(); Ok(()) diff --git a/src/monitoring.rs b/src/monitoring.rs new file mode 100644 index 0000000..49805d9 --- /dev/null +++ b/src/monitoring.rs @@ -0,0 +1,22 @@ +use logging::{init_logging_layer, InitLoggingLayerProduct}; +use tracing_appender::non_blocking::WorkerGuard; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +pub mod logging; + +pub struct InitMonitoringProduct { + pub file_writer_guards: Vec, +} + +pub fn init_monitoring() -> InitMonitoringProduct { + let InitLoggingLayerProduct { + logging_layer, + file_writer_guards, + } = init_logging_layer(); + + // for future telemetry monitoring, we should just have to add another .with() in the registry + // with the new telemetry layer + tracing_subscriber::registry().with(logging_layer).init(); + + InitMonitoringProduct { file_writer_guards } +} diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs new file mode 100644 index 0000000..bc76632 --- /dev/null +++ b/src/monitoring/logging.rs @@ -0,0 +1,104 @@ +use std::{ + fs::File, + io::Write, + path::Path, + sync::{Arc, Mutex}, +}; + +use tracing_appender::non_blocking::WorkerGuard; +use tracing_subscriber::{Layer, Registry}; + +const PATH_HUB_TARGET: &str = "patch_hub"; +const LATEST_LOG_FILENAME: &str = "latest.log"; +const TEMP_LOG_DIR: &str = "/tmp/temporary-patch-hub-logs"; + +pub struct InitLoggingLayerProduct { + pub logging_layer: Box + Send + Sync>, + pub file_writer_guards: Vec, +} +struct InitLoggingFileWritersProduct { + multi_file_writer: MultiFileWriter, + file_writer_guards: Vec, +} + +#[derive(Clone)] +pub struct MultiFileWriter { + writers: Vec>>, +} + +pub fn init_logging_layer() -> InitLoggingLayerProduct { + let InitLoggingFileWritersProduct { + multi_file_writer, + file_writer_guards, + } = init_logging_file_writers(); + + let filter_patch_hub_target = + tracing_subscriber::filter::filter_fn(|metadata| metadata.target() == PATH_HUB_TARGET); + + let logging_layer = Box::new( + tracing_subscriber::fmt::layer() + .with_writer(move || multi_file_writer.clone()) + .with_file(true) + .with_line_number(true) + .with_timer(tracing_subscriber::fmt::time::SystemTime) + .json() + .with_filter(filter_patch_hub_target), + ); + + InitLoggingLayerProduct { + logging_layer, + file_writer_guards, + } +} + +fn init_logging_file_writers() -> InitLoggingFileWritersProduct { + let timestamp_log_file_name = format!( + "patch-hub_{}.log", + chrono::Local::now().format("%Y%m%d-%H%M%S") + ); + + // we have to truncate the file so if the file already exists we overwrite its content + let latest_log_path = Path::new(TEMP_LOG_DIR).join(LATEST_LOG_FILENAME); + let _ = File::create(&latest_log_path); + + let timestamp_log_file_appender = + tracing_appender::rolling::never(TEMP_LOG_DIR, timestamp_log_file_name); + let latest_log_file_appender = + tracing_appender::rolling::never(TEMP_LOG_DIR, LATEST_LOG_FILENAME); + + // logging thread should be non-blocking so it does not interfere with the rest of the application + let (timestamp_log_writer, timestamp_log_writer_guard) = + tracing_appender::non_blocking(timestamp_log_file_appender); + let (latest_log_writer, latest_log_writer_guard) = + tracing_appender::non_blocking(latest_log_file_appender); + + InitLoggingFileWritersProduct { + multi_file_writer: MultiFileWriter::new(vec![ + Arc::new(Mutex::new(timestamp_log_writer)), + Arc::new(Mutex::new(latest_log_writer)), + ]), + file_writer_guards: 
vec![timestamp_log_writer_guard, latest_log_writer_guard], + } +} + +impl MultiFileWriter { + pub fn new(writers: Vec>>) -> Self { + Self { writers } + } +} + +impl Write for MultiFileWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + for writer in &self.writers { + let _ = writer.lock().expect("to get lock").write_all(buf); + } + Ok(buf.len()) + } + + fn flush(&mut self) -> std::io::Result<()> { + for writer in &self.writers { + writer.lock().expect("to get lock").flush()?; + } + Ok(()) + } +} From 163c34bd526b12f7e7f57d2ba023404d98f112b6 Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sun, 13 Apr 2025 18:31:59 -0300 Subject: [PATCH 2/7] feat: use config log path and deal with logs before its initialization This commit changes the new logging logic so logs are stored in the path defined at patch-hub's config. Since logging should encompass the whole application lifecycle, we have to correctly deal with logs produced before/during Config initialization. To solve this firstly we save the logs to a temporary path, and then, after Config intitialization we move the existing logs to the path defined at the Config and reload the logging layer so new logs are stored in the updated path. Signed-off-by: Lorenzo Bertin Salvador --- src/main.rs | 12 +- src/monitoring.rs | 38 +++- src/monitoring/logging.rs | 105 ++++------- .../logging/multi_log_file_writer.rs | 171 ++++++++++++++++++ 4 files changed, 254 insertions(+), 72 deletions(-) create mode 100644 src/monitoring/logging/multi_log_file_writer.rs diff --git a/src/main.rs b/src/main.rs index d18e769..1692804 100644 --- a/src/main.rs +++ b/src/main.rs @@ -18,7 +18,10 @@ mod utils; fn main() -> color_eyre::Result<()> { // file writer guards should be propagated to main() so the logging thread lives enough let InitMonitoringProduct { - file_writer_guards: _file_writer_guards, + logging_guards_by_file_name, + mut multi_log_file_writer, + logging_reload_handle, + .. 
} = init_monitoring(); let args = Cli::parse(); @@ -29,6 +32,13 @@ fn main() -> color_eyre::Result<()> { let config = Config::build(); config.create_dirs(); + // with the config we can update log directory + let _guards = multi_log_file_writer.update_log_writer_with_config( + &config, + logging_guards_by_file_name, + logging_reload_handle, + ); + match args.resolve(terminal, &config) { ControlFlow::Break(b) => return b, ControlFlow::Continue(t) => terminal = t, diff --git a/src/monitoring.rs b/src/monitoring.rs index 49805d9..ef8059f 100644 --- a/src/monitoring.rs +++ b/src/monitoring.rs @@ -1,22 +1,48 @@ -use logging::{init_logging_layer, InitLoggingLayerProduct}; +use std::collections::HashMap; + +use logging::{ + init_logging_layer, multi_log_file_writer::MultiLogFileWriter, InitLoggingLayerProduct, +}; +use tracing::level_filters::LevelFilter; use tracing_appender::non_blocking::WorkerGuard; -use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; +use tracing_subscriber::{ + filter::Targets, layer::SubscriberExt, reload::Handle, util::SubscriberInitExt, Layer, Registry, +}; pub mod logging; +pub const PATH_HUB_TARGET: &str = "patch_hub"; + pub struct InitMonitoringProduct { - pub file_writer_guards: Vec, + pub multi_log_file_writer: MultiLogFileWriter, + pub logging_guards_by_file_name: HashMap, + pub logging_reload_handle: Handle + Send + Sync>, Registry>, } pub fn init_monitoring() -> InitMonitoringProduct { let InitLoggingLayerProduct { logging_layer, - file_writer_guards, + multi_log_file_writer, + guards_by_file_name: logging_guards_by_file_name, + reload_handle: logging_reload_handle, } = init_logging_layer(); + // the filter is separate from the logging layer because of a lib limitation: https://github.com/tokio-rs/tracing/issues/1629 + // otherwise we could not reload the logging layer after the logging dir is updated + let filter_patch_hub_target = Targets::new() + .with_target(PATH_HUB_TARGET, LevelFilter::TRACE) + .with_default(LevelFilter::OFF); + // for future telemetry monitoring, we should just have to add another .with() in the registry // with the new telemetry layer - tracing_subscriber::registry().with(logging_layer).init(); + tracing_subscriber::registry() + .with(logging_layer) + .with(filter_patch_hub_target) + .init(); - InitMonitoringProduct { file_writer_guards } + InitMonitoringProduct { + multi_log_file_writer, + logging_guards_by_file_name, + logging_reload_handle, + } } diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs index bc76632..23ee01a 100644 --- a/src/monitoring/logging.rs +++ b/src/monitoring/logging.rs @@ -1,104 +1,79 @@ use std::{ - fs::File, - io::Write, - path::Path, + collections::HashMap, sync::{Arc, Mutex}, }; +use multi_log_file_writer::{create_non_blocking_writer, get_fmt_layer, MultiLogFileWriter}; use tracing_appender::non_blocking::WorkerGuard; -use tracing_subscriber::{Layer, Registry}; +use tracing_subscriber::{reload::Handle, Layer, Registry}; -const PATH_HUB_TARGET: &str = "patch_hub"; -const LATEST_LOG_FILENAME: &str = "latest.log"; +pub mod multi_log_file_writer; + +const LATEST_LOG_FILENAME: &str = "latest.tracing.log"; const TEMP_LOG_DIR: &str = "/tmp/temporary-patch-hub-logs"; pub struct InitLoggingLayerProduct { pub logging_layer: Box + Send + Sync>, - pub file_writer_guards: Vec, + pub multi_log_file_writer: MultiLogFileWriter, + pub guards_by_file_name: HashMap, + pub reload_handle: Handle + Send + Sync>, Registry>, } struct InitLoggingFileWritersProduct { - multi_file_writer: 
MultiFileWriter, - file_writer_guards: Vec, -} - -#[derive(Clone)] -pub struct MultiFileWriter { - writers: Vec>>, + multi_log_file_writer: MultiLogFileWriter, + guards_by_file_name: HashMap, } pub fn init_logging_layer() -> InitLoggingLayerProduct { let InitLoggingFileWritersProduct { - multi_file_writer, - file_writer_guards, + multi_log_file_writer, + guards_by_file_name, } = init_logging_file_writers(); - let filter_patch_hub_target = - tracing_subscriber::filter::filter_fn(|metadata| metadata.target() == PATH_HUB_TARGET); + let fmt_layer = get_fmt_layer(multi_log_file_writer.clone()); - let logging_layer = Box::new( - tracing_subscriber::fmt::layer() - .with_writer(move || multi_file_writer.clone()) - .with_file(true) - .with_line_number(true) - .with_timer(tracing_subscriber::fmt::time::SystemTime) - .json() - .with_filter(filter_patch_hub_target), - ); + let (reload_layer, reload_handle) = tracing_subscriber::reload::Layer::new(fmt_layer); InitLoggingLayerProduct { - logging_layer, - file_writer_guards, + logging_layer: Box::new(reload_layer), + multi_log_file_writer, + guards_by_file_name, + reload_handle, } } fn init_logging_file_writers() -> InitLoggingFileWritersProduct { let timestamp_log_file_name = format!( - "patch-hub_{}.log", + "patch-hub_{}.tracing.log", chrono::Local::now().format("%Y%m%d-%H%M%S") ); - // we have to truncate the file so if the file already exists we overwrite its content - let latest_log_path = Path::new(TEMP_LOG_DIR).join(LATEST_LOG_FILENAME); - let _ = File::create(&latest_log_path); - - let timestamp_log_file_appender = - tracing_appender::rolling::never(TEMP_LOG_DIR, timestamp_log_file_name); - let latest_log_file_appender = - tracing_appender::rolling::never(TEMP_LOG_DIR, LATEST_LOG_FILENAME); - // logging thread should be non-blocking so it does not interfere with the rest of the application let (timestamp_log_writer, timestamp_log_writer_guard) = - tracing_appender::non_blocking(timestamp_log_file_appender); + create_non_blocking_writer(TEMP_LOG_DIR, timestamp_log_file_name.as_str()); let (latest_log_writer, latest_log_writer_guard) = - tracing_appender::non_blocking(latest_log_file_appender); + create_non_blocking_writer(TEMP_LOG_DIR, LATEST_LOG_FILENAME); - InitLoggingFileWritersProduct { - multi_file_writer: MultiFileWriter::new(vec![ + let writer_by_file_name = HashMap::from([ + ( + timestamp_log_file_name.clone(), Arc::new(Mutex::new(timestamp_log_writer)), + ), + ( + LATEST_LOG_FILENAME.to_string(), Arc::new(Mutex::new(latest_log_writer)), - ]), - file_writer_guards: vec![timestamp_log_writer_guard, latest_log_writer_guard], - } -} + ), + ]); -impl MultiFileWriter { - pub fn new(writers: Vec>>) -> Self { - Self { writers } - } -} + let guards_by_file_name = HashMap::from([ + (timestamp_log_file_name, timestamp_log_writer_guard), + (LATEST_LOG_FILENAME.to_string(), latest_log_writer_guard), + ]); -impl Write for MultiFileWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - for writer in &self.writers { - let _ = writer.lock().expect("to get lock").write_all(buf); - } - Ok(buf.len()) - } - - fn flush(&mut self) -> std::io::Result<()> { - for writer in &self.writers { - writer.lock().expect("to get lock").flush()?; - } - Ok(()) + InitLoggingFileWritersProduct { + multi_log_file_writer: MultiLogFileWriter::new( + TEMP_LOG_DIR.to_string(), + writer_by_file_name, + ), + guards_by_file_name, } } diff --git a/src/monitoring/logging/multi_log_file_writer.rs b/src/monitoring/logging/multi_log_file_writer.rs new file mode 100644 index 
0000000..a65255f --- /dev/null +++ b/src/monitoring/logging/multi_log_file_writer.rs @@ -0,0 +1,171 @@ +use std::{ + collections::HashMap, + fs::{File, OpenOptions}, + io::Write, + path::Path, + sync::{Arc, Mutex}, +}; + +use tracing::{event, Level}; +use tracing_appender::non_blocking::{NonBlocking, WorkerGuard}; +use tracing_subscriber::{reload::Handle, Layer, Registry}; + +use crate::app::config::Config; + +#[derive(Clone)] +pub struct MultiLogFileWriter { + directory: String, + writer_by_file_name: HashMap>>, +} + +impl MultiLogFileWriter { + pub fn new( + directory: String, + writer_by_file_name: HashMap>>, + ) -> Self { + Self { + writer_by_file_name, + directory, + } + } + + pub fn update_log_writer_with_config( + &mut self, + config: &Config, + current_guards_by_file_name: HashMap, + reload_handle: Handle + Send + Sync>, Registry>, + ) -> Vec { + let new_log_directory = config.logs_path(); + let guards = self.update_logging_dir( + new_log_directory, + current_guards_by_file_name, + reload_handle, + ); + + event!( + Level::INFO, + "Updated log file directory to: {}", + new_log_directory + ); + guards + } + + pub fn update_logging_dir( + &mut self, + new_directory: &str, + mut current_guards_by_file_name: HashMap, + reload_handle: Handle + Send + Sync>, Registry>, + ) -> Vec { + event!(Level::INFO, "Starting log directory update..."); + let mut new_guards = vec![]; + let old_directory = self.directory.clone(); + + for (file_name, current_writer) in self.writer_by_file_name.iter() { + let old_log_file_path = format!("{}/{}", old_directory, file_name); + let new_log_file_path = format!("{}/{}", new_directory, file_name); + + // first we drop current corresponding guard + if let Some(current_guard) = current_guards_by_file_name.remove(file_name) { + drop(current_guard); + // making sure we'll flush everything by the time we copy old file contents + std::thread::sleep(std::time::Duration::from_millis(50)); + } + + let (file_writer, file_writer_guard) = + create_non_blocking_writer(new_directory, file_name.as_str()); + + Self::copy_old_logs_to_new_path(old_log_file_path, new_log_file_path); + + // then we update the writers + let mut mutex_guard = current_writer.lock().expect("to get lock"); + *mutex_guard = file_writer; + + new_guards.push(file_writer_guard); + } + + // reloading layer + reload_handle + .reload(get_fmt_layer(self.clone())) + .expect("Failed reloading logging layer"); + + self.directory = new_directory.to_string(); + + new_guards + } + + fn copy_old_logs_to_new_path(old_log_file_path: String, new_log_file_path: String) { + let Ok(mut old_log_file_content) = File::open(&old_log_file_path) else { + event!( + Level::ERROR, + "Could not open old log file: {}", + old_log_file_path + ); + return; + }; + if let Some(parent_dir) = Path::new(&new_log_file_path).parent() { + // Create new log dir if it doesn't exist + std::fs::create_dir_all(parent_dir).expect("to create dir"); + } + let Ok(mut new_log_file_content) = OpenOptions::new() + .create(true) + .append(true) + .open(&new_log_file_path) + else { + event!( + Level::ERROR, + "Could not open new log file: {}", + new_log_file_path + ); + return; + }; + + let copy_result = std::io::copy(&mut old_log_file_content, &mut new_log_file_content); + if let Err(err) = copy_result { + event!(Level::ERROR, "Could not copy old file logs: {}", err); + }; + } +} + +impl Write for MultiLogFileWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + for writer in self.writer_by_file_name.values() { + writer + .lock() + .expect("to 
get lock") + .write_all(buf) + .expect("to write"); + } + Ok(buf.len()) + } + + fn flush(&mut self) -> std::io::Result<()> { + for writer in self.writer_by_file_name.values() { + writer + .lock() + .expect("to get lock") + .flush() + .expect("to flush"); + } + Ok(()) + } +} + +pub fn get_fmt_layer(writer: MultiLogFileWriter) -> Box + Send + Sync> { + tracing_subscriber::fmt::layer() + .with_writer(move || writer.clone()) + .with_file(true) + .with_line_number(true) + .with_timer(tracing_subscriber::fmt::time::SystemTime) + .json() + .boxed() +} + +pub fn create_non_blocking_writer(directory: &str, file_name: &str) -> (NonBlocking, WorkerGuard) { + // we have to truncate the file so if the file already exists we overwrite its content + // this is particularly important for the latest log desired behavior + let log_path = Path::new(directory).join(file_name); + let _ = File::create(&log_path); + + let file_appender = tracing_appender::rolling::never(directory, file_name); + tracing_appender::non_blocking(file_appender) +} From 0ee020a23d64d50b3963a9cace39c118dab517a6 Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sat, 19 Apr 2025 11:29:30 -0300 Subject: [PATCH 3/7] THIS COMMIT SHOULD BE DROPPED This commit is just a way of testing the log behavior regarding logs created before and after config initialization Signed-off-by: Lorenzo Bertin Salvador --- src/main.rs | 14 ++++++++++++++ src/monitoring/logging.rs | 2 +- src/monitoring/logging/multi_log_file_writer.rs | 4 ++-- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/src/main.rs b/src/main.rs index 1692804..52e13c0 100644 --- a/src/main.rs +++ b/src/main.rs @@ -16,6 +16,11 @@ mod ui; mod utils; fn main() -> color_eyre::Result<()> { + event!( + Level::INFO, + "log before logging initialization (should not appear anywhere)" + ); + // file writer guards should be propagated to main() so the logging thread lives enough let InitMonitoringProduct { logging_guards_by_file_name, @@ -24,6 +29,8 @@ fn main() -> color_eyre::Result<()> { .. 
} = init_monitoring(); + event!(Level::INFO, "log before config initialization"); + let args = Cli::parse(); utils::install_hooks()?; @@ -32,6 +39,11 @@ fn main() -> color_eyre::Result<()> { let config = Config::build(); config.create_dirs(); + event!( + Level::INFO, + "log after config initialization but before logging layer reload" + ); + // with the config we can update log directory let _guards = multi_log_file_writer.update_log_writer_with_config( &config, @@ -39,6 +51,8 @@ fn main() -> color_eyre::Result<()> { logging_reload_handle, ); + event!(Level::INFO, "log after logging layer reload"); + match args.resolve(terminal, &config) { ControlFlow::Break(b) => return b, ControlFlow::Continue(t) => terminal = t, diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs index 23ee01a..2fccb58 100644 --- a/src/monitoring/logging.rs +++ b/src/monitoring/logging.rs @@ -10,7 +10,7 @@ use tracing_subscriber::{reload::Handle, Layer, Registry}; pub mod multi_log_file_writer; const LATEST_LOG_FILENAME: &str = "latest.tracing.log"; -const TEMP_LOG_DIR: &str = "/tmp/temporary-patch-hub-logs"; +const TEMP_LOG_DIR: &str = "./temporary-logs-test/temporary-patch-hub-logs/"; pub struct InitLoggingLayerProduct { pub logging_layer: Box + Send + Sync>, diff --git a/src/monitoring/logging/multi_log_file_writer.rs b/src/monitoring/logging/multi_log_file_writer.rs index a65255f..358a711 100644 --- a/src/monitoring/logging/multi_log_file_writer.rs +++ b/src/monitoring/logging/multi_log_file_writer.rs @@ -31,11 +31,11 @@ impl MultiLogFileWriter { pub fn update_log_writer_with_config( &mut self, - config: &Config, + _config: &Config, current_guards_by_file_name: HashMap, reload_handle: Handle + Send + Sync>, Registry>, ) -> Vec { - let new_log_directory = config.logs_path(); + let new_log_directory = "./temporary-logs-test/config-dir-logs-test/"; let guards = self.update_logging_dir( new_log_directory, current_guards_by_file_name, From 7979ddade3d214e1ade399ef99dd4ebaf9deb060 Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sat, 19 Apr 2025 11:30:26 -0300 Subject: [PATCH 4/7] Revert "THIS COMMIT SHOULD BE DROPPED". THIS COMMIT ALSO SHOULD BE DROPPED This reverts commit 948f13d0be0770509fdcdc62bf89a70dede6db3f. --- src/main.rs | 14 -------------- src/monitoring/logging.rs | 2 +- src/monitoring/logging/multi_log_file_writer.rs | 4 ++-- 3 files changed, 3 insertions(+), 17 deletions(-) diff --git a/src/main.rs b/src/main.rs index 52e13c0..1692804 100644 --- a/src/main.rs +++ b/src/main.rs @@ -16,11 +16,6 @@ mod ui; mod utils; fn main() -> color_eyre::Result<()> { - event!( - Level::INFO, - "log before logging initialization (should not appear anywhere)" - ); - // file writer guards should be propagated to main() so the logging thread lives enough let InitMonitoringProduct { logging_guards_by_file_name, @@ -29,8 +24,6 @@ fn main() -> color_eyre::Result<()> { .. 
} = init_monitoring(); - event!(Level::INFO, "log before config initialization"); - let args = Cli::parse(); utils::install_hooks()?; @@ -39,11 +32,6 @@ fn main() -> color_eyre::Result<()> { let config = Config::build(); config.create_dirs(); - event!( - Level::INFO, - "log after config initialization but before logging layer reload" - ); - // with the config we can update log directory let _guards = multi_log_file_writer.update_log_writer_with_config( &config, @@ -51,8 +39,6 @@ fn main() -> color_eyre::Result<()> { logging_reload_handle, ); - event!(Level::INFO, "log after logging layer reload"); - match args.resolve(terminal, &config) { ControlFlow::Break(b) => return b, ControlFlow::Continue(t) => terminal = t, diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs index 2fccb58..23ee01a 100644 --- a/src/monitoring/logging.rs +++ b/src/monitoring/logging.rs @@ -10,7 +10,7 @@ use tracing_subscriber::{reload::Handle, Layer, Registry}; pub mod multi_log_file_writer; const LATEST_LOG_FILENAME: &str = "latest.tracing.log"; -const TEMP_LOG_DIR: &str = "./temporary-logs-test/temporary-patch-hub-logs/"; +const TEMP_LOG_DIR: &str = "/tmp/temporary-patch-hub-logs"; pub struct InitLoggingLayerProduct { pub logging_layer: Box + Send + Sync>, diff --git a/src/monitoring/logging/multi_log_file_writer.rs b/src/monitoring/logging/multi_log_file_writer.rs index 358a711..a65255f 100644 --- a/src/monitoring/logging/multi_log_file_writer.rs +++ b/src/monitoring/logging/multi_log_file_writer.rs @@ -31,11 +31,11 @@ impl MultiLogFileWriter { pub fn update_log_writer_with_config( &mut self, - _config: &Config, + config: &Config, current_guards_by_file_name: HashMap, reload_handle: Handle + Send + Sync>, Registry>, ) -> Vec { - let new_log_directory = "./temporary-logs-test/config-dir-logs-test/"; + let new_log_directory = config.logs_path(); let guards = self.update_logging_dir( new_log_directory, current_guards_by_file_name, From f58683d61374d389609bd037265fabca202b1f48 Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sat, 19 Apr 2025 11:56:43 -0300 Subject: [PATCH 5/7] refactor: replace Logger occurrences by tracing::event! This commit finishes making patch-hub new log logic equal to the previous one (with the Logger module). The log file names are the same and every log is produced by tracing::event! macro. 
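As an illustration of the mechanical substitution applied throughout this commit, here is a minimal before/after sketch (the message text mirrors the call site in src/app/patch_renderer.rs; the wrapper function name and error type are illustrative only, not part of the codebase):

    // before: in-house Logger module
    // Logger::error(format!("Failed to spawn bat for patch preview: {}", e));

    // after: structured events through the tracing macros
    use tracing::{event, Level};

    fn log_bat_spawn_failure(e: &std::io::Error) {
        // picked up by the "patch_hub" target filter installed in src/monitoring.rs
        event!(Level::ERROR, "Failed to spawn bat for patch preview: {}", e);
    }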
Signed-off-by: Lorenzo Bertin Salvador --- src/app.rs | 29 ++++++++++++++++++++-------- src/app/cover_renderer.rs | 5 ++--- src/app/logging/garbage_collector.rs | 14 +++++++++----- src/app/logging/log_on_error.rs | 14 ++------------ src/app/patch_renderer.rs | 16 +++++++++------ src/handler.rs | 16 +++++++++------ src/main.rs | 2 -- src/monitoring/logging.rs | 4 ++-- src/ui/edit_config.rs | 4 ++-- 9 files changed, 58 insertions(+), 46 deletions(-) diff --git a/src/app.rs b/src/app.rs index a5faef2..852965b 100644 --- a/src/app.rs +++ b/src/app.rs @@ -23,6 +23,7 @@ use screens::{ CurrentScreen, }; use std::collections::{HashMap, HashSet}; +use tracing::{event, Level}; use crate::utils; @@ -81,7 +82,7 @@ impl App { // Initialize the logger before the app starts Logger::init_log_file(&config)?; - Logger::info("patch-hub started"); + event!(Level::INFO, "patch-hub started"); logging::garbage_collector::collect_garbage(&config); Ok(App { @@ -207,7 +208,10 @@ impl App { { Ok(render) => render, Err(_) => { - Logger::error("Failed to render cover preview with external program"); + event!( + Level::ERROR, + "Failed to render cover preview with external program" + ); raw_cover.to_string() } }; @@ -216,7 +220,8 @@ impl App { match render_patch_preview(raw_patch, self.config.patch_renderer()) { Ok(render) => render, Err(_) => { - Logger::error( + event!( + Level::ERROR, "Failed to render patch preview with external program", ); raw_patch.to_string() @@ -392,30 +397,38 @@ impl App { let mut app_can_run = true; if !utils::binary_exists("b4") { - Logger::error("b4 is not installed, patchsets cannot be downloaded"); + event!( + Level::ERROR, + "b4 is not installed, patchsets cannot be downloaded" + ); app_can_run = false; } if !utils::binary_exists("git") { - Logger::warn("git is not installed, send-email won't work"); + event!(Level::WARN, "git is not installed, send-email won't work"); } match self.config.patch_renderer() { PatchRenderer::Bat => { if !utils::binary_exists("bat") { - Logger::warn("bat is not installed, patch rendering will fallback to default"); + event!( + Level::WARN, + "bat is not installed, patch rendering will fallback to default" + ); } } PatchRenderer::Delta => { if !utils::binary_exists("delta") { - Logger::warn( + event!( + Level::WARN, "delta is not installed, patch rendering will fallback to default", ); } } PatchRenderer::DiffSoFancy => { if !utils::binary_exists("diff-so-fancy") { - Logger::warn( + event!( + Level::WARN, "diff-so-fancy is not installed, patch rendering will fallback to default", ); } diff --git a/src/app/cover_renderer.rs b/src/app/cover_renderer.rs index fc30f26..7e4dcb0 100644 --- a/src/app/cover_renderer.rs +++ b/src/app/cover_renderer.rs @@ -5,8 +5,7 @@ use std::{ }; use serde::{Deserialize, Serialize}; - -use super::logging::Logger; +use tracing::{event, Level}; #[derive(Debug, Serialize, Deserialize, Clone, Copy, Default)] pub enum CoverRenderer { @@ -68,7 +67,7 @@ fn bat_cover_renderer(patch: &str) -> color_eyre::Result { .stdout(Stdio::piped()) .spawn() .map_err(|e| { - Logger::error(format!("Failed to spawn bat for cover preview: {}", e)); + event!(Level::ERROR, "Failed to spawn bat for cover preview: {}", e); e })?; diff --git a/src/app/logging/garbage_collector.rs b/src/app/logging/garbage_collector.rs index e06892b..c7709f8 100644 --- a/src/app/logging/garbage_collector.rs +++ b/src/app/logging/garbage_collector.rs @@ -2,9 +2,9 @@ //! //! This module is responsible for cleaning up the log files. 
-use crate::app::config::Config; +use tracing::{event, Level}; -use super::Logger; +use crate::app::config::Config; /// Collects the garbage from the logs directory. /// Will check for log files `patch-hub_*.log` and remove them if they are older than the `max_log_age` in the config. @@ -16,7 +16,10 @@ pub fn collect_garbage(config: &Config) { let now = std::time::SystemTime::now(); let logs_path = config.logs_path(); let Ok(logs) = std::fs::read_dir(logs_path) else { - Logger::error("Failed to read the logs directory during garbage collection"); + event!( + Level::ERROR, + "Failed to read the logs directory during garbage collection" + ); return; }; @@ -41,10 +44,11 @@ pub fn collect_garbage(config: &Config) { let age = age.as_secs() / 60 / 60 / 24; if age as usize > config.max_log_age() && std::fs::remove_file(log.path()).is_err() { - Logger::warn(format!( + event!( + Level::WARN, "Failed to remove the log file: {}", log.path().to_string_lossy() - )); + ); } } } diff --git a/src/app/logging/log_on_error.rs b/src/app/logging/log_on_error.rs index b156c33..bf88b0c 100644 --- a/src/app/logging/log_on_error.rs +++ b/src/app/logging/log_on_error.rs @@ -1,7 +1,7 @@ #[macro_export] macro_rules! log_on_error { ($result:expr) => { - log_on_error!($crate::app::logging::LogLevel::Error, $result) + log_on_error!(tracing::Level::ERROR, $result) }; ($level:expr, $result:expr) => { match $result { @@ -9,17 +9,7 @@ macro_rules! log_on_error { Err(ref error) => { let error_message = format!("Error executing {:?}: {}", stringify!($result), &error); - match $level { - $crate::app::logging::LogLevel::Info => { - Logger::info(error_message); - } - $crate::app::logging::LogLevel::Warning => { - Logger::warn(error_message); - } - $crate::app::logging::LogLevel::Error => { - Logger::error(error_message); - } - } + tracing::event!($level, error_message); $result } } diff --git a/src/app/patch_renderer.rs b/src/app/patch_renderer.rs index 5a9efc4..ac748de 100644 --- a/src/app/patch_renderer.rs +++ b/src/app/patch_renderer.rs @@ -6,8 +6,7 @@ use std::{ use color_eyre::eyre::eyre; use serde::{Deserialize, Serialize}; - -use super::logging::Logger; +use tracing::{event, Level}; #[derive(Debug, Serialize, Deserialize, Clone, Copy, Default)] pub enum PatchRenderer { @@ -96,7 +95,7 @@ fn bat_patch_renderer(patch: &str) -> color_eyre::Result { .stdout(Stdio::piped()) .spawn() .map_err(|e| { - Logger::error(format!("Failed to spawn bat for patch preview: {}", e)); + event!(Level::ERROR, "Failed to spawn bat for patch preview: {}", e); e })?; @@ -128,7 +127,11 @@ fn delta_patch_renderer(patch: &str) -> color_eyre::Result { .stdout(Stdio::piped()) .spawn() .map_err(|e| { - Logger::error(format!("Failed to spawn delta for patch preview: {}", e)); + event!( + Level::ERROR, + "Failed to spawn delta for patch preview: {}", + e + ); e })?; @@ -154,10 +157,11 @@ fn diff_so_fancy_renderer(patch: &str) -> color_eyre::Result { .stdout(Stdio::piped()) .spawn() .map_err(|e| { - Logger::error(format!( + event!( + Level::ERROR, "Failed to spawn diff-so-fancy for patch preview: {}", e - )); + ); e })?; diff --git a/src/handler.rs b/src/handler.rs index ef00320..71db078 100644 --- a/src/handler.rs +++ b/src/handler.rs @@ -8,9 +8,10 @@ use std::{ ops::ControlFlow, time::{Duration, Instant}, }; +use tracing::{event, Level}; use crate::{ - app::{logging::Logger, screens::CurrentScreen, App}, + app::{screens::CurrentScreen, App}, loading_screen, ui::draw_ui, }; @@ -22,7 +23,7 @@ use edit_config::handle_edit_config; use 
latest::handle_latest_patchsets; use mail_list::handle_mailing_list_selection; use ratatui::{ - crossterm::event::{self, Event, KeyCode, KeyEvent, KeyEventKind}, + crossterm::event::{Event, KeyCode, KeyEvent, KeyEventKind}, prelude::Backend, Terminal, }; @@ -107,7 +108,10 @@ where B: Backend + Send + 'static, { if !app.check_external_deps() { - Logger::error("patch-hub cannot be executed because some dependencies are missing"); + event!( + Level::WARN, + "patch-hub cannot be executed because some dependencies are missing" + ); bail!("patch-hub cannot be executed because some dependencies are missing, check logs for more information"); } @@ -121,7 +125,7 @@ where // need to refresh the UI independently of any event as doing so gravely // hinders the performance to below acceptable. // if event::poll(Duration::from_millis(16))? { - if let Event::Key(key) = event::read()? { + if let Event::Key(key) = ratatui::crossterm::event::read()? { if key.kind == KeyEventKind::Release { continue; } @@ -138,8 +142,8 @@ fn wait_key_press(ch: char, wait_time: Duration) -> color_eyre::Result { let start = Instant::now(); while Instant::now() - start < wait_time { - if event::poll(Duration::from_millis(16))? { - if let Event::Key(key) = event::read()? { + if ratatui::crossterm::event::poll(Duration::from_millis(16))? { + if let Event::Key(key) = ratatui::crossterm::event::read()? { if key.kind == KeyEventKind::Release { continue; } diff --git a/src/main.rs b/src/main.rs index 1692804..438bda2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -49,8 +49,6 @@ fn main() -> color_eyre::Result<()> { run_app(terminal, app)?; utils::restore()?; - Logger::info("patch-hub finished"); - // event! usage example as an alternative for Logger module event!(Level::INFO, "patch-hub finished"); Logger::flush(); diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs index 23ee01a..444429c 100644 --- a/src/monitoring/logging.rs +++ b/src/monitoring/logging.rs @@ -9,7 +9,7 @@ use tracing_subscriber::{reload::Handle, Layer, Registry}; pub mod multi_log_file_writer; -const LATEST_LOG_FILENAME: &str = "latest.tracing.log"; +const LATEST_LOG_FILENAME: &str = "latest.log"; const TEMP_LOG_DIR: &str = "/tmp/temporary-patch-hub-logs"; pub struct InitLoggingLayerProduct { @@ -43,7 +43,7 @@ pub fn init_logging_layer() -> InitLoggingLayerProduct { fn init_logging_file_writers() -> InitLoggingFileWritersProduct { let timestamp_log_file_name = format!( - "patch-hub_{}.tracing.log", + "patch-hub_{}.log", chrono::Local::now().format("%Y%m%d-%H%M%S") ); diff --git a/src/ui/edit_config.rs b/src/ui/edit_config.rs index 88b5a57..208920c 100644 --- a/src/ui/edit_config.rs +++ b/src/ui/edit_config.rs @@ -5,8 +5,8 @@ use ratatui::{ widgets::{Block, Borders, Paragraph}, Frame, }; +use tracing::{event, Level}; -use crate::app::logging::Logger; use crate::app::App; pub fn render_main(f: &mut Frame, app: &App, chunk: Rect) { @@ -31,7 +31,7 @@ pub fn render_main(f: &mut Frame, app: &App, chunk: Rect) { let (config, value) = match edit_config.config(i) { Some((cfg, val)) => (cfg, val), None => { - Logger::error(format!("Invalid configuration index: {}", i)); + event!(Level::ERROR, "Invalid configuration index: {}", i); return; } }; From 5763f69cc73b48ef24388c3703a0380be66e9a1e Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sat, 19 Apr 2025 12:25:44 -0300 Subject: [PATCH 6/7] refactor: move garbage_collector and log_on_error location The garbage_collector function continues to work with the new logging methods since it just depends on 
log path and file names which didn't change. Temporary logs are on /tmp which is already a temporary directory, so garbage_collector doesn't need to deal with them. The log_on_error macro was already adapted to deal with tracing::Level log level. Signed-off-by: Lorenzo Bertin Salvador --- src/app.rs | 3 ++- src/app/logging.rs | 3 --- src/monitoring/logging.rs | 2 ++ src/{app => monitoring}/logging/garbage_collector.rs | 0 src/{app => monitoring}/logging/log_on_error.rs | 0 5 files changed, 4 insertions(+), 4 deletions(-) rename src/{app => monitoring}/logging/garbage_collector.rs (100%) rename src/{app => monitoring}/logging/log_on_error.rs (100%) diff --git a/src/app.rs b/src/app.rs index 852965b..69a7efc 100644 --- a/src/app.rs +++ b/src/app.rs @@ -1,5 +1,6 @@ use crate::{ log_on_error, + monitoring::logging::garbage_collector::collect_garbage, ui::popup::{info_popup::InfoPopUp, PopUp}, }; use ansi_to_tui::IntoText; @@ -83,7 +84,7 @@ impl App { // Initialize the logger before the app starts Logger::init_log_file(&config)?; event!(Level::INFO, "patch-hub started"); - logging::garbage_collector::collect_garbage(&config); + collect_garbage(&config); Ok(App { current_screen: CurrentScreen::MailingListSelection, diff --git a/src/app/logging.rs b/src/app/logging.rs index 1e6ec8f..7a52dca 100644 --- a/src/app/logging.rs +++ b/src/app/logging.rs @@ -8,9 +8,6 @@ use chrono::Local; use crate::app::config::Config; -pub mod garbage_collector; -pub mod log_on_error; - const LATEST_LOG_FILENAME: &str = "latest.log"; static mut LOG_BUFFER: Logger = Logger { diff --git a/src/monitoring/logging.rs b/src/monitoring/logging.rs index 444429c..8e91874 100644 --- a/src/monitoring/logging.rs +++ b/src/monitoring/logging.rs @@ -7,6 +7,8 @@ use multi_log_file_writer::{create_non_blocking_writer, get_fmt_layer, MultiLogF use tracing_appender::non_blocking::WorkerGuard; use tracing_subscriber::{reload::Handle, Layer, Registry}; +pub mod garbage_collector; +pub mod log_on_error; pub mod multi_log_file_writer; const LATEST_LOG_FILENAME: &str = "latest.log"; diff --git a/src/app/logging/garbage_collector.rs b/src/monitoring/logging/garbage_collector.rs similarity index 100% rename from src/app/logging/garbage_collector.rs rename to src/monitoring/logging/garbage_collector.rs diff --git a/src/app/logging/log_on_error.rs b/src/monitoring/logging/log_on_error.rs similarity index 100% rename from src/app/logging/log_on_error.rs rename to src/monitoring/logging/log_on_error.rs From 46e2aa836f51c7ac7b116917d510f38ece5a80fc Mon Sep 17 00:00:00 2001 From: Lorenzo Bertin Salvador Date: Sat, 19 Apr 2025 12:31:29 -0300 Subject: [PATCH 7/7] refactor: remove Logger module The module occurrences and usage was already replaced in previous commits, so it's possible to remove the whole module. 
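For context on why nothing replaces the explicit Logger::flush() call: the non-blocking writers from tracing-appender flush their buffered lines when the WorkerGuard values returned by init_monitoring() are dropped, which is why main() keeps those guards alive for the whole run. A minimal standalone sketch of that lifecycle, writing to stdout instead of the MultiLogFileWriter used by this series:

    use tracing_appender::non_blocking::WorkerGuard;
    use tracing_subscriber::prelude::*;

    fn main() {
        // the guard must outlive every event! call whose output we want persisted
        let (writer, _guard): (_, WorkerGuard) =
            tracing_appender::non_blocking(std::io::stdout());

        tracing_subscriber::registry()
            .with(tracing_subscriber::fmt::layer().with_writer(writer))
            .init();

        tracing::event!(tracing::Level::INFO, "patch-hub finished");
    } // _guard is dropped here, flushing any remaining buffered log lines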
Signed-off-by: Lorenzo Bertin Salvador --- src/app.rs | 7 +- src/app/logging.rs | 299 --------------------------------------------- src/main.rs | 4 +- src/utils.rs | 3 - 4 files changed, 2 insertions(+), 311 deletions(-) delete mode 100644 src/app/logging.rs diff --git a/src/app.rs b/src/app.rs index 69a7efc..a2eee33 100644 --- a/src/app.rs +++ b/src/app.rs @@ -7,7 +7,6 @@ use ansi_to_tui::IntoText; use color_eyre::eyre::bail; use config::Config; use cover_renderer::render_cover; -use logging::Logger; use patch_hub::lore::{ lore_api_client::BlockingLoreAPIClient, lore_session, @@ -30,7 +29,6 @@ use crate::utils; pub mod config; pub mod cover_renderer; -pub mod logging; pub mod patch_renderer; pub mod screens; @@ -61,8 +59,7 @@ pub struct App { impl App { /// Creates a new instance of `App`. It dynamically loads configurations /// based on precedence (see [crate::app::Config::build]), app data - /// (available mailing lists, bookmarked patchsets, reviewed patchsets), and - /// initializes the Logger (see [crate::app::logging::Logger]) + /// (available mailing lists, bookmarked patchsets, reviewed patchsets) /// /// # Returns /// @@ -81,8 +78,6 @@ impl App { let lore_api_client = BlockingLoreAPIClient::default(); - // Initialize the logger before the app starts - Logger::init_log_file(&config)?; event!(Level::INFO, "patch-hub started"); collect_garbage(&config); diff --git a/src/app/logging.rs b/src/app/logging.rs deleted file mode 100644 index 7a52dca..0000000 --- a/src/app/logging.rs +++ /dev/null @@ -1,299 +0,0 @@ -use std::{ - fmt::Display, - fs::{self, File, OpenOptions}, - io::Write, -}; - -use chrono::Local; - -use crate::app::config::Config; - -const LATEST_LOG_FILENAME: &str = "latest.log"; - -static mut LOG_BUFFER: Logger = Logger { - log_file: None, - log_filepath: None, - latest_log_file: None, - latest_log_filepath: None, - logs_to_print: Vec::new(), - print_level: LogLevel::Warning, -}; - -/// Describes the log level of a message -/// -/// This is used to determine the severity of a log message so the logger handles it accordingly to the verbosity level. -/// -/// The levels severity are: `Info` < `Warning` < `Error` -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -#[allow(dead_code)] -pub enum LogLevel { - Info, - Warning, - Error, -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct LogMessage { - level: LogLevel, - message: String, -} - -/// The Logger singleton that manages logging to [`stderr`] (log buffer) and a log file. -/// This is safe to use only in single-threaded scenarios. The messages are written to the log file immediatly, -/// but the messages to the `stderr` are written only after the TUI is closed, so they are kept in memory. -/// -/// The logger also has a log level that can be set to filter the messages that are written to the log file. -/// Only messages with a level equal or higher than the log level are written to the log file. 
-/// -/// The expected flow is: -/// - Initialize the log file with [`init_log_file`] -/// - Write to the log file with [`info`], [`warn`] or [`error`] -/// - Flush the log buffer to the stderr and close the log file with [`flush`] -/// -/// The log file is created in the logs_path defined in the [`Config`] struct -/// -/// [`Config`]: super::config::Config -/// [`init_log_file`]: Logger::init_log_file -/// [`info`]: Logger::info -/// [`warn`]: Logger::warn -/// [`error`]: Logger::error -/// [`flush`]: Logger::flush -/// [`stderr`]: std::io::stderr -#[derive(Debug)] -pub struct Logger { - log_file: Option, - log_filepath: Option, - latest_log_file: Option, - latest_log_filepath: Option, - logs_to_print: Vec, - print_level: LogLevel, // TODO: Add a log level configuration -} - -impl Logger { - /// Private method to get access to the Logger singleton - /// - /// This function makes use of unsafe code to access a static mut. Also, it's `inline` so won't have any overhead - /// - /// # Safety - /// - /// It's safe to use in single-threaded scenarios only - /// - /// # Examples - /// ```rust norun - /// // Get the logger singleton - /// Logger::init_log_file(&config); // Initialize the log file - /// Logger::info("This is an info log message"); // Write a message to the log file - /// ``` - #[inline] - fn get_logger() -> &'static mut Logger { - #[allow(static_mut_refs)] - unsafe { - &mut LOG_BUFFER - } - } - - /// Write the string `msg` to the logs to print buffer and the log file - /// - /// # Panics - /// - /// If the log file is not initialized - /// - /// # Examples - /// ```rust norun - /// // Make sure to initialize the log file before writing to it - /// Logger::init_log_file(&config); - /// // Get the logger singleton and write a message to the log file - /// Logger::get_logger().log(LogLevel::Info, "This is a log message"); - /// ``` - fn log(&mut self, level: LogLevel, message: M) { - let current_datetime = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); - let message = format!("[{}] {}", current_datetime, message); - - let log = LogMessage { level, message }; - - let file = self.log_file - .as_mut() - .expect("Log file not initialized, make sure to call Logger::init_log_file() before writing to the log file"); - writeln!(file, "{log}").expect("Failed to write to log file"); - - let latest_log_file = self.latest_log_file - .as_mut() - .expect("Latest log file not initialized, make sure to call Logger::init_log_file() before writing to the log file"); - writeln!(latest_log_file, "{log}").expect("Failed to write to real time log file"); - - if self.print_level <= level { - // Only save logs to print w/ level equal or higher than the filter log level - self.logs_to_print.push(log); - } - } - - /// Write an info message to the log - /// - /// # Panics - /// - /// If the log file is not initialized - /// - /// # Safety - /// - /// It's safe to use in single-threaded scenarios only - /// - /// # Examples - /// - /// ```rust norun - /// - /// // Make sure to initialize the log file before writing to it - /// Logger::init_log_file(&config); - /// Logger::info("This is an info message"); // [INFO] [2024-09-11 14:59:00] This is an info message - /// ``` - #[inline] - #[allow(dead_code)] - pub fn info(msg: M) { - Logger::get_logger().log(LogLevel::Info, msg); - } - - /// Write a warn message to the log - /// - /// # Panics - /// - /// If the log file is not initialized - /// - /// # Safety - /// - /// It's safe to use in single-threaded scenarios only - /// - /// # Examples - /// - /// 
```rust norun - /// - /// // Make sure to initialize the log file before writing to it - /// Logger::init_log_file(&config); - /// Logger::warn("This is a warning"); // [WARN] [2024-09-11 14:59:00] This is a warning - /// ``` - #[inline] - #[allow(dead_code)] - pub fn warn(msg: M) { - Logger::get_logger().log(LogLevel::Warning, msg); - } - - /// Write an error message to the log - /// - /// # Panics - /// - /// If the log file is not initialized - /// - /// # Safety - /// - /// It's safe to use in single-threaded scenarios only - /// - /// # Examples - /// - /// ```rust norun - /// - /// // Make sure to initialize the log file before writing to it - /// Logger::init_log_file(&config); - /// Logger::error("This is an error message"); // [ERROR] [2024-09-11 14:59:00] This is an error message - /// ``` - #[inline] - #[allow(dead_code)] - pub fn error(msg: M) { - Logger::get_logger().log(LogLevel::Error, msg); - } - - /// Flush the log buffer to stderr and closes the log file. - /// It's intended to be called only once when patch-hub is finishing. - /// - /// # Panics - /// - /// If called before the log file is initialized or if called twice - /// - /// # Examples - /// ```rust norun - /// // Make sure to initialize the log file before writing to it - /// Logger::init_log_file(&config); - /// - /// // Flush before finishing the application - /// Logger::flush(); - /// // Any further attempt to use the logger will panic, unless it's reinitialized - /// ``` - pub fn flush() { - let logger = Logger::get_logger(); - for entry in &logger.logs_to_print { - eprintln!("{}", entry); - } - - if let Some(f) = &logger.log_filepath { - eprintln!("Check the full log file: {}", f); - } - } - - /// Initialize the log file. - /// - /// This function must be called before any other operation with the logging system - /// - /// # Panics - /// - /// If it fails to create the log file - /// - /// # Examples - /// ```rust norun - /// // Once you get the config struct... - /// let config = Config::build(); - /// // ... 
initialize the log file - /// Logger::init_log_file(&config); - /// ``` - pub fn init_log_file(config: &Config) -> Result<(), std::io::Error> { - let logger = Logger::get_logger(); - - let logs_path = config.logs_path(); - fs::create_dir_all(logs_path)?; - - if logger.latest_log_file.is_none() { - let latest_log_filename = LATEST_LOG_FILENAME.to_string(); - let latest_log_filepath = format!("{}/{}", logs_path, latest_log_filename); - - File::create(&latest_log_filepath)?; - - let log_file = OpenOptions::new() - .create(true) - .append(true) - .open(&latest_log_filepath)?; - - logger.latest_log_file = Some(log_file); - logger.latest_log_filepath = Some(latest_log_filepath); - } - - if logger.log_file.is_none() { - let log_filename = format!( - "patch-hub_{}.log", - chrono::Local::now().format("%Y%m%d-%H%M%S") - ); - let log_filepath = format!("{}/{}", logs_path, log_filename); - - let log_file = OpenOptions::new() - .create(true) - .append(true) - .open(&log_filepath)?; - - logger.log_file = Some(log_file); - logger.log_filepath = Some(log_filepath); - } - - Ok(()) - } -} - -impl Display for LogMessage { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "[{}] {}", self.level, self.message) - } -} - -impl Display for LogLevel { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - LogLevel::Info => write!(f, "INFO"), - LogLevel::Warning => write!(f, "WARN"), - LogLevel::Error => write!(f, "ERROR"), - } - } -} diff --git a/src/main.rs b/src/main.rs index 438bda2..b473f6b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ use std::ops::ControlFlow; use crate::app::App; -use app::{config::Config, logging::Logger}; +use app::config::Config; use clap::Parser; use cli::Cli; use handler::run_app; @@ -51,7 +51,5 @@ fn main() -> color_eyre::Result<()> { event!(Level::INFO, "patch-hub finished"); - Logger::flush(); - Ok(()) } diff --git a/src/utils.rs b/src/utils.rs index 2cc9aef..6eb7f33 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -13,8 +13,6 @@ use ratatui::{ Terminal, }; -use crate::app::logging::Logger; - /// A type alias for the terminal type used in this application pub type Tui = Terminal>; @@ -41,7 +39,6 @@ pub fn install_hooks() -> color_eyre::Result<()> { let panic_hook = panic_hook.into_panic_hook(); panic::set_hook(Box::new(move |panic_info| { restore().unwrap(); - Logger::flush(); panic_hook(panic_info); }));