aboutsummaryrefslogtreecommitdiff
path: root/src/logging/logger.rs
diff options
context:
space:
mode:
authorUMTS at Teleco <crt@teleco.ch>2025-12-13 02:48:13 +0100
committerUMTS at Teleco <crt@teleco.ch>2025-12-13 02:48:13 +0100
commite52b8e1c2e110d0feb74feb7905c2ff064b51d55 (patch)
tree3090814e422250e07e72cf1c83241ffd95cf20f7 /src/logging/logger.rs
committing to insanityHEADmaster
Diffstat (limited to 'src/logging/logger.rs')
-rw-r--r--src/logging/logger.rs373
1 files changed, 373 insertions, 0 deletions
diff --git a/src/logging/logger.rs b/src/logging/logger.rs
new file mode 100644
index 0000000..e063f8f
--- /dev/null
+++ b/src/logging/logger.rs
@@ -0,0 +1,373 @@
+// Audit logging module with request ID tracing and custom filters
+use anyhow::{Context, Result};
+use chrono::{DateTime, Utc};
+use regex::Regex;
+use serde_json::Value;
+use std::fs::OpenOptions;
+use std::io::Write;
+use std::path::Path;
+use std::sync::Arc;
+use tokio::sync::Mutex;
+
/// One user-configured log filter: any formatted log line matching
/// `pattern` is also appended to this filter's dedicated output file.
#[derive(Clone)]
struct CustomFilter {
    // Filter name from config; used only in error-context messages.
    name: String,
    // Compiled regex applied to each complete formatted log line.
    pattern: Regex,
    // Shared append-mode handle to the filter's output file. A tokio
    // Mutex so the guard may be held across .await points.
    file: Arc<Mutex<std::fs::File>>,
}
+
/// Audit logger writing per-category log files plus a combined log,
/// with optional masking of sensitive payload fields and user-defined
/// regex filters that tee matching lines into extra files.
///
/// Cloning is cheap: file handles are shared via `Arc<Mutex<_>>`.
#[derive(Clone)]
pub struct AuditLogger {
    // When true, request payloads are masked via mask_sensitive_data().
    mask_passwords: bool,
    // Extra JSON field names to mask (in addition to password/pin).
    sensitive_fields: Vec<String>,
    // Per-category destinations; None disables that stream.
    request_file: Option<Arc<Mutex<std::fs::File>>>,
    query_file: Option<Arc<Mutex<std::fs::File>>>,
    error_file: Option<Arc<Mutex<std::fs::File>>>,
    warning_file: Option<Arc<Mutex<std::fs::File>>>,
    info_file: Option<Arc<Mutex<std::fs::File>>>,
    // Every entry of every category is also written here when set.
    combined_file: Option<Arc<Mutex<std::fs::File>>>,
    // Enabled filters compiled at construction time.
    custom_filters: Vec<CustomFilter>,
}
+
+impl AuditLogger {
+ pub fn new(
+ request_log_path: Option<String>,
+ query_log_path: Option<String>,
+ error_log_path: Option<String>,
+ warning_log_path: Option<String>,
+ info_log_path: Option<String>,
+ combined_log_path: Option<String>,
+ mask_passwords: bool,
+ sensitive_fields: Vec<String>,
+ custom_filter_configs: Vec<crate::config::CustomLogFilter>,
+ ) -> Result<Self> {
+ // Helper function to open a log file if path is provided
+ let open_log_file = |path: &Option<String>| -> Result<Option<Arc<Mutex<std::fs::File>>>> {
+ if let Some(path_str) = path {
+ // Ensure log directories exist
+ if let Some(parent) = Path::new(path_str).parent() {
+ std::fs::create_dir_all(parent).context("Failed to create log directory")?;
+ }
+
+ let file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .open(path_str)
+ .context(format!("Failed to open log file: {}", path_str))?;
+
+ Ok(Some(Arc::new(Mutex::new(file))))
+ } else {
+ Ok(None)
+ }
+ };
+
+ // Initialize custom filters
+ let mut custom_filters = Vec::new();
+ for filter_config in custom_filter_configs {
+ if filter_config.enabled {
+ // Compile regex pattern
+ let pattern = Regex::new(&filter_config.pattern).context(format!(
+ "Invalid regex pattern in filter '{}': {}",
+ filter_config.name, filter_config.pattern
+ ))?;
+
+ // Open filter output file
+ if let Some(parent) = Path::new(&filter_config.output_file).parent() {
+ std::fs::create_dir_all(parent)
+ .context("Failed to create filter log directory")?;
+ }
+
+ let file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .open(&filter_config.output_file)
+ .context(format!(
+ "Failed to open filter log file: {}",
+ filter_config.output_file
+ ))?;
+
+ custom_filters.push(CustomFilter {
+ name: filter_config.name.clone(),
+ pattern,
+ file: Arc::new(Mutex::new(file)),
+ });
+ }
+ }
+
+ Ok(Self {
+ mask_passwords,
+ sensitive_fields,
+ request_file: open_log_file(&request_log_path)?,
+ query_file: open_log_file(&query_log_path)?,
+ error_file: open_log_file(&error_log_path)?,
+ warning_file: open_log_file(&warning_log_path)?,
+ info_file: open_log_file(&info_log_path)?,
+ combined_file: open_log_file(&combined_log_path)?,
+ custom_filters,
+ })
+ }
+
+ /// Generate a unique request ID for transaction tracing
+ pub fn generate_request_id() -> String {
+ format!("{}", uuid::Uuid::new_v4().as_u128() & 0xFFFFFFFF_FFFFFFFF) // 16 hex chars
+ }
+
+ /// Write to combined log and apply custom filters
+ async fn write_combined_and_filter(&self, entry: &str) -> Result<()> {
+ // Write to combined log if configured
+ if let Some(ref file_mutex) = self.combined_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(entry.as_bytes())
+ .context("Failed to write to combined log")?;
+ file.flush().context("Failed to flush combined log")?;
+ }
+
+ // Apply custom filters
+ for filter in &self.custom_filters {
+ if filter.pattern.is_match(entry) {
+ let mut file = filter.file.lock().await;
+ file.write_all(entry.as_bytes())
+ .context(format!("Failed to write to filter log: {}", filter.name))?;
+ file.flush()
+ .context(format!("Failed to flush filter log: {}", filter.name))?;
+ }
+ }
+
+ Ok(())
+ }
+
+ pub async fn log_request(
+ &self,
+ request_id: &str,
+ timestamp: DateTime<Utc>,
+ _ip: &str,
+ user: Option<&str>,
+ power: Option<i32>,
+ endpoint: &str,
+ payload: &Value,
+ ) -> Result<()> {
+ let mut masked_payload = payload.clone();
+
+ if self.mask_passwords {
+ self.mask_sensitive_data(&mut masked_payload);
+ }
+
+ let user_str = user.unwrap_or("anonymous");
+ let power_str = power
+ .map(|p| format!("power={}", p))
+ .unwrap_or_else(|| "power=0".to_string());
+
+ let log_entry = format!(
+ "{} [{}] | REQUEST | user={} | {} | endpoint={} | payload={}\n",
+ timestamp.format("%Y-%m-%d %H:%M:%S"),
+ request_id,
+ user_str,
+ power_str,
+ endpoint,
+ serde_json::to_string(&masked_payload).unwrap_or_else(|_| "invalid_json".to_string())
+ );
+
+ // Write to legacy request log if configured
+ if let Some(ref file_mutex) = self.request_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(log_entry.as_bytes())
+ .context("Failed to write to request log")?;
+ file.flush().context("Failed to flush request log")?;
+ }
+
+ // Write to combined log and apply filters
+ self.write_combined_and_filter(&log_entry).await?;
+
+ Ok(())
+ }
+
+ pub async fn log_query(
+ &self,
+ request_id: &str,
+ timestamp: DateTime<Utc>,
+ user: &str,
+ power: Option<i32>,
+ query: &str,
+ parameters: Option<&Value>,
+ rows_affected: Option<u64>,
+ ) -> Result<()> {
+ let params_str = if let Some(params) = parameters {
+ serde_json::to_string(params).unwrap_or_else(|_| "invalid_json".to_string())
+ } else {
+ "null".to_string()
+ };
+
+ let power_str = power
+ .map(|p| format!("power={}", p))
+ .unwrap_or_else(|| "power=0".to_string());
+ let rows_str = rows_affected
+ .map(|r| format!("rows={}", r))
+ .unwrap_or_else(|| "rows=0".to_string());
+
+ let log_entry = format!(
+ "{} [{}] | QUERY | user={} | {} | {} | query={} | params={}\n",
+ timestamp.format("%Y-%m-%d %H:%M:%S"),
+ request_id,
+ user,
+ power_str,
+ rows_str,
+ query,
+ params_str
+ );
+
+ // Write to legacy query log if configured
+ if let Some(ref file_mutex) = self.query_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(log_entry.as_bytes())
+ .context("Failed to write to query log")?;
+ file.flush().context("Failed to flush query log")?;
+ }
+
+ // Write to combined log and apply filters
+ self.write_combined_and_filter(&log_entry).await?;
+
+ Ok(())
+ }
+
+ pub async fn log_error(
+ &self,
+ request_id: &str,
+ timestamp: DateTime<Utc>,
+ error: &str,
+ context: Option<&str>,
+ user: Option<&str>,
+ power: Option<i32>,
+ ) -> Result<()> {
+ let user_str = user.unwrap_or("unknown");
+ let context_str = context.unwrap_or("general");
+ let power_str = power
+ .map(|p| format!("power={}", p))
+ .unwrap_or_else(|| "power=0".to_string());
+
+ let log_entry = format!(
+ "{} [{}] | ERROR | user={} | {} | context={} | error={}\n",
+ timestamp.format("%Y-%m-%d %H:%M:%S"),
+ request_id,
+ user_str,
+ power_str,
+ context_str,
+ error
+ );
+
+ // Write to legacy error log if configured
+ if let Some(ref file_mutex) = self.error_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(log_entry.as_bytes())
+ .context("Failed to write to error log")?;
+ file.flush().context("Failed to flush error log")?;
+ }
+
+ // Write to combined log and apply filters
+ self.write_combined_and_filter(&log_entry).await?;
+
+ Ok(())
+ }
+
+ pub async fn log_warning(
+ &self,
+ request_id: &str,
+ timestamp: DateTime<Utc>,
+ message: &str,
+ context: Option<&str>,
+ user: Option<&str>,
+ power: Option<i32>,
+ ) -> Result<()> {
+ let user_str = user.unwrap_or("unknown");
+ let context_str = context.unwrap_or("general");
+ let power_str = power
+ .map(|p| format!("power={}", p))
+ .unwrap_or_else(|| "power=0".to_string());
+
+ let log_entry = format!(
+ "{} [{}] | WARNING | user={} | {} | context={} | message={}\n",
+ timestamp.format("%Y-%m-%d %H:%M:%S"),
+ request_id,
+ user_str,
+ power_str,
+ context_str,
+ message
+ );
+
+ // Write to warning log if configured
+ if let Some(ref file_mutex) = self.warning_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(log_entry.as_bytes())
+ .context("Failed to write to warning log")?;
+ file.flush().context("Failed to flush warning log")?;
+ }
+
+ // Write to combined log and apply filters
+ self.write_combined_and_filter(&log_entry).await?;
+
+ Ok(())
+ }
+
+ pub async fn log_info(
+ &self,
+ request_id: &str,
+ timestamp: DateTime<Utc>,
+ message: &str,
+ context: Option<&str>,
+ user: Option<&str>,
+ power: Option<i32>,
+ ) -> Result<()> {
+ let user_str = user.unwrap_or("system");
+ let context_str = context.unwrap_or("general");
+ let power_str = power
+ .map(|p| format!("power={}", p))
+ .unwrap_or_else(|| "power=0".to_string());
+
+ let log_entry = format!(
+ "{} [{}] | INFO | user={} | {} | context={} | message={}\n",
+ timestamp.format("%Y-%m-%d %H:%M:%S"),
+ request_id,
+ user_str,
+ power_str,
+ context_str,
+ message
+ );
+
+ // Write to info log if configured
+ if let Some(ref file_mutex) = self.info_file {
+ let mut file = file_mutex.lock().await;
+ file.write_all(log_entry.as_bytes())
+ .context("Failed to write to info log")?;
+ file.flush().context("Failed to flush info log")?;
+ }
+
+ // Write to combined log and apply filters
+ self.write_combined_and_filter(&log_entry).await?;
+
+ Ok(())
+ }
+
+ fn mask_sensitive_data(&self, value: &mut Value) {
+ match value {
+ Value::Object(map) => {
+ for (key, val) in map.iter_mut() {
+ // Always mask password and pin
+ if key == "password" || key == "pin" {
+ *val = Value::String("***MASKED***".to_string());
+ }
+ // Also mask any configured sensitive fields
+ else if self.sensitive_fields.contains(key) {
+ *val = Value::String("***MASKED***".to_string());
+ } else {
+ self.mask_sensitive_data(val);
+ }
+ }
+ }
+ Value::Array(arr) => {
+ for item in arr.iter_mut() {
+ self.mask_sensitive_data(item);
+ }
+ }
+ _ => {}
+ }
+ }
+}