// Audit logging module with request ID tracing and custom filters

use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use regex::Regex;
use serde_json::Value;
use std::fs::{File, OpenOptions};
use std::io::Write;
use std::path::Path;
use std::sync::Arc;
use tokio::sync::Mutex;

/// A compiled custom filter: log entries matching `pattern` are copied to `file`.
#[derive(Clone)]
struct CustomFilter {
    name: String,
    pattern: Regex,
    file: Arc<Mutex<File>>,
}

#[derive(Clone)]
pub struct AuditLogger {
    mask_passwords: bool,
    sensitive_fields: Vec<String>,
    request_file: Option<Arc<Mutex<File>>>,
    query_file: Option<Arc<Mutex<File>>>,
    error_file: Option<Arc<Mutex<File>>>,
    warning_file: Option<Arc<Mutex<File>>>,
    info_file: Option<Arc<Mutex<File>>>,
    combined_file: Option<Arc<Mutex<File>>>,
    custom_filters: Vec<CustomFilter>,
}
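// NOTE: `CustomFilterConfig` is assumed here to be a plain configuration struct carrying the
// fields the constructor reads (name, pattern, output_file, enabled). The project may instead
// define an equivalent type in its configuration module, e.g. with serde derives for loading
// it from a config file; this minimal sketch only exists so the module is self-contained.
#[derive(Clone, Debug)]
pub struct CustomFilterConfig {
    pub name: String,
    pub pattern: String,
    pub output_file: String,
    pub enabled: bool,
}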
impl AuditLogger {
    pub fn new(
        request_log_path: Option<String>,
        query_log_path: Option<String>,
        error_log_path: Option<String>,
        warning_log_path: Option<String>,
        info_log_path: Option<String>,
        combined_log_path: Option<String>,
        mask_passwords: bool,
        sensitive_fields: Vec<String>,
        custom_filter_configs: Vec<CustomFilterConfig>,
    ) -> Result<Self> {
        // Helper closure to open a log file if a path is provided
        let open_log_file = |path: &Option<String>| -> Result<Option<Arc<Mutex<File>>>> {
            if let Some(path_str) = path {
                // Ensure log directories exist
                if let Some(parent) = Path::new(path_str).parent() {
                    std::fs::create_dir_all(parent).context("Failed to create log directory")?;
                }
                let file = OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(path_str)
                    .context(format!("Failed to open log file: {}", path_str))?;
                Ok(Some(Arc::new(Mutex::new(file))))
            } else {
                Ok(None)
            }
        };

        // Initialize custom filters
        let mut custom_filters = Vec::new();
        for filter_config in custom_filter_configs {
            if filter_config.enabled {
                // Compile regex pattern
                let pattern = Regex::new(&filter_config.pattern).context(format!(
                    "Invalid regex pattern in filter '{}': {}",
                    filter_config.name, filter_config.pattern
                ))?;

                // Open filter output file
                if let Some(parent) = Path::new(&filter_config.output_file).parent() {
                    std::fs::create_dir_all(parent)
                        .context("Failed to create filter log directory")?;
                }
                let file = OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(&filter_config.output_file)
                    .context(format!(
                        "Failed to open filter log file: {}",
                        filter_config.output_file
                    ))?;

                custom_filters.push(CustomFilter {
                    name: filter_config.name.clone(),
                    pattern,
                    file: Arc::new(Mutex::new(file)),
                });
            }
        }

        Ok(Self {
            mask_passwords,
            sensitive_fields,
            request_file: open_log_file(&request_log_path)?,
            query_file: open_log_file(&query_log_path)?,
            error_file: open_log_file(&error_log_path)?,
            warning_file: open_log_file(&warning_log_path)?,
            info_file: open_log_file(&info_log_path)?,
            combined_file: open_log_file(&combined_log_path)?,
            custom_filters,
        })
    }

    /// Generate a unique request ID for transaction tracing
    pub fn generate_request_id() -> String {
        // Keep the low 64 bits of a v4 UUID, rendered as 16 hex chars
        format!(
            "{:016x}",
            uuid::Uuid::new_v4().as_u128() & 0xFFFFFFFF_FFFFFFFF
        )
    }

    /// Write to combined log and apply custom filters
    async fn write_combined_and_filter(&self, entry: &str) -> Result<()> {
        // Write to combined log if configured
        if let Some(ref file_mutex) = self.combined_file {
            let mut file = file_mutex.lock().await;
            file.write_all(entry.as_bytes())
                .context("Failed to write to combined log")?;
            file.flush().context("Failed to flush combined log")?;
        }

        // Apply custom filters
        for filter in &self.custom_filters {
            if filter.pattern.is_match(entry) {
                let mut file = filter.file.lock().await;
                file.write_all(entry.as_bytes())
                    .context(format!("Failed to write to filter log: {}", filter.name))?;
                file.flush()
                    .context(format!("Failed to flush filter log: {}", filter.name))?;
            }
        }

        Ok(())
    }

    pub async fn log_request(
        &self,
        request_id: &str,
        timestamp: DateTime<Utc>,
        _ip: &str,
        user: Option<&str>,
        power: Option<i64>,
        endpoint: &str,
        payload: &Value,
    ) -> Result<()> {
        let mut masked_payload = payload.clone();
        if self.mask_passwords {
            self.mask_sensitive_data(&mut masked_payload);
        }

        let user_str = user.unwrap_or("anonymous");
        let power_str = power
            .map(|p| format!("power={}", p))
            .unwrap_or_else(|| "power=0".to_string());

        let log_entry = format!(
            "{} [{}] | REQUEST | user={} | {} | endpoint={} | payload={}\n",
            timestamp.format("%Y-%m-%d %H:%M:%S"),
            request_id,
            user_str,
            power_str,
            endpoint,
            serde_json::to_string(&masked_payload).unwrap_or_else(|_| "invalid_json".to_string())
        );

        // Write to legacy request log if configured
        if let Some(ref file_mutex) = self.request_file {
            let mut file = file_mutex.lock().await;
            file.write_all(log_entry.as_bytes())
                .context("Failed to write to request log")?;
            file.flush().context("Failed to flush request log")?;
        }

        // Write to combined log and apply filters
        self.write_combined_and_filter(&log_entry).await?;

        Ok(())
    }

    pub async fn log_query(
        &self,
        request_id: &str,
        timestamp: DateTime<Utc>,
        user: &str,
        power: Option<i64>,
        query: &str,
        parameters: Option<&Value>,
        rows_affected: Option<u64>,
    ) -> Result<()> {
        let params_str = if let Some(params) = parameters {
            serde_json::to_string(params).unwrap_or_else(|_| "invalid_json".to_string())
        } else {
            "null".to_string()
        };

        let power_str = power
            .map(|p| format!("power={}", p))
            .unwrap_or_else(|| "power=0".to_string());
        let rows_str = rows_affected
            .map(|r| format!("rows={}", r))
            .unwrap_or_else(|| "rows=0".to_string());

        let log_entry = format!(
            "{} [{}] | QUERY | user={} | {} | {} | query={} | params={}\n",
            timestamp.format("%Y-%m-%d %H:%M:%S"),
            request_id,
            user,
            power_str,
            rows_str,
            query,
            params_str
        );

        // Write to legacy query log if configured
        if let Some(ref file_mutex) = self.query_file {
            let mut file = file_mutex.lock().await;
            file.write_all(log_entry.as_bytes())
                .context("Failed to write to query log")?;
            file.flush().context("Failed to flush query log")?;
        }

        // Write to combined log and apply filters
        self.write_combined_and_filter(&log_entry).await?;

        Ok(())
    }

    pub async fn log_error(
        &self,
        request_id: &str,
        timestamp: DateTime<Utc>,
        error: &str,
        context: Option<&str>,
        user: Option<&str>,
        power: Option<i64>,
    ) -> Result<()> {
        let user_str = user.unwrap_or("unknown");
        let context_str = context.unwrap_or("general");
        let power_str = power
            .map(|p| format!("power={}", p))
            .unwrap_or_else(|| "power=0".to_string());

        let log_entry = format!(
            "{} [{}] | ERROR | user={} | {} | context={} | error={}\n",
            timestamp.format("%Y-%m-%d %H:%M:%S"),
            request_id,
            user_str,
            power_str,
            context_str,
            error
        );

        // Write to legacy error log if configured
        if let Some(ref file_mutex) = self.error_file {
            let mut file = file_mutex.lock().await;
            file.write_all(log_entry.as_bytes())
                .context("Failed to write to error log")?;
            file.flush().context("Failed to flush error log")?;
        }

        // Write to combined log and apply filters
        self.write_combined_and_filter(&log_entry).await?;

        Ok(())
    }

    pub async fn log_warning(
        &self,
        request_id: &str,
        timestamp: DateTime<Utc>,
        message: &str,
        context: Option<&str>,
        user: Option<&str>,
        power: Option<i64>,
    ) -> Result<()> {
        let user_str = user.unwrap_or("unknown");
        let context_str = context.unwrap_or("general");
        let power_str = power
            .map(|p| format!("power={}", p))
            .unwrap_or_else(|| "power=0".to_string());

        let log_entry = format!(
            "{} [{}] | WARNING | user={} | {} | context={} | message={}\n",
            timestamp.format("%Y-%m-%d %H:%M:%S"),
            request_id,
            user_str,
            power_str,
            context_str,
            message
        );

        // Write to warning log if configured
        if let Some(ref file_mutex) = self.warning_file {
            let mut file = file_mutex.lock().await;
            file.write_all(log_entry.as_bytes())
                .context("Failed to write to warning log")?;
            file.flush().context("Failed to flush warning log")?;
        }

        // Write to combined log and apply filters
        self.write_combined_and_filter(&log_entry).await?;

        Ok(())
    }
    pub async fn log_info(
        &self,
        request_id: &str,
        timestamp: DateTime<Utc>,
        message: &str,
        context: Option<&str>,
        user: Option<&str>,
        power: Option<i64>,
    ) -> Result<()> {
        let user_str = user.unwrap_or("system");
        let context_str = context.unwrap_or("general");
        let power_str = power
            .map(|p| format!("power={}", p))
            .unwrap_or_else(|| "power=0".to_string());

        let log_entry = format!(
            "{} [{}] | INFO | user={} | {} | context={} | message={}\n",
            timestamp.format("%Y-%m-%d %H:%M:%S"),
            request_id,
            user_str,
            power_str,
            context_str,
            message
        );

        // Write to info log if configured
        if let Some(ref file_mutex) = self.info_file {
            let mut file = file_mutex.lock().await;
            file.write_all(log_entry.as_bytes())
                .context("Failed to write to info log")?;
            file.flush().context("Failed to flush info log")?;
        }

        // Write to combined log and apply filters
        self.write_combined_and_filter(&log_entry).await?;

        Ok(())
    }

    /// Recursively replace sensitive values in a JSON payload with a masking placeholder.
    fn mask_sensitive_data(&self, value: &mut Value) {
        match value {
            Value::Object(map) => {
                for (key, val) in map.iter_mut() {
                    // Always mask password and pin
                    if key == "password" || key == "pin" {
                        *val = Value::String("***MASKED***".to_string());
                    }
                    // Also mask any configured sensitive fields
                    else if self.sensitive_fields.contains(key) {
                        *val = Value::String("***MASKED***".to_string());
                    } else {
                        self.mask_sensitive_data(val);
                    }
                }
            }
            Value::Array(arr) => {
                for item in arr.iter_mut() {
                    self.mask_sensitive_data(item);
                }
            }
            _ => {}
        }
    }
}
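// Minimal usage sketch: exercises the constructor, request-ID generation, password masking,
// and the combined-log/custom-filter path. It assumes the reconstructed `CustomFilterConfig`
// above, tokio's `macros`/`rt` features for `#[tokio::test]`, and hypothetical log-file paths
// under the system temp directory.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn logs_request_with_masked_password() -> Result<()> {
        let dir = std::env::temp_dir().join("audit_logger_demo");
        let combined = dir.join("combined.log");
        let errors_only = dir.join("errors.log");

        let logger = AuditLogger::new(
            None, // request log
            None, // query log
            None, // error log
            None, // warning log
            None, // info log
            Some(combined.to_string_lossy().into_owned()),
            true,                       // mask passwords
            vec!["token".to_string()],  // extra sensitive fields
            vec![CustomFilterConfig {
                name: "errors".to_string(),
                pattern: r"\| ERROR \|".to_string(),
                output_file: errors_only.to_string_lossy().into_owned(),
                enabled: true,
            }],
        )?;

        // Request IDs are the low 64 bits of a UUID, formatted as 16 hex characters.
        let request_id = AuditLogger::generate_request_id();
        assert_eq!(request_id.len(), 16);

        let payload = serde_json::json!({ "user": "alice", "password": "hunter2" });
        logger
            .log_request(
                &request_id,
                Utc::now(),
                "127.0.0.1",
                Some("alice"),
                Some(3),
                "/login",
                &payload,
            )
            .await?;

        // The combined log should contain the masked payload, never the raw password.
        let written = std::fs::read_to_string(&combined)?;
        assert!(written.contains("***MASKED***"));
        assert!(!written.contains("hunter2"));
        Ok(())
    }
}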