aboutsummaryrefslogtreecommitdiff
path: root/src/config.rs
blob: 4d53e292986c591735b8f7329532ef5ee2272cda (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};

/// Resolved filesystem locations for the application's persistent data.
#[derive(Debug, Clone)]
pub struct HoardomPaths {
    /// Path to the TOML config file (`config.toml` under the data root).
    pub config_file: PathBuf,
    /// Directory where cache files live; see [`HoardomPaths::cache_file`].
    pub cache_dir: PathBuf,
    /// Whether the config file can be written (directories were creatable).
    pub can_save: bool,
    /// Whether on-disk caching is available; `false` means in-memory only.
    pub caching_enabled: bool,
}

impl HoardomPaths {
    /// Full path of the cache file called `name` inside the cache directory.
    pub fn cache_file(&self, name: &str) -> PathBuf {
        let mut path = self.cache_dir.clone();
        path.push(name);
        path
    }
}

/// Root configuration structure, serialized to and from `config.toml`.
/// Every field carries a serde default so a partial or empty file parses cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    #[serde(default)]
    pub settings: Settings,
    #[serde(default)]
    pub cache: CacheSettings,
    #[serde(default)]
    pub favorites: Vec<FavoriteEntry>,
    #[serde(default)]
    pub imported_filters: Vec<ImportedFilter>,
    // Free-form text buffer; contents are user-managed.
    #[serde(default)]
    pub scratchpad: String,
}

/// A favorited domain together with its last known availability status.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FavoriteEntry {
    pub domain: String,
    /// Last known status; defaults to "unknown" when absent from the file.
    #[serde(default = "default_fav_status")]
    pub status: String,
    #[serde(default)]
    // Date string of the last status check (empty if never checked).
    // NOTE(review): exact date format is not fixed here — confirm against the updater.
    pub checked: String,
    /// Presumably set when the status differs from the previous check — TODO confirm.
    #[serde(default)]
    pub changed: bool,
}


/// Serde default for [`FavoriteEntry::status`].
fn default_fav_status() -> String {
    String::from("unknown")
}

/// User-facing settings persisted in the `[settings]` table of the config file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    /// Name of the active TLD list (defaults to the built-in default list).
    #[serde(default = "default_tld_list")]
    pub tld_list: String,
    #[serde(default)]
    pub show_all: bool,
    /// Clear previous results when a new search starts (defaults to true).
    #[serde(default = "default_clear_on_search")]
    pub clear_on_search: bool,
    #[serde(default)]
    pub show_notes_panel: bool,
    /// Last path used when exporting favorites (empty if never exported).
    #[serde(default)]
    pub last_fav_export_path: String,
    /// Last path used when exporting results (empty if never exported).
    #[serde(default)]
    pub last_res_export_path: String,
    #[serde(default)]
    pub top_tlds: Vec<String>,
    // Presumably the number of concurrent workers (defaults to 32) — confirm against the job runner.
    #[serde(default = "default_jobs")]
    pub jobs: u8,
    /// Error types that should not be retried.
    /// Valid values: rate_limit, invalid_tld, timeout, unknown and forbidden.
    #[serde(default = "default_noretry")]
    pub noretry: Vec<String>,
    /// Whether a config backup is made on startup (defaults to true).
    #[serde(default = "default_backups_enabled")]
    pub backups: bool,
    /// Maximum number of backups kept; older ones are pruned (defaults to 32, 0 = keep all).
    #[serde(default = "default_backup_count")]
    pub backup_count: u32,
}

/// Cache-related settings persisted in the `[cache]` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheSettings {
    /// RFC 3339 timestamp of the last cache refresh (empty = never updated).
    #[serde(default)]
    pub last_updated: String,
    /// Days after which the cache counts as outdated; 0 disables staleness warnings.
    #[serde(default = "default_outdated_cache_days")]
    pub outdated_cache: u32,
    /// Automatically refresh the cache when it is found outdated.
    #[serde(default = "default_auto_update")]
    pub auto_update_cache: bool,
}

/// A named TLD list imported from an external filter file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportedFilter {
    pub name: String,
    pub tlds: Vec<String>,
}

/// Serde default for [`Settings::tld_list`]: the application's built-in list name.
fn default_tld_list() -> String {
    crate::tlds::default_list_name().to_owned()
}

/// Serde default for [`CacheSettings::outdated_cache`] (days).
fn default_outdated_cache_days() -> u32 {
    7
}

/// Serde default for [`CacheSettings::auto_update_cache`].
fn default_auto_update() -> bool {
    true
}

/// Serde default for [`Settings::clear_on_search`].
fn default_clear_on_search() -> bool {
    true
}

/// Serde default for [`Settings::jobs`].
fn default_jobs() -> u8 {
    32
}

/// Serde default for [`Settings::noretry`]: error kinds that are never retried.
fn default_noretry() -> Vec<String> {
    ["rate_limit", "invalid_tld", "forbidden"]
        .iter()
        .map(|s| s.to_string())
        .collect()
}

/// Serde default for [`Settings::backups`].
fn default_backups_enabled() -> bool {
    true
}

/// Serde default for [`Settings::backup_count`].
fn default_backup_count() -> u32 {
    32
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            tld_list: default_tld_list(),
            show_all: false,
            clear_on_search: default_clear_on_search(),
            show_notes_panel: false,
            last_fav_export_path: String::new(),
            last_res_export_path: String::new(),
            top_tlds: Vec::new(),
            jobs: default_jobs(),
            noretry: default_noretry(),
            backups: default_backups_enabled(),
            backup_count: default_backup_count(),
        }
    }
}

impl Default for CacheSettings {
    fn default() -> Self {
        Self {
            last_updated: String::new(),
            outdated_cache: default_outdated_cache_days(),
            auto_update_cache: default_auto_update(),
        }
    }
}

impl Default for Config {
    fn default() -> Self {
        Self {
            settings: Settings::default(),
            cache: CacheSettings::default(),
            favorites: Vec::new(),
            imported_filters: Vec::new(),
            scratchpad: String::new(),
        }
    }
}



impl Config {
    /// Load the config from `path`, falling back to defaults when the file
    /// is missing or cannot be parsed.
    pub fn load(path: &Path) -> Self {
        match std::fs::read_to_string(path) {
            Ok(content) => match toml::from_str::<Config>(&content) {
                Ok(config) => config,
                Err(_) => {
                    eprintln!("Warning: could not parse config file");
                    Config::default()
                }
            },
            // A missing/unreadable file is normal on first run: use defaults.
            Err(_) => Config::default(),
        }
    }

    /// Load the config and, when backups are enabled, snapshot the current
    /// file once. Intended for startup; a failed backup only warns.
    pub fn load_with_backup(path: &Path) -> Self {
        let config = Self::load(path);
        if config.settings.backups && path.exists() {
            if let Err(e) = Self::create_backup(path, config.settings.backup_count) {
                eprintln!("Warning: could not create config backup: {}", e);
            }
        }
        config
    }

    /// Serialize and write the config to `path`, creating parent directories
    /// as needed.
    ///
    /// The whole file is regenerated on every save, so any comments a user
    /// added by hand are NOT preserved; a fixed explanatory header is
    /// prepended instead.
    pub fn save(&self, path: &Path) -> Result<(), String> {
        // Make sure the parent directory exists before writing.
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)
                .map_err(|e| format!("Failed to create config directory: {}", e))?;
        }

        let body = toml::to_string_pretty(self)
            .map_err(|e| format!("Failed to serialize config: {}", e))?;
        // Plain ASCII quotes in the header so the listed values can be
        // copy-pasted directly into the TOML file.
        let content = format!(
            "\
# hoardom config - auto saved; hand-written comments are NOT preserved
#
# [settings]
# noretry: error types that should not be retried. Valid values:
#   \"rate_limit\"
#   \"invalid_tld\"
#   \"forbidden\"
#   \"timeout\"
#   \"unknown\"
\n{}",
            body
        );
        std::fs::write(path, content).map_err(|e| format!("Failed to write config file: {}", e))?;
        Ok(())
    }

    /// Copy the config file to `<parent>/backups/config_<timestamp>.toml`
    /// and prune the oldest backups beyond `max_count` (0 = keep all).
    ///
    /// Only call on startup and shutdown, not on every save: the timestamp
    /// has second resolution, so rapid calls would be deduplicated anyway.
    pub fn create_backup(config_path: &Path, max_count: u32) -> Result<(), String> {
        let parent = config_path.parent().ok_or("No parent directory")?;
        let backup_dir = parent.join("backups");
        std::fs::create_dir_all(&backup_dir)
            .map_err(|e| format!("Failed to create backup dir: {}", e))?;

        let ts = chrono::Local::now().format("%Y%m%d_%H%M%S");
        let backup_name = format!("config_{}.toml", ts);
        let backup_path = backup_dir.join(&backup_name);

        // A backup with the same second-resolution timestamp already exists.
        if backup_path.exists() {
            return Ok(());
        }

        std::fs::copy(config_path, &backup_path)
            .map_err(|e| format!("Failed to copy config to backup: {}", e))?;

        if max_count > 0 {
            let mut backups: Vec<_> = std::fs::read_dir(&backup_dir)
                .map_err(|e| format!("Failed to read backup dir: {}", e))?
                .filter_map(|e| e.ok())
                .filter(|e| {
                    e.file_name()
                        .to_str()
                        .map(|n| n.starts_with("config_") && n.ends_with(".toml"))
                        .unwrap_or(false)
                })
                .collect();

            // Timestamped names sort chronologically, so the oldest come first.
            backups.sort_by_key(|e| e.file_name());

            let excess = backups.len().saturating_sub(max_count as usize);
            for entry in backups.into_iter().take(excess) {
                // Best effort: a failed delete should not fail the backup.
                let _ = std::fs::remove_file(entry.path());
            }
        }

        Ok(())
    }

    /// Register an imported TLD list, replacing any existing one with the same name.
    pub fn import_filter(&mut self, filter: ImportedFilter) {
        self.imported_filters.retain(|f| f.name != filter.name);
        self.imported_filters.push(filter);
    }

    /// Record the current UTC time as the cache's last-updated timestamp.
    pub fn mark_cache_updated(&mut self) {
        self.cache.last_updated = chrono::Utc::now().to_rfc3339();
    }

    /// Returns `(is_outdated, should_auto_update)` based on the cache age.
    pub fn check_cache_status(&self) -> (bool, bool) {
        if self.cache.last_updated.is_empty() {
            // Never updated: always outdated, always eligible for auto update.
            return (true, true);
        }

        let last = match chrono::DateTime::parse_from_rfc3339(&self.cache.last_updated) {
            Ok(dt) => dt.with_timezone(&chrono::Utc),
            Err(_) => return (true, true), // unparseable timestamp: treat as outdated
        };

        let now = chrono::Utc::now();
        // Clamp at zero before the u32 cast: a future timestamp (clock skew)
        // would otherwise yield a negative day count that wraps to a huge age.
        let age_days = (now - last).num_days().max(0) as u32;

        if self.cache.outdated_cache == 0 {
            // Staleness warnings disabled; if auto update is on, refresh every run.
            return (false, self.cache.auto_update_cache);
        }

        let is_outdated = age_days >= self.cache.outdated_cache;
        let should_auto = is_outdated && self.cache.auto_update_cache;
        (is_outdated, should_auto)
    }
}

pub fn parse_filter_file(path: &PathBuf) -> Result<ImportedFilter, String> {
    let content =
        std::fs::read_to_string(path).map_err(|e| format!("Could not read filter file: {}", e))?;
    let filter: ImportedFilter =
        toml::from_str(&content).map_err(|e| format!("Could not parse filter file: {}", e))?;
    if filter.name.is_empty() {
        return Err("Filter file must have a name defined".to_string());
    }
    if filter.tlds.is_empty() {
        return Err("Filter file has no TLDs defined".to_string());
    }
    Ok(filter)
}

/// Resolve the `.hoardom` data directory, trying a few locations.
///
/// Priority:
/// 1. explicit path via the `-e` flag -> used directly as the data root
/// 2. debug builds: `.hoardom` under the current directory
/// 3. release builds: `.hoardom` under the home directory
/// 4. fallback: whichever of 2/3 was not tried first
/// 5. nothing works: caching disabled, in-memory only
pub fn resolve_paths(explicit: Option<&PathBuf>) -> HoardomPaths {
    // Try to use `root` as the data root. Succeeds only if the cache
    // directory can be created (which also creates the root itself).
    let try_setup = |root: PathBuf| -> Option<HoardomPaths> {
        let config_file = root.join("config.toml");
        let cache_dir = root.join("cache");
        if std::fs::create_dir_all(&cache_dir).is_ok() {
            Some(HoardomPaths {
                config_file,
                cache_dir,
                can_save: true,
                caching_enabled: true,
            })
        } else {
            None
        }
    };

    // Explicit path given via the -e flag: use it as the app root.
    if let Some(p) = explicit {
        let root = if p.extension().is_some() {
            // Looks like a file path; fall back to its containing directory.
            p.parent().unwrap_or(p).to_path_buf()
        } else {
            p.clone()
        };
        if let Some(paths) = try_setup(root) {
            return paths;
        }
        // Don't fall through silently: the user asked for this location.
        eprintln!("Warning: could not use explicit data directory, trying defaults");
    }

    // Debug builds: current directory first, home as fallback.
    #[cfg(debug_assertions)]
    {
        if let Ok(dir) = std::env::current_dir() {
            if let Some(paths) = try_setup(dir.join(".hoardom")) {
                return paths;
            }
        }
        if let Some(home) = dirs::home_dir() {
            if let Some(paths) = try_setup(home.join(".hoardom")) {
                return paths;
            }
        }
    }

    // Release builds: home directory first, cwd as fallback.
    #[cfg(not(debug_assertions))]
    {
        if let Some(home) = dirs::home_dir() {
            if let Some(paths) = try_setup(home.join(".hoardom")) {
                return paths;
            }
        }
        if let Ok(dir) = std::env::current_dir() {
            if let Some(paths) = try_setup(dir.join(".hoardom")) {
                return paths;
            }
        }
    }

    // Nothing worked: run without persistence (the app still functions,
    // but sessions are not saved between runs).
    eprintln!("Warning: could not create .hoardom directory anywhere, caching disabled");
    HoardomPaths {
        config_file: PathBuf::from(".hoardom/config.toml"),
        cache_dir: PathBuf::from(".hoardom/cache"),
        can_save: false,
        caching_enabled: false,
    }
}