aboutsummaryrefslogtreecommitdiff
path: root/src/main.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/main.rs')
-rw-r--r--src/main.rs37
1 files changed, 17 insertions, 20 deletions
diff --git a/src/main.rs b/src/main.rs
index 9625959..c6d88e1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -38,13 +38,13 @@ async fn main() {
let mut config = Config::load_with_backup(&paths.config_file);
if !paths.can_save {
- eprintln!("Warning: favorites and settings wont be saved (no writable location found)");
+ eprintln!("Warning: favorites and settings won't be saved (cannot write to {})", paths.config_file.display());
}
// handle -r refresh cache flag
if args.refresh_cache {
if !paths.caching_enabled {
- eprintln!("Caching is disabled (no writable location). Nothing to refresh.");
+ eprintln!("Caching is disabled without a writable location. Nothing to refresh.");
return;
}
let cache_file = paths.cache_file("rdap_bootstrap.json");
@@ -72,15 +72,14 @@ async fn main() {
if is_outdated && !should_auto {
eprintln!("Warning: RDAP cache is outdated. Run `hoardom -r` to refresh.");
}
- // force refresh if auto update says so, or if cache file doesnt exist yet
should_auto || !cf.exists()
} else {
false
};
- // import custom filter if given
- if let Some(filter_path) = &args.import_filter {
- match parse_filter_file(filter_path) {
+ // import custom list if given
+ if let Some(list_path) = &args.import_list {
+ match parse_filter_file(list_path) {
Ok(filter) => {
config.import_filter(filter);
if paths.can_save {
@@ -88,16 +87,15 @@ async fn main() {
}
}
Err(e) => {
- eprintln!("Error importing filter: {}", e);
+ eprintln!("Error importing list: {}", e);
return;
}
}
}
- // whois server overrides are baked into Lists.toml ("tld:server" syntax)
+ // whois server overrides baked into Lists.toml ("tld:server" syntax)
let overrides = whois_overrides();
- // parse noretry config into ErrorKind list
let noretry: Vec<ErrorKind> = config
.settings
.noretry
@@ -105,7 +103,7 @@ async fn main() {
.filter_map(|s| ErrorKind::from_config_str(s))
.collect();
- // TUI mode
+ // TUI mode
if args.is_tui() {
if let Err(e) = tui::run_tui(
&args,
@@ -120,7 +118,7 @@ async fn main() {
{
eprintln!("TUI error: {}", e);
}
- // save cache timestamp after TUI session if we refreshed
+ // save cache timestamp after the TUI session if we refreshed
if force_refresh && paths.can_save {
config.mark_cache_updated();
let _ = config.save(&paths.config_file);
@@ -128,7 +126,7 @@ async fn main() {
return;
}
- // CLI needs at least one domain unless autosearch was given
+ // CLI needs at least one domain unless autosearch was given; otherwise show brief help
if args.domains.is_empty() {
if let Some(file_path) = &args.autosearch {
run_autosearch(
@@ -197,7 +195,7 @@ async fn main() {
});
}
- // Suggestions only kick in when directly searching a single full domain
+ // suggestions only kick in when directly searching a single full domain that is taken
if args.domains.len() == 1 && args.effective_suggestions() > 0 {
if let Some(exact_tld) = specific_tld.as_deref() {
let exact_registered = aggregated_results.iter().any(|item| {
@@ -252,34 +250,33 @@ async fn main() {
let results = sort_aggregated_results(aggregated_results);
- // save cache timestamp if we refreshed
+ // save cache timestamp if we refreshed
if force_refresh && paths.can_save {
config.mark_cache_updated();
let _ = config.save(&paths.config_file);
}
- // print errors first
+ // print errors first
output::print_errors(&results, args.verbose);
- // CSV output
+ // CSV output
if let Some(csv_opt) = &args.csv {
match csv_opt {
Some(path) => {
- // write to file
+ // write CSV output to the given file
match output::write_csv_file(&results, path) {
Ok(()) => eprintln!("CSV written to {}", path.display()),
Err(e) => eprintln!("Error writing CSV: {}", e),
}
}
None => {
- // print to stdout, no logs
+ // no file path given: print CSV to stdout (no log output)
output::print_csv(&results);
}
}
return;
}
- // table output
if args.show_all {
output::print_full_table(&results, args.no_color, args.no_unicode);
} else {
@@ -305,7 +302,7 @@ async fn run_autosearch(
let base_tlds = build_base_tlds(args);
- // collect all search entries, grouping by name so "zapplex.de" + "zapplex.nl" become one batch
+ // collect all search entries, grouping by name so "zapplex.de" + "zapplex.nl" become one batch
let mut batches: Vec<(String, Vec<String>)> = Vec::new();
for line in content.lines() {