aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorUMTS at Teleco <crt@teleco.ch>2026-03-08 07:30:34 +0100
committerUMTS at Teleco <crt@teleco.ch>2026-03-08 07:30:34 +0100
commit8623ef0ee74ff48a5ee24ee032f5b549f662f09d (patch)
tree7f11543d05cfe0e7bd5aaca31ff1d4c86a271fd0
goofy ah
-rw-r--r--.gitignore13
-rw-r--r--Cargo.toml39
-rw-r--r--Lists.toml293
-rw-r--r--README.md133
-rw-r--r--build.rs64
-rw-r--r--doc/CLI.md87
-rw-r--r--doc/TUI.md138
-rw-r--r--doc/example-list.toml21
-rw-r--r--doc/pics/export.pngbin0 -> 118324 bytes
-rw-r--r--doc/pics/image.pngbin0 -> 985392 bytes
-rw-r--r--doc/pics/speed.pngbin0 -> 963193 bytes
-rwxr-xr-xscripts/fetch-tlds.sh816
-rw-r--r--src/cli.rs203
-rw-r--r--src/config.rs436
-rw-r--r--src/lookup.rs860
-rw-r--r--src/main.rs418
-rw-r--r--src/output.rs209
-rw-r--r--src/tlds.rs179
-rw-r--r--src/tui.rs2870
-rw-r--r--src/types.rs93
20 files changed, 6872 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..e0cee18
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,13 @@
+.DS_Store
+*/.DS_Store
+*/**/.DS_Store
+*.log
+*.tmp
+*.bak
+
+target/
+build/
+
+Cargo.lock
+.hoardom
+me.md \ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..91f6d15
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "hoardom"
+version = "1.0.4"
+edition = "2021"
+description = "Domain hoarding made less painful"
+
+[features]
+default = ["builtin-whois"]
+
+# Use the system's whois command instead of our builtin TCP implementation
+# Disable with: cargo build --no-default-features
+# Cannot be used together with builtin-whois
+system-whois = []
+
+# Use our builtin raw TCP whois implementation (no system dependency)
+# Enable with: cargo build --no-default-features --features builtin-whois
+# Cannot be used together with system-whois
+builtin-whois = []
+
+# Configurable values baked into the binary at compile time via build.rs
+[package.metadata.hoardom]
+whois-command = "whois"
+whois-flags = ""
+rdap-bootstrap-url = "https://data.iana.org/rdap/dns.json"
+
+[dependencies]
+clap = { version = "4", features = ["derive"] }
+tokio = { version = "1", features = ["full"] }
+reqwest = { version = "0.12", features = ["json"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+toml = "0.8"
+dirs = "6"
+colored = "3"
+ratatui = "0.29"
+crossterm = "0.28"
+indicatif = "0.17"
+chrono = "0.4"
+futures = "0.3"
diff --git a/Lists.toml b/Lists.toml
new file mode 100644
index 0000000..3fe44cc
--- /dev/null
+++ b/Lists.toml
@@ -0,0 +1,293 @@
+# Lists.toml — Built-in TLD lists for hoardom
+# Auto-generated on 2026-03-08 from Porkbun + OVH + INWX + RDAP bootstrap + WHOIS server list
+#
+# Format:
+# "tld" — TLD has RDAP support, lookup works directly
+# "tld:whois.server" — No RDAP: use this WHOIS server for fallback
+#
+# 637 purchasable TLDs (handshake/sub-TLDs excluded)
+# 573 have RDAP, 58 need WHOIS override, 6 will auto-probe
+#
+# Lists:
+# standard — common desirable TLDs (com, net, org, io, dev, ...)
+# decent — very best short punchy TLDs for domain hacking
+# swiss — standard-like but with Swiss/Central European TLDs prioritized
+# country — all 2-letter country-code TLDs
+# two — all 2-letter TLDs
+# three — all TLDs with 3 or fewer letters
+# four — all TLDs with exactly 4 letters
+# long — all TLDs with 5+ letters
+# all — everything
+
+standard = [
+ "com", "net", "org", "io:whois.nic.io", "co:whois.registry.co", "dev", "app",
+ "me:whois.identitydigital.services", "info", "biz", "one", "xyz", "online",
+ "site", "tech", "pro", "tv", "cc", "to", "sh:whois.nic.sh",
+ "li:whois.nic.li", "fm", "am:whois.amnic.net", "gg:whois.gg",
+ "ws:whois.website.ws", "la:whois.nic.la", "ms", "nu:whois.iis.nu", "cx",
+ "mn:whois.nic.mn", "st", "tel", "ai", "id", "in", "it:whois.nic.it", "is",
+ "at:whois.nic.at", "be:whois.dns.be", "de:whois.denic.de", "eu:whois.eu",
+ "fr", "nl", "se:whois.iis.se", "uk", "us:whois.nic.us", "ca", "au",
+ "nz:whois.irs.net.nz", "club", "blog", "art", "fun", "lol", "wtf", "page",
+ "link", "space", "store", "shop",
+]
+
+decent = [
+ "com", "net", "org", "io:whois.nic.io", "dev", "app", "co:whois.registry.co",
+ "me:whois.identitydigital.services", "ai", "sh:whois.nic.sh", "to", "fm",
+ "tv", "gg:whois.gg", "cc", "li:whois.nic.li", "am:whois.amnic.net",
+ "la:whois.nic.la", "nu:whois.iis.nu", "id", "in", "it:whois.nic.it", "is",
+ "at:whois.nic.at", "ws:whois.website.ws", "one", "pro", "bio", "art", "ink",
+ "run", "win", "new", "lol", "pub", "fun", "vet", "fit", "rip", "wtf", "zip",
+]
+
+swiss = [
+ "com", "net", "org", "ch:whois.nic.ch", "li:whois.nic.li", "swiss",
+ "zuerich", "io:whois.nic.io", "co:whois.registry.co", "dev", "app",
+ "me:whois.identitydigital.services", "info", "one", "pro",
+ "de:whois.denic.de", "at:whois.nic.at", "fr", "it:whois.nic.it",
+ "eu:whois.eu", "tech", "online", "site", "shop", "store", "biz", "xyz", "tv",
+ "cc", "to", "sh:whois.nic.sh", "fm", "am:whois.amnic.net", "gg:whois.gg",
+]
+
+country = [
+ "ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
+ "am:whois.amnic.net", "as", "at:whois.nic.at", "au", "be:whois.dns.be",
+ "bh:whois.nic.bh", "bo:whois.nic.bo", "bz:whois.identitydigital.services",
+ "ca", "cc", "ch:whois.nic.ch", "cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn",
+ "co:whois.registry.co", "cr", "cu", "cv", "cx", "cz", "de:whois.denic.de",
+ "dk:whois.punktum.dk", "do:whois.nic.do", "ec", "es", "eu:whois.eu", "fi",
+ "fm", "fo", "fr", "gd", "gg:whois.gg", "gs", "gt", "gy", "hk:whois.hkirc.hk",
+ "hn", "hr:whois.dns.hr", "ht", "id", "ie:whois.weare.ie", "im:whois.nic.im",
+ "in", "io:whois.nic.io", "it:whois.nic.it", "je:whois.je", "ki:whois.nic.ki",
+ "la:whois.nic.la", "lc:whois.identitydigital.services", "li:whois.nic.li",
+ "lt:whois.domreg.lt", "lu:whois.dns.lu", "lv:whois.nic.lv",
+ "me:whois.identitydigital.services", "mg", "mk:whois.marnet.mk",
+ "mn:whois.nic.mn", "ms", "mu", "mx:whois.mx", "my:whois.mynic.my", "nf",
+ "ni", "nl", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "pe:kero.yachay.pe",
+ "ph", "pl", "pm", "pr:whois.identitydigital.services", "pt:whois.dns.pt",
+ "pw", "re", "ro:whois.rotld.ro", "sb:whois.nic.net.sb", "sc:whois.nic.sc",
+ "se:whois.iis.se", "sh:whois.nic.sh", "si", "sn:whois.nic.sn",
+ "so:whois.nic.so", "sx:whois.sx", "tf", "tl:whois.nic.tl", "tm:whois.nic.tm",
+ "tn:whois.ati.tn", "to", "tv", "tw", "uk", "us:whois.nic.us", "uy",
+ "vc:whois.identitydigital.services", "vg", "wf", "ws:whois.website.ws", "yt",
+]
+
+two = [
+ "ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
+ "am:whois.amnic.net", "as", "at:whois.nic.at", "au", "be:whois.dns.be",
+ "bh:whois.nic.bh", "bo:whois.nic.bo", "bz:whois.identitydigital.services",
+ "ca", "cc", "ch:whois.nic.ch", "cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn",
+ "co:whois.registry.co", "cr", "cu", "cv", "cx", "cz", "de:whois.denic.de",
+ "dk:whois.punktum.dk", "do:whois.nic.do", "ec", "es", "eu:whois.eu", "fi",
+ "fm", "fo", "fr", "gd", "gg:whois.gg", "gs", "gt", "gy", "hk:whois.hkirc.hk",
+ "hn", "hr:whois.dns.hr", "ht", "id", "ie:whois.weare.ie", "im:whois.nic.im",
+ "in", "io:whois.nic.io", "it:whois.nic.it", "je:whois.je", "ki:whois.nic.ki",
+ "la:whois.nic.la", "lc:whois.identitydigital.services", "li:whois.nic.li",
+ "lt:whois.domreg.lt", "lu:whois.dns.lu", "lv:whois.nic.lv",
+ "me:whois.identitydigital.services", "mg", "mk:whois.marnet.mk",
+ "mn:whois.nic.mn", "ms", "mu", "mx:whois.mx", "my:whois.mynic.my", "nf",
+ "ni", "nl", "nu:whois.iis.nu", "nz:whois.irs.net.nz", "pe:kero.yachay.pe",
+ "ph", "pl", "pm", "pr:whois.identitydigital.services", "pt:whois.dns.pt",
+ "pw", "re", "ro:whois.rotld.ro", "sb:whois.nic.net.sb", "sc:whois.nic.sc",
+ "se:whois.iis.se", "sh:whois.nic.sh", "si", "sn:whois.nic.sn",
+ "so:whois.nic.so", "sx:whois.sx", "tf", "tl:whois.nic.tl", "tm:whois.nic.tm",
+ "tn:whois.ati.tn", "to", "tv", "tw", "uk", "us:whois.nic.us", "uy",
+ "vc:whois.identitydigital.services", "vg", "wf", "ws:whois.website.ws", "yt",
+]
+
+three = [
+ "ac:whois.nic.ac", "af:whois.nic.af", "ag:whois.nic.ag", "ai",
+ "am:whois.amnic.net", "app", "art", "as", "at:whois.nic.at", "au", "bar",
+ "be:whois.dns.be", "bet", "bh:whois.nic.bh", "bid", "bio", "biz",
+ "bo:whois.nic.bo", "boo", "bot", "bz:whois.identitydigital.services", "bzh",
+ "ca", "cab", "cam", "car", "cat", "cc", "ceo", "cfd", "ch:whois.nic.ch",
+ "cl:whois.nic.cl", "cm", "cn:whois.cnnic.cn", "co:whois.registry.co", "com",
+ "cr", "cu", "cv", "cx", "cz", "dad", "day", "de:whois.denic.de", "dev",
+ "diy", "dk:whois.punktum.dk", "do:whois.nic.do", "dog", "ec", "eco", "es",
+ "esq", "eu:whois.eu", "eus", "fan", "fi", "fit", "fly", "fm", "fo", "foo",
+ "fr", "fun", "fyi", "gal", "gay", "gd", "gdn", "gg:whois.gg", "gs", "gt",
+ "gy", "hiv", "hk:whois.hkirc.hk", "hn", "hot", "how", "hr:whois.dns.hr",
+ "ht", "icu", "id", "ie:whois.weare.ie", "im:whois.nic.im", "in", "inc",
+ "ing", "ink", "io:whois.nic.io", "ist", "it:whois.nic.it", "je:whois.je",
+ "ki:whois.nic.ki", "kim", "la:whois.nic.la", "lat", "law",
+ "lc:whois.identitydigital.services", "li:whois.nic.li", "llc", "lol",
+ "lt:whois.domreg.lt", "ltd", "lu:whois.dns.lu", "lv:whois.nic.lv", "mba",
+ "me:whois.identitydigital.services", "med", "men", "mg",
+ "mk:whois.marnet.mk", "mn:whois.nic.mn", "moe", "moi", "mom", "mov", "ms",
+ "mu", "mx:whois.mx", "my:whois.mynic.my", "net", "new", "nf", "ngo", "ni",
+ "nl", "now", "nrw", "nu:whois.iis.nu", "nyc", "nz:whois.irs.net.nz", "one",
+ "ong", "onl", "ooo", "org", "ovh", "pe:kero.yachay.pe", "pet", "ph", "phd",
+ "pl", "pm", "pr:whois.identitydigital.services", "pro", "pt:whois.dns.pt",
+ "pub", "pw", "re", "red", "rip", "ro:whois.rotld.ro", "run",
+ "sb:whois.nic.net.sb", "sbs", "sc:whois.nic.sc", "se:whois.iis.se", "sex",
+ "sh:whois.nic.sh", "si", "ski", "sn:whois.nic.sn", "so:whois.nic.so", "soy",
+ "spa", "srl", "sx:whois.sx", "tax", "tel", "tf", "tl:whois.nic.tl",
+ "tm:whois.nic.tm", "tn:whois.ati.tn", "to", "top", "tv", "tw", "uk", "uno",
+ "us:whois.nic.us", "uy", "vc:whois.identitydigital.services", "vet", "vg",
+ "vin", "vip", "wf", "win", "ws:whois.website.ws", "wtf", "xin", "xxx", "xyz",
+ "you", "yt", "zip",
+]
+
+four = [
+ "army", "asia", "auto", "baby", "band", "beer", "best", "bike", "blog",
+ "blue", "bond", "buzz", "cafe", "camp", "care", "cars", "casa", "cash",
+ "chat", "city", "club", "cool", "cyou", "date", "deal", "desi", "diet",
+ "fail", "fans", "farm", "fast", "film", "fish", "food", "free", "fund",
+ "game", "gift", "gmbh", "gold", "golf", "guru", "hair", "haus", "help",
+ "host", "immo", "info", "jobs", "kids", "kiwi", "land", "lgbt", "life",
+ "limo", "link", "live", "loan", "love", "ltda", "luxe", "meme", "menu",
+ "mobi", "moda", "name", "navy", "news", "page", "pics", "pink", "plus",
+ "porn", "prof", "qpon", "rent", "rest", "rich", "rsvp", "ruhr", "sale",
+ "sarl", "scot", "sexy", "shop", "show", "site", "skin", "spot", "surf",
+ "talk", "taxi", "team", "tech", "tips", "town", "toys", "tube", "vana",
+ "vote", "voto", "wang", "wiki", "wine", "work", "yoga", "zone",
+]
+
+long = [
+ "abogado", "academy", "accountant", "accountants", "actor", "adult",
+ "agency", "airforce", "alsace", "amsterdam", "apartments", "archi",
+ "associates", "attorney", "auction", "audio", "autos", "barcelona",
+ "bargains", "basketball", "bayern", "beauty", "berlin", "bible", "bingo",
+ "black", "blackfriday", "boats", "boston", "boutique", "broker", "brussels",
+ "build", "builders", "business", "camera", "capital", "cards", "career",
+ "careers", "casino", "catering", "center", "channel", "charity", "cheap",
+ "christmas", "church", "claims", "cleaning", "click", "clinic", "clothing",
+ "cloud", "coach", "codes", "coffee", "college", "cologne", "community",
+ "company", "compare", "computer", "condos", "construction", "consulting",
+ "contact", "contractors", "cooking", "corsica", "country", "coupons",
+ "courses", "credit", "creditcard", "cricket", "cruises", "cymru", "dance",
+ "dating", "dealer", "deals", "degree", "delivery", "democrat", "dental",
+ "dentist", "design", "diamonds", "digital", "direct", "directory",
+ "discount", "doctor", "domains", "download", "earth", "education", "email",
+ "energy", "engineer", "engineering", "enterprises", "equipment", "estate",
+ "events", "exchange", "expert", "exposed", "express", "faith", "family",
+ "fashion", "feedback", "finance", "financial", "fishing", "fitness",
+ "flights", "florist", "flowers", "football", "forex", "forsale", "forum",
+ "foundation", "furniture", "futbol", "gallery", "games", "garden", "gifts",
+ "gives", "giving", "glass", "global", "graphics", "gratis", "green", "gripe",
+ "group", "guide", "guitars", "hamburg", "health", "healthcare", "hiphop",
+ "hockey", "holdings", "holiday", "homes", "horse", "hospital", "hosting",
+ "house", "immobilien", "industries", "institute", "insure", "international",
+ "investments", "irish", "istanbul", "jetzt", "jewelry", "juegos", "kaufen",
+ "kitchen", "koeln", "kyoto", "lawyer", "lease", "legal", "lifestyle",
+ "lighting", "limited", "living", "loans", "locker", "london", "lotto",
+ "luxury", "madrid", "maison", "makeup", "management", "market", "marketing",
+ "markets", "media", "melbourne", "memorial", "miami", "mobile", "money",
+ "monster", "mortgage", "motorcycles", "movie", "museum", "music", "nagoya",
+ "network", "nexus", "ninja", "observer", "okinawa", "online", "organic",
+ "osaka", "paris", "partners", "parts", "party", "photo", "photography",
+ "photos", "pictures", "pizza", "place", "plumbing", "poker", "press",
+ "productions", "promo", "properties", "property", "protection", "quebec",
+ "quest", "racing", "radio", "realty", "recipes", "rehab", "reise", "reisen",
+ "rentals", "repair", "report", "republican", "restaurant", "review",
+ "reviews", "rocks", "rodeo", "rugby", "ryukyu", "saarland", "salon",
+ "school", "schule", "science", "security", "select", "services", "shiksha",
+ "shoes", "shopping", "singles", "soccer", "social", "software", "solar",
+ "solutions", "space", "storage", "store", "stream", "studio", "study",
+ "style", "sucks", "supplies", "supply", "support", "surgery", "swiss",
+ "sydney", "systems", "taipei", "tattoo", "technology", "tennis", "theater",
+ "theatre", "tickets", "tienda", "tires", "tirol", "today", "tokyo", "tools",
+ "tours", "trade", "trading", "training", "travel", "university", "vacations",
+ "vegas", "ventures", "viajes", "video", "villas", "vision", "vlaanderen",
+ "vodka", "voyage", "wales", "watch", "watches", "webcam", "website",
+ "wedding", "works", "world", "xn--45q11c", "xn--5tzm5g", "xn--6frz82g",
+ "xn--c1avg", "xn--czrs0t", "xn--e1a4c:whois.eu", "xn--fjq720a",
+ "xn--hxt814e", "xn--i1b6b1a6a2e", "xn--ngbc5azd", "xn--nqv7f",
+ "xn--pgbs0dh:whois.ati.tn", "xn--q9jyb4c", "xn--unup4y", "xn--vhquv",
+ "yachts", "yokohama",
+]
+
+all = [
+ "abogado", "ac:whois.nic.ac", "academy", "accountant", "accountants",
+ "actor", "adult", "af:whois.nic.af", "ag:whois.nic.ag", "agency", "ai",
+ "airforce", "alsace", "am:whois.amnic.net", "amsterdam", "apartments", "app",
+ "archi", "army", "art", "as", "asia", "associates", "at:whois.nic.at",
+ "attorney", "au", "auction", "audio", "auto", "autos", "baby", "band", "bar",
+ "barcelona", "bargains", "basketball", "bayern", "be:whois.dns.be", "beauty",
+ "beer", "berlin", "best", "bet", "bh:whois.nic.bh", "bible", "bid", "bike",
+ "bingo", "bio", "biz", "black", "blackfriday", "blog", "blue",
+ "bo:whois.nic.bo", "boats", "bond", "boo", "boston", "bot", "boutique",
+ "broker", "brussels", "build", "builders", "business", "buzz",
+ "bz:whois.identitydigital.services", "bzh", "ca", "cab", "cafe", "cam",
+ "camera", "camp", "capital", "car", "cards", "care", "career", "careers",
+ "cars", "casa", "cash", "casino", "cat", "catering", "cc", "center", "ceo",
+ "cfd", "ch:whois.nic.ch", "channel", "charity", "chat", "cheap", "christmas",
+ "church", "city", "cl:whois.nic.cl", "claims", "cleaning", "click", "clinic",
+ "clothing", "cloud", "club", "cm", "cn:whois.cnnic.cn",
+ "co:whois.registry.co", "coach", "codes", "coffee", "college", "cologne",
+ "com", "community", "company", "compare", "computer", "condos",
+ "construction", "consulting", "contact", "contractors", "cooking", "cool",
+ "corsica", "country", "coupons", "courses", "cr", "credit", "creditcard",
+ "cricket", "cruises", "cu", "cv", "cx", "cymru", "cyou", "cz", "dad",
+ "dance", "date", "dating", "day", "de:whois.denic.de", "deal", "dealer",
+ "deals", "degree", "delivery", "democrat", "dental", "dentist", "desi",
+ "design", "dev", "diamonds", "diet", "digital", "direct", "directory",
+ "discount", "diy", "dk:whois.punktum.dk", "do:whois.nic.do", "doctor", "dog",
+ "domains", "download", "earth", "ec", "eco", "education", "email", "energy",
+ "engineer", "engineering", "enterprises", "equipment", "es", "esq", "estate",
+ "eu:whois.eu", "eus", "events", "exchange", "expert", "exposed", "express",
+ "fail", "faith", "family", "fan", "fans", "farm", "fashion", "fast",
+ "feedback", "fi", "film", "finance", "financial", "fish", "fishing", "fit",
+ "fitness", "flights", "florist", "flowers", "fly", "fm", "fo", "foo", "food",
+ "football", "forex", "forsale", "forum", "foundation", "fr", "free", "fun",
+ "fund", "furniture", "futbol", "fyi", "gal", "gallery", "game", "games",
+ "garden", "gay", "gd", "gdn", "gg:whois.gg", "gift", "gifts", "gives",
+ "giving", "glass", "global", "gmbh", "gold", "golf", "graphics", "gratis",
+ "green", "gripe", "group", "gs", "gt", "guide", "guitars", "guru", "gy",
+ "hair", "hamburg", "haus", "health", "healthcare", "help", "hiphop", "hiv",
+ "hk:whois.hkirc.hk", "hn", "hockey", "holdings", "holiday", "homes", "horse",
+ "hospital", "host", "hosting", "hot", "house", "how", "hr:whois.dns.hr",
+ "ht", "icu", "id", "ie:whois.weare.ie", "im:whois.nic.im", "immo",
+ "immobilien", "in", "inc", "industries", "info", "ing", "ink", "institute",
+ "insure", "international", "investments", "io:whois.nic.io", "irish", "ist",
+ "istanbul", "it:whois.nic.it", "je:whois.je", "jetzt", "jewelry", "jobs",
+ "juegos", "kaufen", "ki:whois.nic.ki", "kids", "kim", "kitchen", "kiwi",
+ "koeln", "kyoto", "la:whois.nic.la", "land", "lat", "law", "lawyer",
+ "lc:whois.identitydigital.services", "lease", "legal", "lgbt",
+ "li:whois.nic.li", "life", "lifestyle", "lighting", "limited", "limo",
+ "link", "live", "living", "llc", "loan", "loans", "locker", "lol", "london",
+ "lotto", "love", "lt:whois.domreg.lt", "ltd", "ltda", "lu:whois.dns.lu",
+ "luxe", "luxury", "lv:whois.nic.lv", "madrid", "maison", "makeup",
+ "management", "market", "marketing", "markets", "mba",
+ "me:whois.identitydigital.services", "med", "media", "melbourne", "meme",
+ "memorial", "men", "menu", "mg", "miami", "mk:whois.marnet.mk",
+ "mn:whois.nic.mn", "mobi", "mobile", "moda", "moe", "moi", "mom", "money",
+ "monster", "mortgage", "motorcycles", "mov", "movie", "ms", "mu", "museum",
+ "music", "mx:whois.mx", "my:whois.mynic.my", "nagoya", "name", "navy", "net",
+ "network", "new", "news", "nexus", "nf", "ngo", "ni", "ninja", "nl", "now",
+ "nrw", "nu:whois.iis.nu", "nyc", "nz:whois.irs.net.nz", "observer",
+ "okinawa", "one", "ong", "onl", "online", "ooo", "org", "organic", "osaka",
+ "ovh", "page", "paris", "partners", "parts", "party", "pe:kero.yachay.pe",
+ "pet", "ph", "phd", "photo", "photography", "photos", "pics", "pictures",
+ "pink", "pizza", "pl", "place", "plumbing", "plus", "pm", "poker", "porn",
+ "pr:whois.identitydigital.services", "press", "pro", "productions", "prof",
+ "promo", "properties", "property", "protection", "pt:whois.dns.pt", "pub",
+ "pw", "qpon", "quebec", "quest", "racing", "radio", "re", "realty",
+ "recipes", "red", "rehab", "reise", "reisen", "rent", "rentals", "repair",
+ "report", "republican", "rest", "restaurant", "review", "reviews", "rich",
+ "rip", "ro:whois.rotld.ro", "rocks", "rodeo", "rsvp", "rugby", "ruhr", "run",
+ "ryukyu", "saarland", "sale", "salon", "sarl", "sb:whois.nic.net.sb", "sbs",
+ "sc:whois.nic.sc", "school", "schule", "science", "scot", "se:whois.iis.se",
+ "security", "select", "services", "sex", "sexy", "sh:whois.nic.sh",
+ "shiksha", "shoes", "shop", "shopping", "show", "si", "singles", "site",
+ "ski", "skin", "sn:whois.nic.sn", "so:whois.nic.so", "soccer", "social",
+ "software", "solar", "solutions", "soy", "spa", "space", "spot", "srl",
+ "storage", "store", "stream", "studio", "study", "style", "sucks",
+ "supplies", "supply", "support", "surf", "surgery", "swiss", "sx:whois.sx",
+ "sydney", "systems", "taipei", "talk", "tattoo", "tax", "taxi", "team",
+ "tech", "technology", "tel", "tennis", "tf", "theater", "theatre", "tickets",
+ "tienda", "tips", "tires", "tirol", "tl:whois.nic.tl", "tm:whois.nic.tm",
+ "tn:whois.ati.tn", "to", "today", "tokyo", "tools", "top", "tours", "town",
+ "toys", "trade", "trading", "training", "travel", "tube", "tv", "tw", "uk",
+ "university", "uno", "us:whois.nic.us", "uy", "vacations", "vana",
+ "vc:whois.identitydigital.services", "vegas", "ventures", "vet", "vg",
+ "viajes", "video", "villas", "vin", "vip", "vision", "vlaanderen", "vodka",
+ "vote", "voto", "voyage", "wales", "wang", "watch", "watches", "webcam",
+ "website", "wedding", "wf", "wiki", "win", "wine", "work", "works", "world",
+ "ws:whois.website.ws", "wtf", "xin", "xn--45q11c", "xn--5tzm5g",
+ "xn--6frz82g", "xn--c1avg", "xn--czrs0t", "xn--e1a4c:whois.eu",
+ "xn--fjq720a", "xn--hxt814e", "xn--i1b6b1a6a2e", "xn--ngbc5azd", "xn--nqv7f",
+ "xn--pgbs0dh:whois.ati.tn", "xn--q9jyb4c", "xn--unup4y", "xn--vhquv", "xxx",
+ "xyz", "yachts", "yoga", "yokohama", "you", "yt", "zip", "zone",
+]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..de3fdfa
--- /dev/null
+++ b/README.md
@@ -0,0 +1,133 @@
+# Multithreaded Domain Hoarding !?
+
+> "How to get my IP unbanned by Whois servers" <br>
+> -- You at 3am after using this tool for a few hours
+
+## What even is `hoardom`? (tldr)
+
+The work of art you see below is a tool with which you will be able to achieve the pinnacle of hoarding and money wasting when committing to once again falling down the Domain Hoarding Hole! <br>
+(for example when gambling online just isn't doing it for you anymore)
+
+Seriously for once: it is meant to at least make the journey you'll have losing your mind finding a banger domain more tolerable than sitting at your computer until 5am scrolling through domain registrars' crappy ass domain lookup webpanels just to know what's not taken yet.
+
+![CUCKKOM](doc/pics/image.png)
+
+
+
+Dont believe me on this being the best? Fair, but what if one of my test subjects* described to you how it felt to use this tool :
+
+> THIS IS THE BEST domain hoarding tool ever!!! Ive gone completely mental and now have loads and loads of debt because I bought a bunch of worthless domains I will do nothing with!
+
+*(the subject was me and voices i got in my head making this)
+
+Dont wanna use its TUI? Fine it can also run as a normal CLI tool if you just wanna search some stuff real quick and move on with your sad chud life before going insane and launching it in TUI mode and start HOARDING you favorites, scribble your schizophrenia into its scratchpad and lose all your money to domains you wont ever use <3
+
+## what this thing does
+
+- searches suffixes or full domains across TLD lists
+- has a TUI mode with:
+ - results
+ - favorites
+ - settings
+ - scratchpad
+ - exports
+- has a CLI mode for quick searches, scripts or sigma terminal maxxers.
+- lets you import custom TLD lists from toml files because I wont bother keeping a list of "technically obtainable ones" up to date (Check and edit List.toml before compiling if the embedded ones I prepared dont satisfy you)
+- keeps a `.hoardom` config folder with your saved stuff
+ - favorites
+ - imported lists
+ - em notes
+ - settings
+ - cach and stuff idk
+
+## known bugs
+- missing basic ah tui features
+- half of the app untested its 6am i havent slept leave me alone
+- scrolling then selecting something sometimes takes a while until the button hitboxes catch up with scrolling (if using mouse or touchpad that is)
+- will display false positives for some domains that have a minimum length that this tool doesnt know about (usually 4/5 letter domains where ur part is only 2)
+
+## planned features
+that will probably not come because I have no time.
+- search within results
+
+## more screenshots
+
+### speedy as fuck ? heck yea !!!
+
+How long does it take to search all domains purchasable through either Porkbun, INWX or OVH (~637 TLDs)? 15ish seconds on wifi at my home, most of the time coming from retries at some uglier tlds
+![CRACKCOCAINE](doc/pics/speed.png)
+
+Oh and ofcourse you can save favorites and check their status here and there, if one changes it shows an ! next to it until you confirm it with enter.
+
+### export popup
+
+you can export stuff from tui wow!
+
+![EXTORT](doc/pics/export.png)
+
+### cli example
+
+put a CLI output screenshot here later
+
+<!-- add cli screenshot here -->
+
+## quick start
+
+if you just wanna run it from source real quick:
+
+```bash
+cargo run -- --tui
+```
+
+some normal CLI examples:
+
+```bash
+cargo run -- coolproject
+cargo run -- coolproject.io
+cargo run -- --list Country hoardom
+```
+
+import a custom list and jump into the TUI:
+
+```bash
+cargo run -- --import-filter ./doc/example-list.toml --tui
+```
+
+refresh the RDAP cache manually:
+
+```bash
+cargo run -- --refresh-cache
+```
+
+## docs
+
+the proper usage docs live here cause i didnt want this readme to turn into a kilometer long wall of flags:
+
+- [CLI usage](doc/CLI.md)
+- [TUI usage](doc/TUI.md)
+
+so yeah if you want the actual button/key stuff or the CLI flag soup, go there.
+
+## custom lists
+
+there is an importable example file here:
+
+- [`doc/example-list.toml`](doc/example-list.toml)
+
+custom lists are just simple toml files with a name and a `tlds` array.
+
+example shape:
+
+```toml
+name = "somecoollist"
+tlds = ["com", "io", "dev", "sh"]
+```
+
+then import it with:
+
+```bash
+cargo run -- --import-filter ./path/to/list.toml --tui
+```
+
+## did i use ai for coding this ?
+for helping me fix bugs yea obv. other than that only some markdown structures as guidance were made by agentic llms and comments to the code were sanitized from very harsh swearing by an llm lol.
diff --git a/build.rs b/build.rs
new file mode 100644
index 0000000..fe84b39
--- /dev/null
+++ b/build.rs
@@ -0,0 +1,81 @@
+// Build script for hoardom.
+//
+// * Validates the mutually exclusive whois feature flags.
+// * Reads the [package.metadata.hoardom] table out of Cargo.toml and the
+//   list names out of Lists.toml, exposing both to the crate at compile
+//   time via `cargo:rustc-env=` instructions.
+fn main() {
+    // `cfg!(feature = "...")` does NOT see the crate's features inside a
+    // build script — the script is compiled as its own crate without them,
+    // so the previous cfg!-based check was always false. Cargo exposes the
+    // enabled features as CARGO_FEATURE_<NAME> environment variables instead.
+    let system_whois = std::env::var_os("CARGO_FEATURE_SYSTEM_WHOIS").is_some();
+    let builtin_whois = std::env::var_os("CARGO_FEATURE_BUILTIN_WHOIS").is_some();
+    if system_whois && builtin_whois {
+        panic!("Cannot enable both 'system-whois' and 'builtin-whois' features at the same time");
+    }
+
+    // Extract the quoted value from a `key = "value"` line, if the line
+    // starts with `key`. A tiny hand-rolled scan keeps build.rs dependency-free.
+    fn metadata_value<'a>(line: &'a str, key: &str) -> Option<&'a str> {
+        line.strip_prefix(key)?
+            .trim_start()
+            .strip_prefix('=')
+            .map(|v| v.trim().trim_matches('"'))
+    }
+
+    let manifest = std::fs::read_to_string("Cargo.toml").expect("Could not read Cargo.toml");
+
+    // Compile-time defaults, overridable via [package.metadata.hoardom].
+    let mut whois_cmd = "whois".to_string();
+    let mut whois_flags = String::new();
+    let mut rdap_bootstrap_url = "https://data.iana.org/rdap/dns.json".to_string();
+
+    let mut in_metadata = false;
+    for raw in manifest.lines() {
+        let line = raw.trim();
+        if line == "[package.metadata.hoardom]" {
+            in_metadata = true;
+            continue;
+        }
+        if line.starts_with('[') {
+            // Any other table header ends the metadata section.
+            in_metadata = false;
+            continue;
+        }
+        if !in_metadata {
+            continue;
+        }
+        if let Some(v) = metadata_value(line, "whois-command") {
+            whois_cmd = v.to_string();
+        } else if let Some(v) = metadata_value(line, "whois-flags") {
+            whois_flags = v.to_string();
+        } else if let Some(v) = metadata_value(line, "rdap-bootstrap-url") {
+            rdap_bootstrap_url = v.to_string();
+        }
+    }
+
+    println!("cargo:rustc-env=HOARDOM_WHOIS_CMD={}", whois_cmd);
+    println!("cargo:rustc-env=HOARDOM_WHOIS_FLAGS={}", whois_flags);
+    println!("cargo:rustc-env=HOARDOM_RDAP_BOOTSTRAP_URL={}", rdap_bootstrap_url);
+
+    // Extract list names from Lists.toml — keys with array values, i.e.
+    // lines shaped like `standard = [` — so the crate can enumerate the
+    // built-in lists without parsing TOML at runtime.
+    let lists_toml = std::fs::read_to_string("Lists.toml").expect("Could not read Lists.toml");
+    let mut list_names = Vec::new();
+    for line in lists_toml.lines() {
+        let trimmed = line.trim();
+        if let Some(eq_pos) = trimmed.find(" = [") {
+            let name = trimmed[..eq_pos].trim();
+            // Skip blanks and commented-out entries.
+            if !name.is_empty() && !name.starts_with('#') {
+                list_names.push(name.to_string());
+            }
+        }
+    }
+    println!("cargo:rustc-env=HOARDOM_LIST_NAMES={}", list_names.join(","));
+
+    // Re-run the build script whenever either input file changes.
+    println!("cargo:rerun-if-changed=Cargo.toml");
+    println!("cargo:rerun-if-changed=Lists.toml");
+}
diff --git a/doc/CLI.md b/doc/CLI.md
new file mode 100644
index 0000000..63ca92f
--- /dev/null
+++ b/doc/CLI.md
@@ -0,0 +1,87 @@
+# CLI Usage
+
+hoardom can run as a normal CLI tool for quick domain lookups.
+
+written before i went nuts making the tui tool
+
+## Basic Usage
+
+```bash
+# search a name across all TLDs in the Standard list
+hoardom coolproject
+
+# search a specific full domain
+hoardom coolproject.io
+
+# search multiple domains at once
+hoardom coolproject mysite bigidea
+```
+
+## Modes
+
+| Flag | Description |
+|---------|----------------------------------------------------|
+| `--cli` | Default non-interactive mode (implied when no flag) |
+| `--tui` | Launch the Terminal UI |
+
+## Basics
+
+| Flag | Description |
+|-----------------------------|-------------------------------------------------------------------|
+| `-e, --environement=PATH` | Where to store the `.hoardom` config folder |
+| `-a, --all` | Show all results including unavailable domains |
+| `-h, --help` | Basic help |
+| `-H, --fullhelp` | Full help with all flags |
+
+## Advanced
+
+| Flag | Description |
+|-----------------------------|-------------------------------------------------------------------|
+| `-c, --csv[=PATH]` | Output as CSV. If PATH is given writes to file, otherwise stdout |
+| `-l, --list=LIST` | TLD list to use: `Standard`, `Decent`, `Country`, `All` |
+| `-i, --import-filter=PATH` | Import a custom TOML TLD list for this session |
+| `-t, --top=TLD,TLD` | Pin certain TLDs to the top of results |
+| `-o, --onlytop=TLD,TLD` | Only search these specific TLDs |
+| `-s, --suggestions=NUMBER` | How many alternative suggestions to show (default: 0 / disabled) |
+
+## Various
+
+| Flag | Description |
+|-----------------------------|-------------------------------------------------------------------|
+| `-j, --jobs=NUMBER` | Number of concurrent lookup requests (default: 1). Controls how many TLDs are looked up at the same time. Higher values speed up searches but may trigger rate limiting from RDAP/WHOIS servers. Max 99. |
+| `-D, --delay=SECONDS` | Delay in seconds between lookup requests |
+| `-R, --retry=NUMBER` | Retry count on lookup errors (default: 1) |
+| `-V, --verbose` | Verbose output for debugging |
+| `-A, --autosearch=FILE` | Search domains from a text file (one per line) |
+| `-C, --no-color` | Monochrome output |
+| `-U, --no-unicode` | ASCII-only output (no unicode box drawing) |
+| `-M, --no-mouse` | Disable mouse integration in TUI |
+| `-r, --refresh-cache` | Force refresh the RDAP bootstrap cache |
+
+## Examples
+
+```bash
+# fast search with 8 concurrent requests
+hoardom -j 8 coolproject
+
+# search with the Country TLD list
+hoardom -l Country mysite
+
+# only check .com and .net
+hoardom -o com,net bigidea
+
+# CSV output to file
+hoardom -c results.csv coolproject
+
+# CSV to stdout (no logs)
+hoardom -c coolproject
+
+# search domains from a file
+hoardom -A domains.txt
+
+# import a custom list and search
+hoardom -i custom-tlds.toml coolproject
+
+# verbose with retry and delay
+hoardom -V -R 3 -D 0.5 coolproject
+```
diff --git a/doc/TUI.md b/doc/TUI.md
new file mode 100644
index 0000000..ff1a501
--- /dev/null
+++ b/doc/TUI.md
@@ -0,0 +1,138 @@
+# TUI Usage
+
+dis da fun mode. makes u feel like hackerman or smth, launch it with:
+
+```bash
+hoardom --tui
+```
+
+or from source like a true alpha:
+
+```bash
+cargo run -- --tui
+```
+
+you can also pass CLI flags alongside `--tui` and theyll apply as the starting config. like if you want to start with the Country list and some prioritized TLDs:
+
+```bash
+hoardom --tui --list Country --top ch,li
+```
+
+## layout
+
+the TUI has a few panels:
+
+- **search bar** at the bottom: type your domain names here then obliterate enter to hacksor them
+- **results** in the middle: shows whats available and whats taken
+- **favorites** on the right side: your watchlist on pulling the trigger and shelling out moneu for em domainers
+- **settings** below favorites: toggle stuff duh
+- **scratchpad** on the left (if enabled in toggler settings): just a little text area for gathering inspiration and other stuff like amongus memes or the bee movie script (saves to config.toml btw so u dont lose your mommy asmr converted to base64 that you saved in ur notes)
+
+and since version 2.0.1 theres also a top bar with an export button and help button.
+
+## searching
+
+type a domain suffix or full domain in the search bar and press Enter. you can search multiple things at once separated by spaces or commas:
+
+```
+coolproject shitproject durchfall
+```
+
+or mix full domains and suffixes:
+
+```
+coolproject.ch shitproject
+```
+
+results stream in as they come back, theres a progress bar at the top of the results panel during a search.
+
+press `s` during a search to cancel it if youre impatient.
+
+if `Clear on Search` is off in settings, results accumulate across searches. press `C` to clear manually.
+
+## keyboard shortcuts
+
+### global
+| key | what |
+|-----|------|
+| `F1` | toggle help overlay |
+| `F2` | open/close export popup |
+| `Ctrl+C` | quit |
+| `s` | cancel running search |
+| `Tab` / `Shift+Tab` | cycle between panels |
+| `Esc` | close help/dropdown, or clear selection in current panel |
+
+### search bar
+| key | what |
+|-----|------|
+| `Enter` | start the search |
+| typing | works normally when no search is running |
+| `Home` / `End` | jump to start/end of input |
+
+### results
+| key | what |
+|-----|------|
+| `Up` / `Down` | navigate the list |
+| `Enter` | add highlighted domain to favorites |
+| mouse scroll | scroll through results |
+
+### favorites
+| key | what |
+|-----|------|
+| `Up` / `Down` | navigate |
+| `Backspace` / `Delete` | remove the focused favorite |
+
+### settings
+| key | what |
+|-----|------|
+| `Up` / `Down` | move between settings rows |
+| `Enter` / `Space` | toggle checkboxes or open the TLD list dropdown |
+
+### scratchpad
+as was already said... put whatever u want in here
+
+## mouse and scrolling support
+mouse is on by default. you can click and interact with stuff
+
+if you hate mouse in terminal apps (or your terminal doesnt like it for reasons) can be disabled :
+
+```bash
+hoardom --tui --no-mouse
+```
+
+## settings panel
+
+theres 4 things in there:
+
+- **TLD List** dropdown: pick Standard, Decent, Country, All, or any custom imported lists
+- **Show Unavailable** checkbox: toggles whether taken domains show with premium details in results
+- **Show Notes Panel** checkbox: toggles the scratchpad panel on the left
+- **Clear on Search** checkbox: if on, results get cleared before each new search. if off they pile up for the true hoarding feeling.
+
+oh and settings auto save to config
+
+## favorites
+
+when you find a domain you like in the results, highlight it and press Enter to add it to favorites. favorites show up in the right panel and persist across sessions.
+
+if you (for whatever reason) decide to stop hoard domains in ther you can REMOVE THEM OMG WHAT CRAZY AHH FEATURE I KNOW RIGHT ???, focus the favorites panel, navigate to it, and hit Backspace or Delete.
+
+## export
+
+press `F2` or click the Export button in the top bar.
+
+two modes:
+- **Favorites TXT**: dumps your favorites list to a text file, one per line, useful if u wanna search em all for availability and shiet again.
+- **Results CSV**: exports currently visible results as CSV ... if u want that.
+
+will warn u when u bouta overwrite ur precious files and also last used export paths get remembered for faster exporting.
+
+## the too small thing
+
+if you resize the terminal too small, hoardom will refuse to work and complain about being claustrophobic! please respect its feelings and dont scare it like that
+
+## tips
+
+- you can pass `--import-filter` alongside `--tui` to import a list and jump straight into the TUI with it
+- `--verbose` works in TUI too, it prints debug stuff to stderr (which you wont see in the TUI itself, but its there if you redirect it)
+- `--no-color` also works here for your 90's dial up crt tty
diff --git a/doc/example-list.toml b/doc/example-list.toml
new file mode 100644
index 0000000..cc35628
--- /dev/null
+++ b/doc/example-list.toml
@@ -0,0 +1,21 @@
+# Example custom TLD list for hoardom
+# Import with: hoardom -i doc/example-list.toml <domain>
+#
+# The name field is required and used to identify the list.
+# Importing a list with the same name replaces the old one.
+# The tlds array defines which TLDs to search.
+# Order matters - domains are shown in the same order as listed here.
+
+name = "Tech Startup"
+tlds = [
+ "com",
+ "io",
+ "dev",
+ "app",
+ "tech",
+ "ai",
+ "co",
+ "net",
+ "org",
+ "sh",
+]
diff --git a/doc/pics/export.png b/doc/pics/export.png
new file mode 100644
index 0000000..6bb7044
--- /dev/null
+++ b/doc/pics/export.png
Binary files differ
diff --git a/doc/pics/image.png b/doc/pics/image.png
new file mode 100644
index 0000000..a2d10b4
--- /dev/null
+++ b/doc/pics/image.png
Binary files differ
diff --git a/doc/pics/speed.png b/doc/pics/speed.png
new file mode 100644
index 0000000..8937a1b
--- /dev/null
+++ b/doc/pics/speed.png
Binary files differ
diff --git a/scripts/fetch-tlds.sh b/scripts/fetch-tlds.sh
new file mode 100755
index 0000000..0892f42
--- /dev/null
+++ b/scripts/fetch-tlds.sh
@@ -0,0 +1,816 @@
+#!/usr/bin/env bash
+# fetch-tlds.sh — Scrape purchasable TLD lists from registrar APIs
+# Outputs clean sorted lists for use in Lists.toml
+#
+# Usage:
+# ./scripts/fetch-tlds.sh # fetch all, show summary
+# ./scripts/fetch-tlds.sh porkbun # porkbun only
+# ./scripts/fetch-tlds.sh inwx # inwx only
+# ./scripts/fetch-tlds.sh --raw # output raw TLD lists (one per line)
+# ./scripts/fetch-tlds.sh --toml # output TOML-ready arrays
+# ./scripts/fetch-tlds.sh --diff # compare against current Lists.toml
+# ./scripts/fetch-tlds.sh --template # generate full Lists.toml with whois overrides if necessary
+#
+# Notes : yea this is ai slop, didnt make it myself oooo scary, but most of the rust i did myself just didnt feel like doing this at 4am and it somewhat works
+
+
+# Strict mode: abort on command failure, unset variables, and pipeline errors.
+set -euo pipefail
+
+# Resolve paths relative to the repository root, not the caller's cwd,
+# so the script works no matter where it is invoked from.
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+LISTS_TOML="$PROJECT_DIR/Lists.toml"
+# Downloaded registrar/IANA data is cached here to avoid re-fetching.
+CACHE_DIR="$PROJECT_DIR/.tld-cache"
+mkdir -p "$CACHE_DIR"
+
+# ANSI color codes for human-readable status output (written to stderr
+# throughout, so stdout stays clean for --raw / --toml / --template piping).
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+CYAN='\033[0;36m'
+BOLD='\033[1m'
+NC='\033[0m'
+
+# ─── Porkbun ────────────────────────────────────────────────────────────────
+# Download Porkbun's public pricing JSON (cached for 24h).
+# Prints the cache file path on stdout; status messages go to stderr.
+# Returns 1 if the download fails and no fresh cache exists.
+fetch_porkbun() {
+ local cache="$CACHE_DIR/porkbun.json"
+ local max_age=86400 # 24h cache
+
+ if [[ -f "$cache" ]]; then
+ # File mtime via BSD stat (-f%m) or GNU stat (-c%Y); falls back to 0
+ # (i.e. "very old") if neither flavor works.
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching Porkbun pricing API...${NC}" >&2
+ # Porkbun's public pricing endpoint — no API key required.
+ if curl -sf -X POST "https://api.porkbun.com/api/json/v3/pricing/get" \
+ -H "Content-Type: application/json" \
+ -d '{}' \
+ -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${RED}Failed to fetch Porkbun data${NC}" >&2
+ return 1
+ fi
+}
+
+# Print the TLD keys (one per line, sorted, deduped) from a cached Porkbun
+# pricing JSON file given as $1.
+parse_porkbun() {
+ local json_file="$1"
+ # Extract TLD keys from the pricing response
+ # Response format: {"status":"SUCCESS","pricing":{"com":{...},"net":{...},...}}
+ if command -v jq &>/dev/null; then
+ jq -r '.pricing // {} | keys[]' "$json_file" 2>/dev/null | sort -u
+ else
+ # Fallback: grep for TLD keys (less reliable but works)
+ grep -o '"[a-z][a-z0-9.-]*":{' "$json_file" | sed 's/"//g; s/:{//' | sort -u
+ fi
+}
+
+# ─── INWX ───────────────────────────────────────────────────────────────────
+# Download the INWX domain-check HTML page (cached 24h; same caching pattern
+# as fetch_porkbun). Prints the cache file path on stdout; returns 1 on failure.
+fetch_inwx() {
+ local cache="$CACHE_DIR/inwx.html"
+ local max_age=86400
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching INWX domain list...${NC}" >&2
+ # INWX domain check page has TLD list embedded as JSON
+ if curl -sfL "https://www.inwx.de/en/domain/check" \
+ -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+ -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${YELLOW}Could not fetch INWX${NC}" >&2
+ return 1
+ fi
+}
+
+# Print the TLDs (one per line, sorted) scraped out of the cached INWX page ($1).
+parse_inwx() {
+ local html_file="$1"
+ # TLDs are embedded as JSON objects with "tld":"xxx" in the page
+ grep -oE '"tld":"[a-z]{2,20}"' "$html_file" | sed 's/"tld":"//;s/"//' | sort -u
+}
+
+# ─── OVH ────────────────────────────────────────────────────────────────────
+# Download OVH's domain-extensions JSON (cached 24h; same caching pattern as
+# fetch_porkbun). Prints the cache file path on stdout; returns 1 on failure.
+fetch_ovh() {
+ local cache="$CACHE_DIR/ovh.json"
+ local max_age=86400
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching OVH domain extensions...${NC}" >&2
+ if curl -sf "https://www.ovh.com/engine/apiv6/domain/extensions" \
+ -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+ -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${YELLOW}Could not fetch OVH extensions${NC}" >&2
+ return 1
+ fi
+}
+
+# Print top-level TLDs (one per line, sorted) from the cached OVH JSON ($1).
+# Sub-TLDs such as com.au are filtered out by dropping entries with dots.
+parse_ovh() {
+ local json_file="$1"
+ if command -v jq &>/dev/null; then
+ # Only top-level TLDs (no dots = not sub-TLDs like com.au)
+ jq -r '.[]' "$json_file" 2>/dev/null | grep -vE '\.' | sort -u
+ else
+ grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | grep -vE '\.' | sort -u
+ fi
+}
+
+# ─── tld-list.com (comprehensive registry, free basic list) ────────────────
+# Download the free basic TLD CSV from tld-list.com (cached 24h; same caching
+# pattern as fetch_porkbun). Prints the cache file path; returns 1 on failure.
+fetch_tldlist() {
+ local cache="$CACHE_DIR/tldlist-basic.txt"
+ local max_age=86400
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching tld-list.com basic list...${NC}" >&2
+ if curl -sf "https://tld-list.com/df/tld-list-basic.csv" \
+ -H "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
+ -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${YELLOW}Could not fetch tld-list.com${NC}" >&2
+ return 1
+ fi
+}
+
+# Print ASCII-only TLDs (one per line, sorted) from the cached CSV file ($1).
+parse_tldlist() {
+ local file="$1"
+ # One TLD per line, CR/LF endings, includes IDN entries — filter to ASCII only
+ tr -d '\r' < "$file" | grep -E '^[a-z][a-z0-9]*$' | sort -u
+}
+
+# ─── IANA root zone (fallback) ──────────────────────────────────────────────
+# Download the official IANA root-zone TLD list (cached 7 days; same caching
+# pattern as fetch_porkbun). Prints the cache file path; returns 1 on failure.
+fetch_iana() {
+ local cache="$CACHE_DIR/iana-tlds.txt"
+ local max_age=604800 # 7 days
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching IANA TLD list...${NC}" >&2
+ if curl -sf "https://data.iana.org/TLD/tlds-alpha-by-domain.txt" -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${RED}Failed to fetch IANA list${NC}" >&2
+ return 1
+ fi
+}
+
+# Print all IANA TLDs (lowercased, sorted) from the cached list ($1).
+parse_iana() {
+ local file="$1"
+ # Skip header line, lowercase everything, filter to 2-3 char ccTLDs
+ tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | sort -u
+}
+
+# Same as parse_iana but restricted to 2-letter country-code TLDs.
+parse_iana_cctlds() {
+ local file="$1"
+ tail -n +2 "$file" | tr '[:upper:]' '[:lower:]' | grep -E '^[a-z]{2}$' | sort -u
+}
+
+# ─── RDAP bootstrap (what actually has lookup servers) ──────────────────────
+# Download the IANA RDAP bootstrap file for DNS (cached 24h; same caching
+# pattern as fetch_porkbun). Prints the cache file path; returns 1 on failure.
+fetch_rdap() {
+ local cache="$CACHE_DIR/rdap-dns.json"
+ local max_age=86400
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching RDAP bootstrap...${NC}" >&2
+ if curl -sf "https://data.iana.org/rdap/dns.json" -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${RED}Failed to fetch RDAP bootstrap${NC}" >&2
+ return 1
+ fi
+}
+
+# Print TLDs that have an RDAP service (one per line, lowercased, sorted) from
+# the cached bootstrap JSON ($1). The services array mixes TLD names and
+# server URLs; URLs are dropped by filtering out entries starting with "http".
+parse_rdap_tlds() {
+ local json_file="$1"
+ if command -v jq &>/dev/null; then
+ jq -r '.services[][] | .[]' "$json_file" 2>/dev/null | grep -v '^http' | tr '[:upper:]' '[:lower:]' | sort -u
+ else
+ grep -oE '"[a-z]{2,20}"' "$json_file" | tr -d '"' | sort -u
+ fi
+}
+
+# ─── WHOIS server list (rfc1036/whois project) ─────────────────────────────
+# Download the rfc1036/whois project's TLD→WHOIS-server table (cached 7 days;
+# same caching pattern as fetch_porkbun). Prints the cache file path;
+# returns 1 on failure.
+fetch_whois_servers() {
+ local cache="$CACHE_DIR/tld_serv_list.txt"
+ local max_age=604800 # 7 days
+
+ if [[ -f "$cache" ]]; then
+ local age=$(( $(date +%s) - $(stat -f%m "$cache" 2>/dev/null || stat -c%Y "$cache" 2>/dev/null || echo 0) ))
+ if (( age < max_age )); then
+ echo "$cache"
+ return 0
+ fi
+ fi
+
+ echo -e "${CYAN}Fetching WHOIS server list...${NC}" >&2
+ if curl -sf "https://raw.githubusercontent.com/rfc1036/whois/next/tld_serv_list" -o "$cache" 2>/dev/null; then
+ echo "$cache"
+ else
+ echo -e "${YELLOW}Could not fetch WHOIS server list${NC}" >&2
+ return 1
+ fi
+}
+
+# Get the WHOIS server for a given TLD ($1) from the cached server list ($2).
+# Prints the server hostname, or an empty string if no usable server is found
+# (missing entry, NONE/ARPA placeholder, or a web-only http URL).
+get_whois_server() {
+ local tld="$1"
+ local serv_file="$2"
+ # Format: .tld [optional-tag] server
+ # Some entries have VERISIGN or similar tag before the server
+ local line
+ line=$(grep -E "^\\.${tld}[[:space:]]" "$serv_file" 2>/dev/null | head -1)
+ if [[ -z "$line" ]]; then
+ echo ""
+ return
+ fi
+ # Extract server: last word on the line that looks like a hostname
+ # (scan fields right-to-left so a leading tag like VERISIGN is skipped)
+ local server
+ server=$(echo "$line" | awk '{
+ for (i=NF; i>=2; i--) {
+ if ($i ~ /^[a-z0-9].*\.[a-z]/) { print $i; exit }
+ }
+ }')
+ # Filter out unusable entries
+ if [[ "$server" == "NONE" || "$server" == "ARPA" || -z "$server" || "$server" == http* ]]; then
+ echo ""
+ else
+ echo "$server"
+ fi
+}
+
+# Get WHOIS server from IANA directly (slower, single TLD at a time).
+# Scrapes the "WHOIS Server:" field off the IANA root-db page for TLD $1;
+# prints nothing if the page has no such field or the request fails.
+get_iana_whois_server() {
+ local tld="$1"
+ curl -s "https://www.iana.org/domains/root/db/${tld}.html" 2>/dev/null \
+ | sed -n 's/.*WHOIS Server:<\/b> *\([^ <]*\).*/\1/p' \
+ | head -1
+}
+
+# ─── Extract current Lists.toml entries ─────────────────────────────────────
+# Print the TLDs (one per line, sorted) of the named list in Lists.toml.
+# $1 — list name, defaults to "all". Whois-override suffixes ("tld:server")
+# are stripped so output is comparable with the registrar lists.
+parse_current_lists() {
+ local list_name="${1:-all}"
+ if [[ ! -f "$LISTS_TOML" ]]; then
+ echo -e "${RED}No Lists.toml found at $LISTS_TOML${NC}" >&2
+ return 1
+ fi
+ # Extract TLDs from a named list, stripping quotes, colons (whois overrides), commas
+ # The awk scanner collects tokens from "<name> = [" until the closing "]"
+ # (or the next section header, whichever comes first).
+ awk -v list="$list_name" '
+ $0 ~ "^"list" *= *\\[" { found=1; next }
+ found && /^\]/ { exit }
+ found && /^[[:space:]]*\[/ { exit }
+ found {
+ gsub(/["\t,]/, " ")
+ n = split($0, parts, " ")
+ for (i=1; i<=n; i++) {
+ if (parts[i] != "") {
+ # Strip whois override suffix
+ sub(/:.*/, "", parts[i])
+ print parts[i]
+ }
+ }
+ }
+ ' "$LISTS_TOML" | sort -u
+}
+
+# ─── Helpers ────────────────────────────────────────────────────────────────
+# Read TLD names from stdin (one per line, blanks skipped) and print them as
+# the body of a TOML array: double-quoted, comma-separated, wrapped at ~78
+# columns, each output row prefixed with a tab and ending in a comma.
+# Empty input produces no output at all.
+to_toml_array() {
+    local tlds=()
+    local tld
+    while IFS= read -r tld; do
+        [[ -z "$tld" ]] && continue
+        tlds+=("$tld")
+    done
+    # Fix: the original fell through and emitted a stray "\t," row for empty
+    # input, which is invalid inside a TOML array; expanding an empty array
+    # also trips `set -u` on bash < 4.4. Bail out early instead.
+    (( ${#tlds[@]} == 0 )) && return 0
+    local line='\t'
+    local first=true
+    for tld in "${tlds[@]}"; do
+        local entry="\"$tld\""
+        if $first; then
+            line+="$entry"
+            first=false
+        else
+            # Wrap before the row would exceed ~78 columns.
+            local test_line="$line, $entry"
+            if (( ${#test_line} > 78 )); then
+                echo -e "$line,"
+                line="\t$entry"
+            else
+                line+=", $entry"
+            fi
+        fi
+    done
+    # Flush the final (always non-empty here) row.
+    echo -e "$line,"
+}
+
+# stdin filter: keep only 2-letter (country-code) TLDs.
+filter_cctlds() {
+ grep -E '^[a-z]{2}$'
+}
+
+# stdin filter: keep only short TLDs.
+filter_short_tlds() {
+ # 2-6 char TLDs that are useful for domain hacking
+ grep -E '^[a-z]{2,6}$'
+}
+
+# ─── Known broken/unregistrable TLDs ────────────────────────────────────────
+# NOTE(review): these look like reserved/withdrawn ccTLDs — confirm before
+# extending the list.
+SKIP_TLDS="bl bq eh mf gb bv sj kp hm"
+
+# stdin filter: drop every TLD listed in SKIP_TLDS.
+filter_skip() {
+ local skip_pattern
+ skip_pattern=$(echo "$SKIP_TLDS" | tr ' ' '|')
+ grep -vE "^($skip_pattern)$"
+}
+
+# ─── Template generation ────────────────────────────────────────────────────
+# Generates a full Lists.toml with:
+# - "tld" for TLDs with RDAP support (direct lookup works)
+# - "tld:whois.server" for TLDs needing WHOIS fallback
+# - skip TLDs omitted entirely (no Patch.toml needed)
+#
+# Uses: Porkbun + OVH + INWX (purchasable), RDAP bootstrap (has server?), WHOIS server list
+# With --all-sources: also cross-references tld-list.com
+# Emit a complete Lists.toml to stdout.
+# $1 — merged registrar-confirmed purchasable TLDs (one per line)
+# $2 — TLDs present in the IANA RDAP bootstrap (one per line)
+# $3 — human-readable source summary used in the generated header
+# Progress/stats go to stderr so stdout can be redirected into Lists.toml.
+generate_template() {
+ local all_registrar_tlds="$1"
+ local rdap_tlds="$2"
+ local source_summary="$3"
+
+ # Fetch WHOIS server list for fallback
+ local whois_serv_file=""
+ if whois_serv_file=$(fetch_whois_servers 2>/dev/null); then
+ true # got it
+ fi
+
+ # The input is already merged + filtered from all registrar sources
+ local buyable_tlds
+ buyable_tlds=$(echo "$all_registrar_tlds" | filter_skip | sort -u)
+
+ local buyable_count
+ # NOTE(review): on empty input "grep -c ." prints 0 AND exits 1, so the
+ # "|| echo 0" fallback appends a second zero ("0\n0") — confirm and use
+ # "|| true" (as done in main) instead.
+ buyable_count=$(echo "$buyable_tlds" | grep -c . || echo 0)
+
+ # Build annotated TLD list: "tld" or "tld:whois_server"
+ # A TLD needs a whois override if it's NOT in the RDAP bootstrap
+ local annotated_all=()
+ local annotated_cc=()
+ local rdap_hit=0 whois_hit=0 bare_hit=0
+
+ while IFS= read -r tld; do
+ [[ -z "$tld" ]] && continue
+ local entry=""
+ if echo "$rdap_tlds" | grep -qx "$tld" 2>/dev/null; then
+ # Has RDAP server — no override needed
+ entry="$tld"
+ # "|| true": ((x++)) returns 1 when x was 0, which would trip set -e
+ ((rdap_hit++)) || true
+ else
+ # No RDAP — try to find WHOIS server
+ local server=""
+ if [[ -n "$whois_serv_file" ]]; then
+ server=$(get_whois_server "$tld" "$whois_serv_file")
+ fi
+ if [[ -n "$server" ]]; then
+ entry="${tld}:${server}"
+ ((whois_hit++)) || true
+ else
+ # No known server — include bare, hoardom will try common patterns
+ entry="$tld"
+ ((bare_hit++)) || true
+ fi
+ fi
+ annotated_all+=("$entry")
+ # Also track ccTLDs (2-letter entries)
+ local base_tld="${tld%%:*}"
+ if [[ "$base_tld" =~ ^[a-z]{2}$ ]]; then
+ annotated_cc+=("$entry")
+ fi
+ done <<< "$buyable_tlds"
+
+ echo -e "${CYAN}Building template...${NC}" >&2
+ echo -e " ${GREEN}${rdap_hit}${NC} TLDs with RDAP (direct lookup)" >&2
+ echo -e " ${YELLOW}${whois_hit}${NC} TLDs with WHOIS override" >&2
+ echo -e " ${RED}${bare_hit}${NC} TLDs with no known server (will probe)" >&2
+ echo "" >&2
+
+ # ── Curated lists (bare TLD names, annotated automatically) ─────────
+
+ # Standard: com, net, org + generally desirable / well-known TLDs
+ local standard_tlds=(
+ "com" "net" "org" "io" "co" "dev" "app" "me" "info"
+ "biz" "one" "xyz" "online" "site" "tech" "pro" "tv"
+ "cc" "to" "sh" "li" "fm" "am" "gg" "ws" "la"
+ "ms" "nu" "cx" "mn" "st" "tel" "ai" "id" "in"
+ "it" "is" "at" "be" "de" "eu" "fr" "nl" "se"
+ "uk" "us" "ca" "au" "nz" "club" "blog" "art" "fun"
+ "lol" "wtf" "page" "link" "space" "store" "shop"
+ )
+
+ # Decent: the best of the best — com, net, org, io + short desirable ones
+ # that work great for domain hacking and are punchy
+ local decent_tlds=(
+ "com" "net" "org" "io" "dev" "app" "co" "me"
+ "ai" "sh" "to" "fm" "tv" "gg" "cc" "li" "am"
+ "la" "nu" "id" "in" "it" "is" "at" "ws"
+ "one" "pro" "bio" "art" "ink" "run" "win" "new"
+ "lol" "pub" "fun" "vet" "fit" "rip" "wtf" "zip"
+ )
+
+ # Swiss: standard-like but with Swiss / Central European related TLDs up front
+ local swiss_tlds=(
+ "com" "net" "org" "ch" "li" "swiss" "zuerich"
+ "io" "co" "dev" "app" "me" "info" "one" "pro"
+ "de" "at" "fr" "it" "eu"
+ "tech" "online" "site" "shop" "store"
+ "biz" "xyz" "tv" "cc" "to" "sh" "fm" "am" "gg"
+ )
+
+ # Annotate curated lists with whois overrides where needed
+ # Nested helper; "local -n" is a bash 4.3+ nameref onto the caller's array.
+ # Reads annotated_all from the enclosing scope.
+ annotate_list() {
+ local -n input_list=$1
+ local result=()
+ for bare_tld in "${input_list[@]}"; do
+ local found=false
+ for ann in "${annotated_all[@]}"; do
+ local ann_base="${ann%%:*}"
+ if [[ "$ann_base" == "$bare_tld" ]]; then
+ result+=("$ann")
+ found=true
+ break
+ fi
+ done
+ if ! $found; then
+ result+=("$bare_tld")
+ fi
+ done
+ printf '%s\n' "${result[@]}"
+ }
+
+ # Length-based filtered lists from annotated_all
+ # Nested helper: prints entries whose bare TLD length is in [$1, $2].
+ filter_annotated_by_length() {
+ local min="$1"
+ local max="$2"
+ for ann in "${annotated_all[@]}"; do
+ local base="${ann%%:*}"
+ local len=${#base}
+ if (( len >= min && len <= max )); then
+ echo "$ann"
+ fi
+ done
+ }
+
+ # ─── Output ─────────────────────────────────────────────────────────
+ local date_str
+ date_str=$(date +%Y-%m-%d)
+
+ # Heredoc body is emitted verbatim (with variable expansion) to stdout.
+ cat <<HEADER
+# Lists.toml — Built-in TLD lists for hoardom
+# Auto-generated on ${date_str} from ${source_summary}
+#
+# Format:
+# "tld" — TLD has RDAP support, lookup works directly
+# "tld:whois.server" — No RDAP: use this WHOIS server for fallback
+#
+# ${buyable_count} purchasable TLDs (handshake/sub-TLDs excluded)
+# ${rdap_hit} have RDAP, ${whois_hit} need WHOIS override, ${bare_hit} will auto-probe
+#
+# Lists:
+# standard — common desirable TLDs (com, net, org, io, dev, ...)
+# decent — very best short punchy TLDs for domain hacking
+# swiss — standard-like but with Swiss/Central European TLDs prioritized
+# country — all 2-letter country-code TLDs
+# two — all 2-letter TLDs
+# three — all TLDs with 3 or fewer letters
+# four — all TLDs with exactly 4 letters
+# long — all TLDs with 5+ letters
+# all — everything
+
+HEADER
+
+ echo "standard = ["
+ annotate_list standard_tlds | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "decent = ["
+ annotate_list decent_tlds | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "swiss = ["
+ annotate_list swiss_tlds | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "country = ["
+ printf '%s\n' "${annotated_cc[@]}" | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "two = ["
+ filter_annotated_by_length 2 2 | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "three = ["
+ filter_annotated_by_length 2 3 | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "four = ["
+ filter_annotated_by_length 4 4 | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "long = ["
+ filter_annotated_by_length 5 99 | to_toml_array
+ echo "]"
+ echo ""
+
+ echo "all = ["
+ printf '%s\n' "${annotated_all[@]}" | to_toml_array
+ echo "]"
+}
+
+# ─── Main ───────────────────────────────────────────────────────────────────
+# Entry point: parse CLI args, fetch the selected sources, merge/filter the
+# TLD sets, then dispatch on the output mode (raw/toml/diff/template/summary).
+main() {
+ local mode="summary"
+ local source="all"
+ local all_sources=false
+
+ for arg in "$@"; do
+ case "$arg" in
+ --raw) mode="raw" ;;
+ --toml) mode="toml" ;;
+ --diff) mode="diff" ;;
+ --template) mode="template" ;;
+ --all-sources) all_sources=true ;;
+ porkbun) source="porkbun" ;;
+ inwx) source="inwx" ;;
+ ovh) source="ovh" ;;
+ iana) source="iana" ;;
+ rdap) source="rdap" ;;
+ tldlist) source="tldlist" ;;
+ --help|-h)
+ echo "Usage: $0 [source] [--raw|--toml|--diff|--template] [--all-sources]"
+ echo ""
+ echo "Sources: porkbun, ovh, inwx, iana, rdap, tldlist"
+ echo ""
+ echo "Flags:"
+ echo " --raw Output raw TLD list (one per line)"
+ echo " --toml Output TOML-ready arrays"
+ echo " --diff Compare against current Lists.toml"
+ echo " --template Generate full Lists.toml with whois overrides"
+ echo " --all-sources Include tld-list.com for extra coverage (used as"
+ echo " a filter: only TLDs also in a registrar are kept)"
+ exit 0 ;;
+ esac
+ done
+
+ local porkbun_tlds="" inwx_tlds="" ovh_tlds="" iana_tlds="" rdap_tlds="" tldlist_tlds=""
+ local porkbun_count=0 inwx_count=0 ovh_count=0 iana_count=0 rdap_count=0 tldlist_count=0
+
+ # Template mode needs all registrar sources + rdap regardless of source filter
+ if [[ "$mode" == "template" ]]; then
+ source="all"
+ fi
+
+ # ── Fetch from selected sources ──
+ # In the count assignments below, "grep -c ." exits 1 when the count is 0;
+ # "|| true" keeps set -e from aborting while the captured output stays "0".
+
+ if [[ "$source" == "all" || "$source" == "porkbun" ]]; then
+ if porkbun_file=$(fetch_porkbun); then
+ porkbun_tlds=$(parse_porkbun "$porkbun_file")
+ porkbun_count=$(echo "$porkbun_tlds" | grep -c . || true)
+ fi
+ fi
+
+ if [[ "$source" == "all" || "$source" == "ovh" ]]; then
+ if ovh_file=$(fetch_ovh); then
+ ovh_tlds=$(parse_ovh "$ovh_file")
+ ovh_count=$(echo "$ovh_tlds" | grep -c . || true)
+ fi
+ fi
+
+ if [[ "$source" == "all" || "$source" == "inwx" ]]; then
+ if inwx_file=$(fetch_inwx 2>/dev/null); then
+ inwx_tlds=$(parse_inwx "$inwx_file")
+ inwx_count=$(echo "$inwx_tlds" | grep -c . || true)
+ fi
+ fi
+
+ if [[ "$source" == "all" || "$source" == "iana" ]]; then
+ if iana_file=$(fetch_iana); then
+ iana_tlds=$(parse_iana "$iana_file")
+ iana_count=$(echo "$iana_tlds" | grep -c . || true)
+ fi
+ fi
+
+ if [[ "$source" == "all" || "$source" == "rdap" ]]; then
+ if rdap_file=$(fetch_rdap); then
+ rdap_tlds=$(parse_rdap_tlds "$rdap_file")
+ rdap_count=$(echo "$rdap_tlds" | grep -c . || true)
+ fi
+ fi
+
+ if [[ "$all_sources" == true || "$source" == "tldlist" ]]; then
+ if tldlist_file=$(fetch_tldlist); then
+ tldlist_tlds=$(parse_tldlist "$tldlist_file")
+ tldlist_count=$(echo "$tldlist_tlds" | grep -c . || true)
+ fi
+ fi
+
+ # ── Filter porkbun: no handshake, no sub-TLDs ──
+ local porkbun_filtered=""
+ if [[ -n "$porkbun_tlds" ]]; then
+ local porkbun_file="$CACHE_DIR/porkbun.json"
+ if command -v jq &>/dev/null && [[ -f "$porkbun_file" ]]; then
+ porkbun_filtered=$(jq -r '
+ .pricing // {} | to_entries[] |
+ select(.key | contains(".") | not) |
+ select(.value.specialType // "" | test("handshake") | not) |
+ .key
+ ' "$porkbun_file" 2>/dev/null | sort -u)
+ else
+ porkbun_filtered=$(echo "$porkbun_tlds" | grep -v '\.' | sort -u)
+ fi
+ fi
+
+ # ── Merge all registrar-confirmed purchasable TLDs ──
+ # Only TLDs that have pricing at a real registrar are included
+ local registrar_tlds
+ registrar_tlds=$(echo -e "${porkbun_filtered}\n${ovh_tlds}\n${inwx_tlds}" | grep -E '^[a-z]' | sort -u | filter_skip)
+
+ # If --all-sources, also include tld-list.com TLDs that appear in at least
+ # one registrar (cross-reference = purchasable + known to community list)
+ if [[ "$all_sources" == true && -n "$tldlist_tlds" ]]; then
+ # tld-list.com entries that are ALSO in a registrar = confirmed purchasable
+ local tldlist_confirmed
+ tldlist_confirmed=$(comm -12 <(echo "$tldlist_tlds") <(echo "$registrar_tlds") 2>/dev/null || true)
+ # They're already in registrar_tlds, so this just validates.
+ # More useful: tld-list entries NOT in any registrar = brand/reserved (skip them)
+ local tldlist_extra
+ tldlist_extra=$(comm -23 <(echo "$tldlist_tlds") <(echo "$registrar_tlds") 2>/dev/null || true)
+ local extra_count
+ # NOTE(review): "grep -c . || echo 0" emits "0\n0" on empty input because
+ # grep -c prints 0 AND exits 1 — confirm and use "|| true" like above.
+ extra_count=$(echo "$tldlist_extra" | grep -c . || echo 0)
+ echo -e " ${YELLOW}tld-list.com:${NC} $extra_count TLDs with no registrar pricing (brand/reserved, excluded)" >&2
+ fi
+
+ local all_tlds="$registrar_tlds"
+ local all_cctlds
+ all_cctlds=$(echo "$all_tlds" | filter_cctlds)
+
+ # Build source summary string for template header
+ local sources_used=()
+ [[ $porkbun_count -gt 0 ]] && sources_used+=("Porkbun")
+ [[ $ovh_count -gt 0 ]] && sources_used+=("OVH")
+ [[ $inwx_count -gt 0 ]] && sources_used+=("INWX")
+ local source_summary
+ local joined
+ joined=$(printf " + %s" "${sources_used[@]}")
+ joined="${joined:3}" # strip leading " + "
+ source_summary="${joined} + RDAP bootstrap + WHOIS server list"
+
+ case "$mode" in
+ raw)
+ echo "$all_tlds"
+ ;;
+ toml)
+ echo -e "${BOLD}# Purchasable TLDs from all registrars ($(echo "$all_tlds" | wc -l | tr -d ' ') total)${NC}"
+ echo "all_registrars = ["
+ echo "$all_tlds" | to_toml_array
+ echo "]"
+ echo ""
+ echo "# Country-code TLDs (purchasable)"
+ echo "cctlds = ["
+ echo "$all_cctlds" | to_toml_array
+ echo "]"
+ ;;
+ diff)
+ echo -e "${BOLD}Comparing registrar data vs current Lists.toml${NC}"
+ echo ""
+ local current_all current_country
+ current_all=$(parse_current_lists "all")
+ current_country=$(parse_current_lists "country")
+
+ # TLDs in registrars but NOT in our 'all' list
+ if [[ -n "$all_tlds" ]]; then
+ local missing_from_all
+ missing_from_all=$(comm -23 <(echo "$all_tlds" | filter_short_tlds | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
+ if [[ -n "$missing_from_all" ]]; then
+ local mc
+ mc=$(echo "$missing_from_all" | wc -l | tr -d ' ')
+ echo -e "${YELLOW}TLDs at registrars but NOT in our 'all' list ($mc):${NC}"
+ echo "$missing_from_all" | tr '\n' ' '
+ echo ""
+ echo ""
+ fi
+
+ # ccTLDs at registrars but NOT in our 'country' list
+ local missing_cc
+ missing_cc=$(comm -23 <(echo "$all_cctlds" | sort) <(echo "$current_country" | sort) 2>/dev/null || true)
+ if [[ -n "$missing_cc" ]]; then
+ local mcc
+ mcc=$(echo "$missing_cc" | wc -l | tr -d ' ')
+ echo -e "${YELLOW}ccTLDs at registrars but NOT in 'country' list ($mcc):${NC}"
+ echo "$missing_cc" | tr '\n' ' '
+ echo ""
+ echo ""
+ fi
+
+ # TLDs in our 'all' list but NOT at any registrar
+ # (comm -13 keeps lines unique to the second input, i.e. current_all)
+ local extra
+ extra=$(comm -13 <(echo "$all_tlds" | sort) <(echo "$current_all" | sort) 2>/dev/null || true)
+ if [[ -n "$extra" ]]; then
+ local ec
+ ec=$(echo "$extra" | wc -l | tr -d ' ')
+ echo -e "${CYAN}TLDs in our 'all' list but NOT at any registrar ($ec):${NC}"
+ echo "$extra" | tr '\n' ' '
+ echo ""
+ echo ""
+ fi
+ fi
+
+ # Check which of our TLDs have RDAP servers
+ if [[ -n "$rdap_tlds" && -n "$current_all" ]]; then
+ local no_rdap
+ no_rdap=$(comm -23 <(echo "$current_all" | sort) <(echo "$rdap_tlds" | sort) 2>/dev/null || true)
+ if [[ -n "$no_rdap" ]]; then
+ local nrc
+ nrc=$(echo "$no_rdap" | wc -l | tr -d ' ')
+ echo -e "${RED}TLDs in our lists with NO RDAP server ($nrc) — need WHOIS fallback:${NC}"
+ echo "$no_rdap" | tr '\n' ' '
+ echo ""
+ fi
+ fi
+ ;;
+ template)
+ generate_template "$registrar_tlds" "$rdap_tlds" "$source_summary"
+ ;;
+ summary)
+ echo -e "${BOLD}═══ TLD Source Summary ═══${NC}"
+ echo ""
+ [[ $porkbun_count -gt 0 ]] && echo -e " ${GREEN}Porkbun${NC} $(echo "$porkbun_filtered" | grep -c . || echo 0) TLDs ($(echo "$porkbun_filtered" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+ [[ $ovh_count -gt 0 ]] && echo -e " ${GREEN}OVH${NC} $ovh_count TLDs ($(echo "$ovh_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+ [[ $inwx_count -gt 0 ]] && echo -e " ${GREEN}INWX${NC} $inwx_count TLDs ($(echo "$inwx_tlds" | filter_cctlds | wc -l | tr -d ' ') ccTLDs)"
+ [[ $tldlist_count -gt 0 ]] && echo -e " ${GREEN}tld-list.com${NC} $tldlist_count TLDs (community registry, no pricing)"
+ [[ $iana_count -gt 0 ]] && echo -e " ${GREEN}IANA${NC} $iana_count TLDs"
+ [[ $rdap_count -gt 0 ]] && echo -e " ${GREEN}RDAP${NC} $rdap_count TLDs with lookup servers"
+ echo ""
+
+ # Show what each registrar uniquely contributes
+ if [[ $porkbun_count -gt 0 && $ovh_count -gt 0 ]]; then
+ local ovh_unique inwx_unique
+ ovh_unique=$(comm -23 <(echo "$ovh_tlds" | sort) <(echo "$porkbun_filtered" | sort) | wc -l | tr -d ' ')
+ echo -e " ${CYAN}OVH adds${NC} $ovh_unique TLDs not on Porkbun"
+ if [[ $inwx_count -gt 0 ]]; then
+ inwx_unique=$(comm -23 <(echo "$inwx_tlds" | sort) <(echo -e "${porkbun_filtered}\n${ovh_tlds}" | sort -u) | wc -l | tr -d ' ')
+ echo -e " ${CYAN}INWX adds${NC} $inwx_unique TLDs not on Porkbun/OVH"
+ fi
+ echo ""
+ fi
+
+ echo -e " ${BOLD}Merged purchasable:${NC} $(echo "$all_tlds" | wc -l | tr -d ' ') TLDs"
+ echo -e " ${BOLD}Merged ccTLDs:${NC} $(echo "$all_cctlds" | wc -l | tr -d ' ')"
+ echo ""
+ echo -e " Cached data in: ${CYAN}$CACHE_DIR${NC}"
+ echo -e " Use ${BOLD}--diff${NC} to compare against Lists.toml"
+ echo -e " Use ${BOLD}--toml${NC} to output TOML-ready arrays"
+ echo -e " Use ${BOLD}--template${NC} to generate template Lists.toml"
+ echo -e " Use ${BOLD}--all-sources${NC} to also fetch tld-list.com"
+ echo -e " Use ${BOLD}--raw${NC} for raw TLD list (one per line)"
+ ;;
+ esac
+}
+
+main "$@"
diff --git a/src/cli.rs b/src/cli.rs
new file mode 100644
index 0000000..297e4e3
--- /dev/null
+++ b/src/cli.rs
@@ -0,0 +1,203 @@
+use clap::Parser;
+use std::path::PathBuf;
+
+#[derive(Parser, Debug)]
+#[command(name = "hoardom", version = "0.0.1", about = "Domain hoarding made less painful")]
+#[command(disable_help_flag = true, disable_version_flag = true)]
+pub struct Args {
+ /// One or more domain names to search for
+ #[arg(value_name = "DOMAIN")]
+ pub domains: Vec<String>,
+
+ // -- Mode --
+ /// Default non interactive mode
+ #[arg(long = "cli", default_value_t = false)]
+ pub cli_mode: bool,
+
+ /// Easy to use Terminal based Graphical user interface
+ #[arg(long = "tui", default_value_t = false)]
+ pub tui_mode: bool,
+
+ // -- Basics --
+ /// Define where environement file should be saved
+ #[arg(short = 'e', long = "environement")]
+ pub env_path: Option<PathBuf>,
+
+ /// Show all in list even when unavailable
+ #[arg(short = 'a', long = "all", default_value_t = false)]
+ pub show_all: bool,
+
+ // -- Advanced --
+ /// Out in CSV, Path is optional. If path isnt given will be printed to terminal with no logs
+ #[arg(short = 'c', long = "csv")]
+ pub csv: Option<Option<PathBuf>>,
+
+ /// Built in TLD list to use (from Lists.toml)
+ #[arg(short = 'l', long = "list")]
+ pub tld_list: Option<String>,
+
+ /// Import a custom toml list for this session
+ #[arg(short = 'i', long = "import-filter")]
+ pub import_filter: Option<PathBuf>,
+
+ /// Set certain TLDs to show up as first result (comma separated)
+ #[arg(short = 't', long = "top", value_delimiter = ',')]
+ pub top_tlds: Option<Vec<String>>,
+
+ /// Only search these TLDs (comma separated)
+ #[arg(short = 'o', long = "onlytop", value_delimiter = ',')]
+ pub only_top: Option<Vec<String>>,
+
+ /// How many suggestions to look up and try to show (defaults to 0 aka disabled)
+ #[arg(short = 's', long = "suggestions")]
+ pub suggestions: Option<usize>,
+
+ // -- Various --
+ /// Number of concurrent lookup requests (default: 1)
+ #[arg(short = 'j', long = "jobs")]
+ pub jobs: Option<u8>,
+
+ /// Set the global delay in seconds between lookup requests
+ #[arg(short = 'D', long = "delay")]
+ pub delay: Option<f64>,
+
+ /// Retry NUMBER amount of times if domain lookup errors out
+ #[arg(short = 'R', long = "retry")]
+ pub retry: Option<u32>,
+
+ /// Verbose output for debugging
+ #[arg(short = 'V', long = "verbose", default_value_t = false)]
+ pub verbose: bool,
+
+ /// Search for names/domains in text file, one domain per new line
+ #[arg(short = 'A', long = "autosearch")]
+ pub autosearch: Option<PathBuf>,
+
+ /// Use a monochrome color scheme
+ #[arg(short = 'C', long = "no-color", default_value_t = false)]
+ pub no_color: bool,
+
+ /// Do not use unicode only plain ASCII
+ #[arg(short = 'U', long = "no-unicode", default_value_t = false)]
+ pub no_unicode: bool,
+
+ /// Disable the mouse integration for TUI
+ #[arg(short = 'M', long = "no-mouse", default_value_t = false)]
+ pub no_mouse: bool,
+
+ /// Force refresh the RDAP bootstrap cache
+ #[arg(short = 'r', long = "refresh-cache", default_value_t = false)]
+ pub refresh_cache: bool,
+
+ /// Basic Help
+ #[arg(short = 'h', long = "help", default_value_t = false)]
+ pub help: bool,
+
+ /// Show full help
+ #[arg(short = 'H', long = "fullhelp", default_value_t = false)]
+ pub fullhelp: bool,
+}
+
+impl Args {
+ pub fn is_tui(&self) -> bool {
+ self.tui_mode
+ }
+
+ pub fn effective_list(&self) -> String {
+ self.tld_list
+ .clone()
+ .map(|s| s.to_lowercase())
+ .unwrap_or_else(|| crate::tlds::default_list_name().to_string())
+ }
+
+ pub fn effective_suggestions(&self) -> usize {
+ self.suggestions.unwrap_or(0)
+ }
+
+ pub fn effective_retry(&self) -> u32 {
+ self.retry.unwrap_or(1)
+ }
+
+ pub fn effective_delay(&self) -> f64 {
+ self.delay.unwrap_or(0.0)
+ }
+
+ pub fn effective_jobs(&self) -> u8 {
+ self.jobs.unwrap_or(1).max(1)
+ }
+}
+
/// Print the short help text (triggered by -h / --help).
///
/// Help output is hand-rolled because clap's automatic help/version flags
/// are disabled on `Args` (-V is repurposed for --verbose).
pub fn print_help() {
    println!(
        "hoardom {} - Domain hoarding made less painful
Mode :
--cli Default none interactive mode
--tui Easy to use Terminal based Graphical user interface

Basics :
-e --environement=PATH Define where .hoardom folder should be
 Defaults to /home/USER/.hoardom/
 Stores settings, imported lists, favs, cache etc.
-a --all Show all in list even when unavailable
 (Unless changed after launch in TUI mode)

-H --fullhelp Show full help",
        env!("CARGO_PKG_VERSION")
    );
}
+
/// Print the extended help text (triggered by -H / --fullhelp).
///
/// Interpolates the package version and the names of the built-in TLD
/// lists from Lists.toml. The body string is user-facing output and is
/// intentionally left verbatim (including the easter-egg line).
pub fn print_fullhelp() {
    println!(
        "hoardom {} - they are inside your walls!

## if you see this send a fax to +41 43 543 04 47 that mentions hoardom to know your fate ##

Mode :
--cli Default none interactive mode
--tui Easy to use Terminal based Graphical user interface

Basics :
-e --environement=PATH Define where .hoardom folder should be
 Defaults to /home/USER/.hoardom/
 Stores settings, imported lists, favs, cache etc.
-a --all Show all in list even when unavailable
 (Unless changed after launch in TUI mode)

Advanced :
-c --csv=PATH Out in CSV,Path is optional
 if path isnt given will be printed to terminal with no logs
-l --list=LIST Built in TLD Lists are : {}
 Selects which list is applied
 (Unless changed after launch in TUI mode)
-i --import-filter=PATH Import a custom toml list for this session
-t --top=TLD,TLD Set certain TLDs to show up as first result
 for when you need a domain in your country or for searching
 a specific one.
 (Unless changed after launch in TUI mode)
-o --onlytop=TLD,TLD Only search these TLDs
-s --suggestions=NUMBER How many suggestions to look up and try to show from list
 Number of available alternativ domains to try and find when
 Searching for full domain name directly.
 Defaults to 0 (aka disabled)

Various :
-j --jobs=NUMBER Number of concurrent lookup requests
 How many TLDs to look up at the same time (default: 1)
-D --delay=DELAY Set the global delay in Seconds between lookup requests
-R --retry=NUMBER Retry NUMBER amount of times if domain lookup errors out
-V --verbose Verbose output for debugging
-A --autosearch=FILE Search for names/domains in text file one domain per new line,
 lines starting with invalid character for a domain are ignored
 (allows for commenting)
-C --no-color Use a monochrome color scheme
-U --no-unicode Do not use unicode only plain ASCII
-M --no-mouse Disable the mouse integration for TUI
-r --refresh-cache Force refresh the RDAP bootstrap cache

Help :
-h --help Basic Help
-H --fullhelp You just did this",
        env!("CARGO_PKG_VERSION"),
        crate::tlds::list_names().join(", ")
    );
}
diff --git a/src/config.rs b/src/config.rs
new file mode 100644
index 0000000..650f616
--- /dev/null
+++ b/src/config.rs
@@ -0,0 +1,436 @@
+use serde::{Deserialize, Serialize};
+use std::path::{Path, PathBuf};
+
/// Resolved filesystem locations for hoardom's persistent state.
#[derive(Debug, Clone)]
pub struct HoardomPaths {
    // Full path of config.toml inside the chosen .hoardom root
    pub config_file: PathBuf,
    // Directory for cached downloads (RDAP bootstrap etc.)
    pub cache_dir: PathBuf,
    // false when no writable location could be found (see resolve_paths)
    pub can_save: bool,
    // false disables on-disk caching entirely (in-memory only session)
    pub caching_enabled: bool,
}

impl HoardomPaths {
    /// Path of a named file inside the cache directory.
    pub fn cache_file(&self, name: &str) -> PathBuf {
        self.cache_dir.join(name)
    }
}
+
/// Root of the on-disk config.toml; every section tolerates being absent
/// (serde defaults) so partial or hand-edited files still load.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    #[serde(default)]
    pub settings: Settings,
    #[serde(default)]
    pub cache: CacheSettings,
    // Favorited domains with status tracking (see FavoriteEntry)
    #[serde(default)]
    pub favorites: Vec<FavoriteEntry>,
    // User-imported TLD filter lists, persisted across sessions
    #[serde(default)]
    pub imported_filters: Vec<ImportedFilter>,
    // Free-form notes panel content from the TUI
    #[serde(default)]
    pub scratchpad: String,
}
+
+/// faved domain with its last known status
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FavoriteEntry {
+ pub domain: String,
+ /// last known status: "available", "registered", "error", or "unknown"
+ #[serde(default = "default_fav_status")]
+ pub status: String,
+ /// when it was last checked (RFC 3339)
+ #[serde(default)]
+ pub checked: String,
+ /// true when status changed since last check (shows ! in TUI)
+ #[serde(default)]
+ pub changed: bool,
+}
+
+impl FavoriteEntry {
+ pub fn new(domain: String) -> Self {
+ Self {
+ domain,
+ status: "unknown".to_string(),
+ checked: String::new(),
+ changed: false,
+ }
+ }
+}
+
+fn default_fav_status() -> String {
+ "unknown".to_string()
+}
+
/// User-tunable behavior persisted in the [settings] section of config.toml.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    // Name of the active built-in TLD list (see crate::tlds)
    #[serde(default = "default_tld_list")]
    pub tld_list: String,
    // Show unavailable domains in results too
    #[serde(default)]
    pub show_all: bool,
    // Clear previous results when a new search starts (TUI)
    #[serde(default = "default_clear_on_search")]
    pub clear_on_search: bool,
    // Whether the TUI notes/scratchpad panel is visible
    #[serde(default)]
    pub show_notes_panel: bool,
    // Last paths used for exports, remembered for convenience
    #[serde(default)]
    pub last_fav_export_path: String,
    #[serde(default)]
    pub last_res_export_path: String,
    // TLDs pinned to the top of results
    #[serde(default)]
    pub top_tlds: Vec<String>,
    // Concurrent lookup workers
    #[serde(default = "default_jobs")]
    pub jobs: u8,
    /// error types that shouldnt be retried
    /// valid: "rate_limit", "invalid_tld", "timeout", "unknown"
    #[serde(default = "default_noretry")]
    pub noretry: Vec<String>,
    /// auto config backups on/off
    #[serde(default = "default_backups_enabled")]
    pub backups: bool,
    /// how many backup copies to keep
    #[serde(default = "default_backup_count")]
    pub backup_count: u32,
}
+
/// Cache freshness tracking persisted in the [cache] section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheSettings {
    // RFC 3339 timestamp of the last cache refresh; empty = never refreshed
    #[serde(default)]
    pub last_updated: String,
    /// 0 = never nag about stale cache
    #[serde(default = "default_outdated_cache_days")]
    pub outdated_cache: u32,
    /// auto refresh when outdated if true
    #[serde(default = "default_auto_update")]
    pub auto_update_cache: bool,
}

/// A named TLD list imported by the user (via --import-filter or the TUI).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportedFilter {
    pub name: String,
    pub tlds: Vec<String>,
}
+
// Serde default helpers for the structs above. Kept as free functions
// because #[serde(default = "...")] requires a function path.

fn default_tld_list() -> String {
    crate::tlds::default_list_name().to_string()
}

// Nag about a cache older than a week
fn default_outdated_cache_days() -> u32 {
    7
}

fn default_auto_update() -> bool {
    true
}

fn default_clear_on_search() -> bool {
    true
}

// Default lookup concurrency
fn default_jobs() -> u8 {
    4
}

// Error kinds where an immediate retry cannot help
fn default_noretry() -> Vec<String> {
    vec!["rate_limit".to_string(), "invalid_tld".to_string(), "forbidden".to_string()]
}

fn default_backups_enabled() -> bool {
    true
}

// Keep the six most recent config backups
fn default_backup_count() -> u32 {
    6
}
+
// Manual Default impls mirror the serde field defaults above so that a
// missing section and a missing file produce identical configurations.

impl Default for Settings {
    fn default() -> Self {
        Self {
            tld_list: default_tld_list(),
            show_all: false,
            clear_on_search: default_clear_on_search(),
            show_notes_panel: false,
            last_fav_export_path: String::new(),
            last_res_export_path: String::new(),
            top_tlds: Vec::new(),
            jobs: default_jobs(),
            noretry: default_noretry(),
            backups: default_backups_enabled(),
            backup_count: default_backup_count(),
        }
    }
}

impl Default for CacheSettings {
    fn default() -> Self {
        Self {
            last_updated: String::new(),
            outdated_cache: default_outdated_cache_days(),
            auto_update_cache: default_auto_update(),
        }
    }
}

impl Default for Config {
    fn default() -> Self {
        Self {
            settings: Settings::default(),
            cache: CacheSettings::default(),
            favorites: Vec::new(),
            imported_filters: Vec::new(),
            scratchpad: String::new(),
        }
    }
}
+
/// old config format where favorites were just strings
///
/// Deserialize-only: used by `Config::load` as a fallback so configs
/// written before `FavoriteEntry` existed still load and get migrated.
#[derive(Debug, Deserialize)]
struct LegacyConfig {
    #[serde(default)]
    settings: Settings,
    #[serde(default)]
    cache: CacheSettings,
    // In the legacy format this was a plain list of domain strings
    #[serde(default)]
    favorites: Vec<String>,
    #[serde(default)]
    imported_filters: Vec<ImportedFilter>,
    #[serde(default)]
    scratchpad: String,
}
+
impl Config {
    /// Load config from `path`, tolerating every failure mode:
    /// current format -> legacy format (string favorites, migrated) ->
    /// warning + defaults on parse failure -> silent defaults when the
    /// file does not exist.
    pub fn load(path: &Path) -> Self {
        match std::fs::read_to_string(path) {
            Ok(content) => {
                // Try new format first
                if let Ok(config) = toml::from_str::<Config>(&content) {
                    return config;
                }
                // Fall back to legacy format (favorites as plain strings)
                if let Ok(legacy) = toml::from_str::<LegacyConfig>(&content) {
                    return Config {
                        settings: legacy.settings,
                        cache: legacy.cache,
                        favorites: legacy.favorites.into_iter().map(FavoriteEntry::new).collect(),
                        imported_filters: legacy.imported_filters,
                        scratchpad: legacy.scratchpad,
                    };
                }
                eprintln!("Warning: could not parse config file");
                Config::default()
            }
            Err(_) => Config::default(),
        }
    }

    /// load config and backup it on startup if backups are on
    ///
    /// Backup failure is non-fatal: the loaded config is returned either way.
    pub fn load_with_backup(path: &Path) -> Self {
        let config = Self::load(path);
        if config.settings.backups && path.exists() {
            if let Err(e) = Self::create_backup(path, config.settings.backup_count) {
                eprintln!("Warning: could not create config backup: {}", e);
            }
        }
        config
    }

    /// Serialize and write the config, prefixed with an explanatory header.
    ///
    /// NOTE(review): the header text claims comments are preserved, but this
    /// method regenerates the whole file from serialization — any user
    /// comments outside this header are lost on save. Confirm whether the
    /// claim or the behavior should change.
    pub fn save(&self, path: &Path) -> Result<(), String> {
        // make sure parent dir exists
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)
                .map_err(|e| format!("Failed to create config directory: {}", e))?;
        }

        let body = toml::to_string_pretty(self)
            .map_err(|e| format!("Failed to serialize config: {}", e))?;
        let content = format!("\
# hoardom config - auto saved, comments are preserved on the line theyre on
#
# [settings]
# noretry: error types that shouldnt be retried
# \u{201c}rate_limit\u{201d} - server said slow down, retrying immediately wont help
# \u{201c}invalid_tld\u{201d} - TLD is genuinely broken, no point retrying
# \u{201c}forbidden\u{201d} - server returned 403, access denied, retrying wont fix it
# \u{201c}timeout\u{201d} - uncomment if youd rather skip slow TLDs than wait
# \u{201c}unknown\u{201d} - uncomment to skip any unrecognized errors too
\n{}", body);
        std::fs::write(path, content)
            .map_err(|e| format!("Failed to write config file: {}", e))?;
        Ok(())
    }

    /// copy current config into backups/ folder.
    /// keeps at most `max_count` backups, tosses the oldest.
    /// only call on startup and shutdown - NOT on every save.
    ///
    /// NOTE(review): `max_count == 0` skips pruning entirely, i.e. keeps
    /// unlimited backups rather than none — confirm that is intended.
    pub fn create_backup(config_path: &Path, max_count: u32) -> Result<(), String> {
        let parent = config_path.parent().ok_or("No parent directory")?;
        let backup_dir = parent.join("backups");
        std::fs::create_dir_all(&backup_dir)
            .map_err(|e| format!("Failed to create backup dir: {}", e))?;

        // Timestamp-based filename: config_20260308_143022.toml
        let ts = chrono::Local::now().format("%Y%m%d_%H%M%S");
        let backup_name = format!("config_{}.toml", ts);
        let backup_path = backup_dir.join(&backup_name);

        // dont backup if same-second backup already exists
        if backup_path.exists() {
            return Ok(());
        }

        std::fs::copy(config_path, &backup_path)
            .map_err(|e| format!("Failed to copy config to backup: {}", e))?;

        // prune old backups: sort by name (timestamp order), keep newest N
        // (the filename format sorts lexicographically = chronologically)
        if max_count > 0 {
            let mut backups: Vec<_> = std::fs::read_dir(&backup_dir)
                .map_err(|e| format!("Failed to read backup dir: {}", e))?
                .filter_map(|e| e.ok())
                .filter(|e| {
                    e.file_name()
                        .to_str()
                        .map(|n| n.starts_with("config_") && n.ends_with(".toml"))
                        .unwrap_or(false)
                })
                .collect();

            backups.sort_by_key(|e| e.file_name());

            // ascending order, so the first `excess` entries are the oldest
            let excess = backups.len().saturating_sub(max_count as usize);
            for entry in backups.into_iter().take(excess) {
                let _ = std::fs::remove_file(entry.path());
            }
        }

        Ok(())
    }

    /// replaces filter with same name if theres one already
    pub fn import_filter(&mut self, filter: ImportedFilter) {
        self.imported_filters.retain(|f| f.name != filter.name);
        self.imported_filters.push(filter);
    }

    /// Record "now" (UTC, RFC 3339) as the last cache refresh time.
    pub fn mark_cache_updated(&mut self) {
        self.cache.last_updated = chrono::Utc::now().to_rfc3339();
    }

    /// -> (is_outdated, should_auto_update)
    ///
    /// An empty or unparseable timestamp counts as outdated and triggers
    /// an auto-update regardless of settings.
    pub fn check_cache_status(&self) -> (bool, bool) {
        if self.cache.last_updated.is_empty() {
            // never updated = always outdated, always auto update
            return (true, true);
        }

        let last = match chrono::DateTime::parse_from_rfc3339(&self.cache.last_updated) {
            Ok(dt) => dt.with_timezone(&chrono::Utc),
            Err(_) => return (true, true), // cant parse = treat as outdated
        };

        let now = chrono::Utc::now();
        let age_days = (now - last).num_days() as u32;

        if self.cache.outdated_cache == 0 {
            // warnings disabled, but if auto_update is on, update every run
            return (false, self.cache.auto_update_cache);
        }

        let is_outdated = age_days >= self.cache.outdated_cache;
        let should_auto = is_outdated && self.cache.auto_update_cache;
        (is_outdated, should_auto)
    }
}
+
+pub fn parse_filter_file(path: &PathBuf) -> Result<ImportedFilter, String> {
+ let content = std::fs::read_to_string(path)
+ .map_err(|e| format!("Could not read filter file: {}", e))?;
+ let filter: ImportedFilter = toml::from_str(&content)
+ .map_err(|e| format!("Could not parse filter file: {}", e))?;
+ if filter.name.is_empty() {
+ return Err("Filter file must have a name defined".to_string());
+ }
+ if filter.tlds.is_empty() {
+ return Err("Filter file has no TLDs defined".to_string());
+ }
+ Ok(filter)
+}
+
/// resolve .hoardom dir, tries a few locations:
///
/// priority:
/// 1. explicit path via -e flag -> use as root dir (create .hoardom folder there)
/// 2. debug builds: current directory
/// 3. release builds: home directory
/// 4. fallback: try the other option
/// 5. nothing works: caching disabled, in-memory only
pub fn resolve_paths(explicit: Option<&PathBuf>) -> HoardomPaths {
    // A location "works" if we can create its cache subdirectory.
    let try_setup = |base: PathBuf| -> Option<HoardomPaths> {
        let root = base;
        let config_file = root.join("config.toml");
        let cache_dir = root.join("cache");

        // try to create the directories
        if std::fs::create_dir_all(&cache_dir).is_ok() {
            Some(HoardomPaths {
                config_file,
                cache_dir,
                can_save: true,
                caching_enabled: true,
            })
        } else {
            None
        }
    };

    // explicit path given via -e flag
    if let Some(p) = explicit {
        // if user gave a path, use it as the .hoardom folder root
        // NOTE(review): extension-presence is used as a "this is a file"
        // heuristic — a directory named e.g. "my.data" would be mistaken
        // for a file and its parent used instead. Confirm acceptable.
        let root = if p.extension().is_some() {
            // looks like they pointed at a file, use parent dir
            p.parent().unwrap_or(p).join(".hoardom")
        } else {
            p.clone()
        };
        if let Some(paths) = try_setup(root) {
            return paths;
        }
        // explicit path failed: fall through to the build-specific defaults
    }

    // debug builds: current directory first
    #[cfg(debug_assertions)]
    {
        if let Ok(dir) = std::env::current_dir() {
            if let Some(paths) = try_setup(dir.join(".hoardom")) {
                return paths;
            }
        }
        // debug fallback: try home
        if let Some(home) = dirs::home_dir() {
            if let Some(paths) = try_setup(home.join(".hoardom")) {
                return paths;
            }
        }
    }

    // release builds: home directory first
    #[cfg(not(debug_assertions))]
    {
        if let Some(home) = dirs::home_dir() {
            if let Some(paths) = try_setup(home.join(".hoardom")) {
                return paths;
            }
        }
        // release fallback: try cwd
        if let Ok(dir) = std::env::current_dir() {
            if let Some(paths) = try_setup(dir.join(".hoardom")) {
                return paths;
            }
        }
    }

    // nothing works - disable caching, use a dummy path
    eprintln!("Warning: could not create .hoardom directory anywhere, caching disabled");
    HoardomPaths {
        config_file: PathBuf::from(".hoardom/config.toml"),
        cache_dir: PathBuf::from(".hoardom/cache"),
        can_save: false,
        caching_enabled: false,
    }
}
+
diff --git a/src/lookup.rs b/src/lookup.rs
new file mode 100644
index 0000000..f5b3177
--- /dev/null
+++ b/src/lookup.rs
@@ -0,0 +1,860 @@
+use crate::tlds::WhoisOverrides;
+use crate::types::{DomainResult, DomainStatus, ErrorKind};
+use futures::stream::{self, StreamExt};
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::time::Duration;
+
+#[cfg(feature = "builtin-whois")]
+use tokio::io::{AsyncReadExt, AsyncWriteExt};
+#[cfg(feature = "builtin-whois")]
+use tokio::net::TcpStream;
+
+// IANA RDAP bootstrap URL, set in Cargo.toml [package.metadata.hoardom]
+const RDAP_BOOTSTRAP_URL: &str = env!("HOARDOM_RDAP_BOOTSTRAP_URL");
+
// TLD -> RDAP server map, grabbed once and reused
pub struct RdapBootstrap {
    // lowercase TLD -> RDAP base URL (trailing slash stripped at parse time)
    tld_map: HashMap<String, String>,
    // original bootstrap JSON body, kept so save_cache() can persist it verbatim
    raw_json: Option<String>,
}
+
impl RdapBootstrap {
    /// Download the IANA RDAP bootstrap registry and build the TLD map.
    /// Keeps the raw body so it can be cached to disk later.
    pub async fn fetch(client: &reqwest::Client, verbose: bool) -> Result<Self, String> {
        if verbose {
            eprintln!("[verbose] Fetching RDAP bootstrap from {}", RDAP_BOOTSTRAP_URL);
        }

        let resp = client
            .get(RDAP_BOOTSTRAP_URL)
            .send()
            .await
            .map_err(|e| format!("Failed to fetch RDAP bootstrap: {}", e))?;

        if !resp.status().is_success() {
            return Err(format!("RDAP bootstrap returned HTTP {}", resp.status()));
        }

        let body = resp
            .text()
            .await
            .map_err(|e| format!("Failed to read RDAP bootstrap body: {}", e))?;

        let json: serde_json::Value = serde_json::from_str(&body)
            .map_err(|e| format!("Failed to parse RDAP bootstrap JSON: {}", e))?;

        let tld_map = Self::parse_bootstrap_json(&json);

        if verbose {
            eprintln!("[verbose] RDAP bootstrap loaded, {} TLDs mapped", tld_map.len());
        }

        Ok(Self { tld_map, raw_json: Some(body) })
    }

    /// Build a bootstrap from a previously cached JSON file (see save_cache).
    pub fn load_cached(cache_path: &Path, verbose: bool) -> Result<Self, String> {
        if verbose {
            eprintln!("[verbose] Loading cached RDAP bootstrap from {}", cache_path.display());
        }
        let body = std::fs::read_to_string(cache_path)
            .map_err(|e| format!("Could not read cached bootstrap: {}", e))?;
        let json: serde_json::Value = serde_json::from_str(&body)
            .map_err(|e| format!("Could not parse cached bootstrap: {}", e))?;
        let tld_map = Self::parse_bootstrap_json(&json);
        if verbose {
            eprintln!("[verbose] Cached RDAP bootstrap loaded, {} TLDs mapped", tld_map.len());
        }
        Ok(Self { tld_map, raw_json: Some(body) })
    }

    /// Write the raw bootstrap JSON to disk. No-op when this instance was
    /// built without a raw body.
    pub fn save_cache(&self, cache_path: &Path) -> Result<(), String> {
        if let Some(ref json) = self.raw_json {
            if let Some(parent) = cache_path.parent() {
                std::fs::create_dir_all(parent)
                    .map_err(|e| format!("Failed to create cache dir: {}", e))?;
            }
            std::fs::write(cache_path, json)
                .map_err(|e| format!("Failed to write cache file: {}", e))?;
        }
        Ok(())
    }

    /// Flatten the bootstrap document into tld -> base-URL.
    /// Malformed entries are skipped; only the first URL of each service
    /// entry is used.
    fn parse_bootstrap_json(json: &serde_json::Value) -> HashMap<String, String> {
        let mut tld_map = HashMap::new();
        // bootstrap format: { "services": [ [ ["tld1", "tld2"], ["https://rdap.server.example/"] ], ... ] }
        if let Some(services) = json.get("services").and_then(|s| s.as_array()) {
            for service in services {
                if let Some(arr) = service.as_array() {
                    if arr.len() >= 2 {
                        let tlds = arr[0].as_array();
                        let urls = arr[1].as_array();
                        if let (Some(tlds), Some(urls)) = (tlds, urls) {
                            if let Some(base_url) = urls.first().and_then(|u| u.as_str()) {
                                // normalize so callers can append /domain/... paths
                                let base = base_url.trim_end_matches('/').to_string();
                                for tld in tlds {
                                    if let Some(t) = tld.as_str() {
                                        tld_map.insert(t.to_lowercase(), base.clone());
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        tld_map
    }

    /// RDAP base URL for a TLD (case-insensitive), if the registry lists one.
    pub fn get_server(&self, tld: &str) -> Option<&String> {
        self.tld_map.get(&tld.to_lowercase())
    }
}
+
+pub async fn lookup_domain(
+ client: &reqwest::Client,
+ bootstrap: &RdapBootstrap,
+ whois_overrides: &WhoisOverrides,
+ name: &str,
+ tld: &str,
+ verbose: bool,
+) -> DomainResult {
+ let full = format!("{}.{}", name, tld);
+
+ let base_url = match bootstrap.get_server(tld) {
+ Some(url) => url.clone(),
+ None => {
+ // no RDAP server for this TLD, fall back to WHOIS
+ if verbose {
+ eprintln!("[verbose] No RDAP server for {}, falling back to WHOIS", tld);
+ }
+ return whois_lookup(whois_overrides, name, tld, verbose).await;
+ }
+ };
+
+ let url = format!("{}/domain/{}", base_url, full);
+
+ if verbose {
+ eprintln!("[verbose] Looking up: {}", url);
+ }
+
+ let resp = match client.get(&url).send().await {
+ Ok(r) => r,
+ Err(e) => {
+ if verbose {
+ eprintln!("[verbose] Request error for {}: {}", full, e);
+ }
+ let kind = if e.is_timeout() {
+ ErrorKind::Timeout
+ } else {
+ ErrorKind::Unknown
+ };
+ return DomainResult::new(name, tld, DomainStatus::Error {
+ kind,
+ message: "unknown error".to_string(),
+ });
+ }
+ };
+
+ let status_code = resp.status();
+
+ if verbose {
+ eprintln!("[verbose] {} -> HTTP {}", full, status_code);
+ }
+
+ // 404 = not found in RDAP = domain is available (not registered)
+ if status_code == 404 {
+ return DomainResult::new(name, tld, DomainStatus::Available);
+ }
+
+ // 400 = probably invalid query
+ if status_code == 400 {
+ return DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::InvalidTld,
+ message: "invalid tld".to_string(),
+ });
+ }
+
+ // 429 = rate limited
+ if status_code == 429 {
+ return DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::RateLimit,
+ message: "rate limited".to_string(),
+ });
+ }
+
+ // 403 = forbidden (some registries block queries)
+ if status_code == 403 {
+ return DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::Forbidden,
+ message: "forbidden".to_string(),
+ });
+ }
+
+ // anything else thats not success
+ if !status_code.is_success() {
+ return DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::Unknown,
+ message: format!("HTTP {}", status_code),
+ });
+ }
+
+ // 200 = domain exists, try to parse expiry from RDAP json
+ let expiry = match resp.json::<serde_json::Value>().await {
+ Ok(json) => extract_expiry(&json),
+ Err(_) => None,
+ };
+
+ DomainResult::new(name, tld, DomainStatus::Registered { expiry })
+}
+
+fn extract_expiry(json: &serde_json::Value) -> Option<String> {
+ // RDAP stores events as an array, expiration is eventAction = "expiration"
+ if let Some(events) = json.get("events").and_then(|e| e.as_array()) {
+ for event in events {
+ if let Some(action) = event.get("eventAction").and_then(|a| a.as_str()) {
+ if action == "expiration" {
+ if let Some(date) = event.get("eventDate").and_then(|d| d.as_str()) {
+ // RDAP dates are ISO 8601, just grab the date part
+ return Some(date.chars().take(10).collect());
+ }
+ }
+ }
+ }
+ }
+ None
+}
+
+// ---- WHOIS fallback for TLDs not in RDAP bootstrap ----
+
// -- No whois feature: just return an error --
// Compiled only when both whois backends are disabled; RDAP-less TLDs then
// simply fail instead of being probed.
#[cfg(not(any(feature = "system-whois", feature = "builtin-whois")))]
async fn whois_lookup(_whois_overrides: &WhoisOverrides, name: &str, tld: &str, _verbose: bool) -> DomainResult {
    // NOTE(review): InvalidTld appears chosen so the default noretry list
    // suppresses pointless retries for these TLDs — confirm against the
    // retry filtering in main/config.
    DomainResult::new(name, tld, DomainStatus::Error {
        kind: ErrorKind::InvalidTld,
        message: "no RDAP server (whois disabled)".to_string(),
    })
}
+
// -- System whois: shells out to the systems whois binary --
// Command name and extra flags are baked in at compile time via build.rs
// (see [package.metadata.hoardom] in Cargo.toml).
#[cfg(feature = "system-whois")]
async fn whois_lookup(_whois_overrides: &WhoisOverrides, name: &str, tld: &str, verbose: bool) -> DomainResult {
    let full = format!("{}.{}", name, tld);
    let whois_cmd = env!("HOARDOM_WHOIS_CMD");
    let whois_flags = env!("HOARDOM_WHOIS_FLAGS");

    if verbose {
        if whois_flags.is_empty() {
            eprintln!("[verbose] System WHOIS: {} {}", whois_cmd, full);
        } else {
            eprintln!("[verbose] System WHOIS: {} {} {}", whois_cmd, whois_flags, full);
        }
    }

    let mut cmd = tokio::process::Command::new(whois_cmd);
    // add flags if any are configured
    if !whois_flags.is_empty() {
        for flag in whois_flags.split_whitespace() {
            cmd.arg(flag);
        }
    }
    cmd.arg(&full);

    // Hard 15 s cap on the whole child process, some whois servers hang.
    let output = match tokio::time::timeout(
        Duration::from_secs(15),
        cmd.output(),
    ).await {
        Ok(Ok(out)) => out,
        Ok(Err(e)) => {
            // spawning/IO failed (e.g. whois binary missing)
            if verbose {
                eprintln!("[verbose] System whois error for {}: {}", full, e);
            }
            return DomainResult::new(name, tld, DomainStatus::Error {
                kind: ErrorKind::Unknown,
                message: format!("whois command failed: {}", e),
            });
        }
        Err(_) => {
            if verbose {
                eprintln!("[verbose] System whois timeout for {}", full);
            }
            return DomainResult::new(name, tld, DomainStatus::Error {
                kind: ErrorKind::Timeout,
                message: "whois timeout".to_string(),
            });
        }
    };

    let response_str = String::from_utf8_lossy(&output.stdout);

    if verbose {
        eprintln!("[verbose] WHOIS response for {} ({} bytes)", full, output.stdout.len());
    }

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        if verbose {
            eprintln!("[verbose] whois stderr: {}", stderr.trim());
        }
        // some whois commands exit non-zero for "not found" but still give useful stdout
        if !response_str.is_empty() {
            return parse_whois_response(name, tld, &response_str);
        }
        return DomainResult::new(name, tld, DomainStatus::Error {
            kind: ErrorKind::Unknown,
            message: "whois command returned error".to_string(),
        });
    }

    parse_whois_response(name, tld, &response_str)
}
+
+// -- Builtin whois: raw TCP to whois servers directly --
+
/// try a whois server, returns the response string or errors out
///
/// Raw WHOIS protocol: connect to port 43, send "domain\r\n", read until
/// the server closes the connection. Timeouts: 4 s connect, 8 s read.
/// Error strings are static so the caller can match on "timeout".
#[cfg(feature = "builtin-whois")]
async fn try_whois_server(server: &str, domain: &str, verbose: bool) -> Result<String, &'static str> {
    let addr = format!("{}:43", server);

    let stream = match tokio::time::timeout(
        Duration::from_secs(4),
        TcpStream::connect(&addr),
    ).await {
        Ok(Ok(s)) => s,
        Ok(Err(_)) => return Err("connect error"),
        Err(_) => return Err("connect timeout"),
    };

    if verbose {
        eprintln!("[verbose] WHOIS connected: {} -> {}", domain, server);
    }

    let (mut reader, mut writer) = stream.into_split();

    let query = format!("{}\r\n", domain);
    if writer.write_all(query.as_bytes()).await.is_err() {
        return Err("write error");
    }

    let mut response = Vec::new();
    match tokio::time::timeout(
        Duration::from_secs(8),
        reader.read_to_end(&mut response),
    ).await {
        Ok(Ok(_)) => {}
        Ok(Err(_)) => return Err("read error"),
        Err(_) => return Err("read timeout"),
    }

    // lossy conversion: some registries reply in non-UTF-8 encodings
    Ok(String::from_utf8_lossy(&response).to_string())
}
+
+/// candidate whois servers for a TLD based on common naming patterns
+#[cfg(feature = "builtin-whois")]
fn whois_candidates(tld: &str) -> Vec<String> {
    // most registries follow one of these patterns
    let prefixes = ["whois.nic.", "whois."];
    let mut servers: Vec<String> = prefixes.iter().map(|p| format!("{}{}", p, tld)).collect();
    servers.push(format!("{}.whois-servers.net", tld));
    servers
}
+
+#[cfg(feature = "builtin-whois")]
+async fn whois_lookup(whois_overrides: &WhoisOverrides, name: &str, tld: &str, verbose: bool) -> DomainResult {
+ let full = format!("{}.{}", name, tld);
+
+ // if Lists.toml has an explicit server ("tld:server"), use ONLY that one
+ if let Some(server) = whois_overrides.get_server(tld) {
+ if verbose {
+ eprintln!("[verbose] WHOIS (override): {} -> {}", full, server);
+ }
+ return match try_whois_server(server, &full, verbose).await {
+ Ok(resp) if !resp.is_empty() => parse_whois_response(name, tld, &resp),
+ Ok(_) => DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::Unknown,
+ message: "empty whois response".to_string(),
+ }),
+ Err(e) => DomainResult::new(name, tld, DomainStatus::Error {
+ kind: if e.contains("timeout") { ErrorKind::Timeout } else { ErrorKind::Unknown },
+ message: format!("whois {}: {}", server, e),
+ }),
+ };
+ }
+
+ // no override: try common server patterns until one responds
+ let candidates = whois_candidates(tld);
+
+ if verbose {
+ eprintln!("[verbose] WHOIS probing {} candidates for .{}", candidates.len(), tld);
+ }
+
+ for server in &candidates {
+ match try_whois_server(server, &full, verbose).await {
+ Ok(resp) if !resp.is_empty() => {
+ return parse_whois_response(name, tld, &resp);
+ }
+ Ok(_) => {
+ if verbose {
+ eprintln!("[verbose] WHOIS {} returned empty for {}", server, full);
+ }
+ }
+ Err(e) => {
+ if verbose {
+ eprintln!("[verbose] WHOIS {} failed for {}: {}", server, full, e);
+ }
+ }
+ }
+ }
+
+ // nothing worked
+ DomainResult::new(name, tld, DomainStatus::Error {
+ kind: ErrorKind::Unknown,
+ message: "no whois server reachable".to_string(),
+ })
+}
+
+fn parse_whois_response(name: &str, tld: &str, response: &str) -> DomainResult {
+ let lower = response.to_lowercase();
+
+ // common "not found" / "available" patterns across registrars
+ let available_patterns = [
+ "no match for",
+ "not found",
+ "no entries found",
+ "no data found",
+ "status: free",
+ "status: available",
+ "is free",
+ "no object found",
+ "object not found",
+ "nothing found",
+ "domain not found",
+ "no information available",
+ "we do not have an entry",
+ ];
+
+ for pattern in &available_patterns {
+ if lower.contains(pattern) {
+ return DomainResult::new(name, tld, DomainStatus::Available);
+ }
+ }
+
+ // if we got a response and it wasnt "not found", domain is probably registered
+ // try to extract expiry date
+ let expiry = extract_whois_expiry(&lower);
+
+ DomainResult::new(name, tld, DomainStatus::Registered { expiry })
+}
+
/// Scan a WHOIS response for a line carrying the expiry date.
///
/// Returns the `YYYY-MM-DD` part when the value starts with an ISO-8601
/// date, otherwise the whole (trimmed) value. Previously the first 10
/// characters were returned whenever they contained a dash and a digit,
/// which mangled non-ISO formats (e.g. "08-mar-2026" became "08-mar-202").
fn extract_whois_expiry(response: &str) -> Option<String> {
    // true for exactly "YYYY-MM-DD" shaped ASCII strings
    fn looks_like_iso_date(s: &str) -> bool {
        let b = s.as_bytes();
        b.len() == 10
            && b[4] == b'-'
            && b[7] == b'-'
            && [0usize, 1, 2, 3, 5, 6, 8, 9].iter().all(|&i| b[i].is_ascii_digit())
    }

    let expiry_patterns = [
        "expiry date:",
        "expiration date:",
        "registry expiry date:",
        "registrar registration expiration date:",
        "paid-till:",
        "expires:",
        "expire:",
        "renewal date:",
        "expires on:",
    ];

    for line in response.lines() {
        let trimmed = line.trim().to_lowercase();
        for pattern in &expiry_patterns {
            if let Some(rest) = trimmed.strip_prefix(pattern) {
                let value = rest.trim();
                // ISO 8601 timestamps: keep just the calendar date
                if value.len() >= 10 {
                    let date_part: String = value.chars().take(10).collect();
                    if looks_like_iso_date(&date_part) {
                        return Some(date_part);
                    }
                }
                // other formats: return the value untruncated
                if !value.is_empty() {
                    return Some(value.to_string());
                }
            }
        }
    }
    None
}
+
+pub async fn lookup_with_retry(
+ client: &reqwest::Client,
+ bootstrap: &RdapBootstrap,
+ whois_overrides: &WhoisOverrides,
+ name: &str,
+ tld: &str,
+ retries: u32,
+ noretry: &[ErrorKind],
+ verbose: bool,
+) -> DomainResult {
+ let mut result = lookup_domain(client, bootstrap, whois_overrides, name, tld, verbose).await;
+
+ for attempt in 1..=retries {
+ if !result.is_error() {
+ break;
+ }
+ // skip retry if the error kind is in the noretry list
+ if let DomainStatus::Error { kind, .. } = &result.status {
+ if noretry.contains(kind) {
+ if verbose {
+ eprintln!("[verbose] Not retrying {}.{} (error kind in noretry list)", name, tld);
+ }
+ break;
+ }
+ }
+ if verbose {
+ eprintln!("[verbose] Retry {}/{} for {}.{}", attempt, retries, name, tld);
+ }
+ tokio::time::sleep(Duration::from_millis(500)).await;
+ result = lookup_domain(client, bootstrap, whois_overrides, name, tld, verbose).await;
+ }
+
+ result
+}
+
+pub fn build_client() -> reqwest::Client {
+ reqwest::Client::builder()
+ .timeout(Duration::from_secs(10))
+ .user_agent(format!("hoardom/{}", env!("CARGO_PKG_VERSION")))
+ .build()
+ .expect("Failed to create HTTP client")
+}
+
/// Look up `name` under every TLD in `tlds` and return results in input order.
///
/// `on_progress(done, total)` fires after every completed lookup. With
/// `jobs <= 1` lookups run sequentially with `delay_secs` between them;
/// otherwise up to `jobs` lookups run concurrently (no inter-lookup delay).
/// Returns an empty `Vec` if RDAP bootstrap data cannot be obtained.
pub async fn lookup_all(
    name: &str,
    tlds: &[&str],
    delay_secs: f64,
    retries: u32,
    verbose: bool,
    cache_path: Option<&Path>,
    force_refresh: bool,
    jobs: u8,
    whois_overrides: &WhoisOverrides,
    noretry: &[ErrorKind],
    on_progress: impl Fn(usize, usize),
) -> Vec<DomainResult> {
    let client = build_client();

    let bootstrap = match resolve_bootstrap(&client, cache_path, force_refresh, verbose).await {
        Some(b) => b,
        None => return Vec::new(),
    };

    let total = tlds.len();
    let concurrent = (jobs as usize).max(1);

    if concurrent <= 1 {
        // sequential path (original behaviour): optional politeness delay
        // between lookups, skipped after the last one
        let mut results = Vec::with_capacity(total);
        let delay = Duration::from_secs_f64(delay_secs);
        for (i, tld) in tlds.iter().enumerate() {
            let result = lookup_with_retry(&client, &bootstrap, whois_overrides, name, tld, retries, noretry, verbose).await;
            results.push(result);
            on_progress(i + 1, total);
            if delay_secs > 0.0 && i + 1 < total {
                tokio::time::sleep(delay).await;
            }
        }
        results
    } else {
        // concurrent path: shared state goes behind Arc so every in-flight
        // future owns a cheap handle rather than borrowing across awaits
        let bootstrap = Arc::new(bootstrap);
        let client = Arc::new(client);
        let whois_overrides = Arc::new(whois_overrides.clone());
        let noretry = Arc::new(noretry.to_vec());
        let name_owned = name.to_string();

        let mut stream = stream::iter(tlds.iter().enumerate())
            .map(|(i, tld)| {
                let client = Arc::clone(&client);
                let bootstrap = Arc::clone(&bootstrap);
                let whois_overrides = Arc::clone(&whois_overrides);
                let noretry = Arc::clone(&noretry);
                let name = name_owned.clone();
                let tld = tld.to_string();
                async move {
                    let result = lookup_with_retry(&client, &bootstrap, &whois_overrides, &name, &tld, retries, &noretry, verbose).await;
                    (i, result)
                }
            })
            .buffer_unordered(concurrent);

        // drain completions as they arrive (arbitrary order); the carried
        // index restores input order below
        let mut results: Vec<(usize, DomainResult)> = Vec::with_capacity(total);
        let mut done_count = 0usize;
        while let Some(item) = stream.next().await {
            results.push(item);
            done_count += 1;
            on_progress(done_count, total);
        }

        // sort by original order
        results.sort_by_key(|(i, _)| *i);

        results.into_iter().map(|(_, r)| r).collect()
    }
}
+
+pub async fn refresh_cache(cache_path: &Path, verbose: bool) -> Result<(), String> {
+ let client = build_client();
+ let bootstrap = RdapBootstrap::fetch(&client, verbose).await?;
+ bootstrap.save_cache(cache_path)?;
+ eprintln!("RDAP bootstrap cache refreshed ({} TLDs)", bootstrap.tld_map.len());
+ Ok(())
+}
+
+async fn resolve_bootstrap(
+ client: &reqwest::Client,
+ cache_path: Option<&Path>,
+ force_refresh: bool,
+ verbose: bool,
+) -> Option<RdapBootstrap> {
+ // try loading bootstrap from cache first (unless force refresh)
+ let cached = if !force_refresh {
+ if let Some(cp) = cache_path {
+ if cp.exists() {
+ match RdapBootstrap::load_cached(cp, verbose) {
+ Ok(b) => Some(b),
+ Err(e) => {
+ if verbose {
+ eprintln!("[verbose] Cache load failed: {}, fetching fresh", e);
+ }
+ None
+ }
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ } else {
+ if verbose {
+ eprintln!("[verbose] Force refresh requested, skipping cache");
+ }
+ None
+ };
+
+ match cached {
+ Some(b) => Some(b),
+ None => {
+ match RdapBootstrap::fetch(client, verbose).await {
+ Ok(b) => {
+ if let Some(cp) = cache_path {
+ if let Err(e) = b.save_cache(cp) {
+ if verbose {
+ eprintln!("[verbose] Failed to save cache: {}", e);
+ }
+ } else if verbose {
+ eprintln!("[verbose] RDAP bootstrap cached to {}", cp.display());
+ }
+ }
+ Some(b)
+ }
+ Err(e) => {
+ eprintln!("Error: {}", e);
+ eprintln!("Cannot perform lookups without RDAP bootstrap data.");
+ None
+ }
+ }
+ }
+ }
+}
+
/// Messages a background lookup task emits over its channel.
pub enum StreamMsg {
    // one finished lookup; `sort_index` preserves the caller's submission order
    Result { result: DomainResult, sort_index: usize },
    // progress counter: `current` lookups done out of `total`
    Progress { current: usize, total: usize },
    // fatal setup failure (e.g. RDAP bootstrap unavailable); Done follows
    Error(String),
    // terminal message: no further messages will arrive on this channel
    Done,
}

/// Handle to a running background lookup: the receiving end of the
/// [`StreamMsg`] channel plus the spawned task's join handle.
pub struct LookupStream {
    pub receiver: tokio::sync::mpsc::Receiver<StreamMsg>,
    pub handle: tokio::task::JoinHandle<()>,
}

/// Batched searches: one `(name, tlds)` entry per search term.
pub type LookupBatch = Vec<(String, Vec<String>)>;
+
/// Spawn a background task that looks up `name` under every TLD in `tlds`,
/// streaming [`StreamMsg`]s over a channel so a UI can render results live.
///
/// Mirrors `lookup_all`: sequential with `delay_secs` between lookups when
/// `jobs <= 1`, otherwise up to `jobs` concurrent lookups (results then
/// arrive out of order; `sort_index` restores it). Always terminates the
/// stream with `StreamMsg::Done`, even on bootstrap failure.
pub fn lookup_streaming(
    name: String,
    tlds: Vec<String>,
    delay_secs: f64,
    retries: u32,
    verbose: bool,
    cache_path: Option<PathBuf>,
    force_refresh: bool,
    jobs: u8,
    whois_overrides: WhoisOverrides,
    noretry: Vec<ErrorKind>,
) -> LookupStream {
    let (tx, rx) = tokio::sync::mpsc::channel(64);

    let handle = tokio::spawn(async move {
        let client = build_client();

        let bootstrap = match resolve_bootstrap(
            &client,
            cache_path.as_deref(),
            force_refresh,
            verbose,
        ).await {
            Some(b) => b,
            None => {
                // surface the failure and still close the stream cleanly
                let _ = tx.send(StreamMsg::Error("Failed to load RDAP bootstrap".to_string())).await;
                let _ = tx.send(StreamMsg::Done).await;
                return;
            }
        };

        let total = tlds.len();
        let concurrent = (jobs as usize).max(1);

        if concurrent <= 1 {
            // sequential path: politeness delay between lookups, not after the last
            let delay = Duration::from_secs_f64(delay_secs);
            for (i, tld) in tlds.iter().enumerate() {
                let result = lookup_with_retry(&client, &bootstrap, &whois_overrides, &name, tld, retries, &noretry, verbose).await;
                let _ = tx.send(StreamMsg::Result { result, sort_index: i }).await;
                let _ = tx.send(StreamMsg::Progress { current: i + 1, total }).await;
                if delay_secs > 0.0 && i + 1 < total {
                    tokio::time::sleep(delay).await;
                }
            }
        } else {
            // concurrent path: Arc-share state into each in-flight future
            let bootstrap = Arc::new(bootstrap);
            let client = Arc::new(client);
            let whois_overrides = Arc::new(whois_overrides);
            let noretry = Arc::new(noretry);
            let tx2 = tx.clone();

            let mut stream = stream::iter(tlds.into_iter().enumerate())
                .map(|(idx, tld)| {
                    let client = Arc::clone(&client);
                    let bootstrap = Arc::clone(&bootstrap);
                    let whois_overrides = Arc::clone(&whois_overrides);
                    let noretry = Arc::clone(&noretry);
                    let name = name.clone();
                    async move {
                        let result = lookup_with_retry(&client, &bootstrap, &whois_overrides, &name, &tld, retries, &noretry, verbose).await;
                        (idx, result)
                    }
                })
                .buffer_unordered(concurrent);

            let mut done_count = 0usize;
            while let Some((idx, result)) = stream.next().await {
                done_count += 1;
                let _ = tx2.send(StreamMsg::Result { result, sort_index: idx }).await;
                let _ = tx2.send(StreamMsg::Progress { current: done_count, total }).await;
            }
        }

        let _ = tx.send(StreamMsg::Done).await;
    });

    LookupStream {
        receiver: rx,
        handle,
    }
}
+
/// Like [`lookup_streaming`], but for several search names at once.
///
/// A single-entry batch simply delegates to [`lookup_streaming`]. `sort_index`
/// is the global position of each `(name, tld)` pair across all batches, so
/// receivers can restore submission order. Always ends with `StreamMsg::Done`.
pub fn lookup_many_streaming(
    batches: LookupBatch,
    delay_secs: f64,
    retries: u32,
    verbose: bool,
    cache_path: Option<PathBuf>,
    force_refresh: bool,
    jobs: u8,
    whois_overrides: WhoisOverrides,
    noretry: Vec<ErrorKind>,
) -> LookupStream {
    if batches.len() == 1 {
        let (name, tlds) = batches.into_iter().next().unwrap();
        return lookup_streaming(name, tlds, delay_secs, retries, verbose, cache_path, force_refresh, jobs, whois_overrides, noretry);
    }

    let (tx, rx) = tokio::sync::mpsc::channel(64);

    let handle = tokio::spawn(async move {
        let client = build_client();

        let bootstrap = match resolve_bootstrap(
            &client,
            cache_path.as_deref(),
            force_refresh,
            verbose,
        ).await {
            Some(b) => b,
            None => {
                // surface the failure and still close the stream cleanly
                let _ = tx.send(StreamMsg::Error("Failed to load RDAP bootstrap".to_string())).await;
                let _ = tx.send(StreamMsg::Done).await;
                return;
            }
        };

        let total: usize = batches.iter().map(|(_, tlds)| tlds.len()).sum();
        let concurrent = (jobs as usize).max(1);

        if concurrent <= 1 {
            // sequential path: walk batches in order, delay between lookups
            let delay = Duration::from_secs_f64(delay_secs);
            let mut current = 0usize;
            let mut global_idx = 0usize;
            for (name, tlds) in batches {
                for tld in tlds {
                    let result = lookup_with_retry(&client, &bootstrap, &whois_overrides, &name, &tld, retries, &noretry, verbose).await;
                    current += 1;
                    let _ = tx.send(StreamMsg::Result { result, sort_index: global_idx }).await;
                    let _ = tx.send(StreamMsg::Progress { current, total }).await;
                    if delay_secs > 0.0 && current < total {
                        tokio::time::sleep(delay).await;
                    }
                    global_idx += 1;
                }
            }
        } else {
            // concurrent path: Arc-share state into each in-flight future
            let bootstrap = Arc::new(bootstrap);
            let client = Arc::new(client);
            let whois_overrides = Arc::new(whois_overrides);
            let noretry = Arc::new(noretry);
            let tx2 = tx.clone();

            // flatten all (name, tld) pairs with their global index
            let pairs: Vec<(usize, String, String)> = batches
                .into_iter()
                .flat_map(|(name, tlds)| tlds.into_iter().map(move |tld| (name.clone(), tld)))
                .enumerate()
                .map(|(idx, (name, tld))| (idx, name, tld))
                .collect();

            let mut stream = stream::iter(pairs.into_iter())
                .map(|(idx, name, tld)| {
                    let client = Arc::clone(&client);
                    let bootstrap = Arc::clone(&bootstrap);
                    let whois_overrides = Arc::clone(&whois_overrides);
                    let noretry = Arc::clone(&noretry);
                    async move {
                        let result = lookup_with_retry(&client, &bootstrap, &whois_overrides, &name, &tld, retries, &noretry, verbose).await;
                        (idx, result)
                    }
                })
                .buffer_unordered(concurrent);

            let mut done_count = 0usize;
            while let Some((idx, result)) = stream.next().await {
                done_count += 1;
                let _ = tx2.send(StreamMsg::Result { result, sort_index: idx }).await;
                let _ = tx2.send(StreamMsg::Progress { current: done_count, total }).await;
            }
        }

        let _ = tx.send(StreamMsg::Done).await;
    });

    LookupStream { receiver: rx, handle }
}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..aa0b993
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,418 @@
+mod cli;
+mod config;
+mod lookup;
+mod output;
+mod tlds;
+mod tui;
+mod types;
+
+use clap::Parser;
+use cli::{print_fullhelp, print_help, Args};
+use config::{parse_filter_file, resolve_paths, Config};
+use tlds::{apply_top_tlds, get_tlds_or_default, whois_overrides};
+use types::{DomainResult, ErrorKind};
+
/// A lookup result tagged with the position of the originating CLI argument
/// and of its TLD, so the combined output can be re-sorted deterministically.
#[derive(Debug)]
struct AggregatedResult {
    domain_idx: usize, // index into the CLI `domains` arguments
    tld_idx: usize,    // index into the effective TLD list (primary sort key)
    result: DomainResult,
}
+
// Entry point: parses CLI args, resolves config/cache paths, then dispatches
// to cache refresh, TUI mode, autosearch, or the plain CLI lookup flow.
#[tokio::main]
async fn main() {
    let args = Args::parse();

    // handle help flags
    if args.help {
        print_help();
        return;
    }
    if args.fullhelp {
        print_fullhelp();
        return;
    }

    // resolve .hoardom directory structure
    let paths = resolve_paths(args.env_path.as_ref());
    let mut config = Config::load_with_backup(&paths.config_file);

    if !paths.can_save {
        eprintln!("Warning: favorites and settings wont be saved (no writable location found)");
    }

    // handle -r refresh cache flag (exits after refreshing)
    if args.refresh_cache {
        if !paths.caching_enabled {
            eprintln!("Caching is disabled (no writable location). Nothing to refresh.");
            return;
        }
        let cache_file = paths.cache_file("rdap_bootstrap.json");
        match lookup::refresh_cache(&cache_file, args.verbose).await {
            Ok(()) => {
                config.mark_cache_updated();
                if paths.can_save {
                    let _ = config.save(&paths.config_file);
                }
            }
            Err(e) => eprintln!("Error refreshing cache: {}", e),
        }
        return;
    }

    // check if cache is stale and auto update if needed
    let cache_file = if paths.caching_enabled {
        Some(paths.cache_file("rdap_bootstrap.json"))
    } else {
        None
    };

    let force_refresh = if let Some(ref cf) = cache_file {
        let (is_outdated, should_auto) = config.check_cache_status();
        if is_outdated && !should_auto {
            eprintln!("Warning: RDAP cache is outdated. Run `hoardom -r` to refresh.");
        }
        // force refresh if auto update says so, or if cache file doesnt exist yet
        should_auto || !cf.exists()
    } else {
        false
    };

    // import custom filter if given
    if let Some(filter_path) = &args.import_filter {
        match parse_filter_file(filter_path) {
            Ok(filter) => {
                config.import_filter(filter);
                if paths.can_save {
                    let _ = config.save(&paths.config_file);
                }
            }
            Err(e) => {
                eprintln!("Error importing filter: {}", e);
                return;
            }
        }
    }

    // whois server overrides are baked into Lists.toml ("tld:server" syntax)
    let overrides = whois_overrides();

    // parse noretry config into ErrorKind list (unknown strings are dropped)
    let noretry: Vec<ErrorKind> = config.settings.noretry.iter()
        .filter_map(|s| ErrorKind::from_config_str(s))
        .collect();

    // TUI mode
    if args.is_tui() {
        if let Err(e) = tui::run_tui(&args, &config, paths.clone(), cache_file.clone(), force_refresh, overrides.clone(), noretry.clone()).await {
            eprintln!("TUI error: {}", e);
        }
        // save cache timestamp after TUI session if we refreshed
        if force_refresh && paths.can_save {
            config.mark_cache_updated();
            let _ = config.save(&paths.config_file);
        }
        return;
    }

    // CLI needs at least one domain unless autosearch was given
    if args.domains.is_empty() {
        if let Some(file_path) = &args.autosearch {
            run_autosearch(&args, file_path, cache_file.as_deref(), force_refresh, overrides, &noretry).await;
            if force_refresh && paths.can_save {
                config.mark_cache_updated();
                let _ = config.save(&paths.config_file);
            }
            return;
        }
        print_help();
        return;
    }

    let base_tlds = build_base_tlds(&args);
    let total_lookups = estimate_total_lookups(&args.domains, &base_tlds);
    let mut completed_lookups = 0usize;
    // only the first lookup batch may trigger a bootstrap refresh
    let mut refresh_remaining = force_refresh;
    let mut aggregated_results = Vec::new();

    for (domain_idx, raw_domain) in args.domains.iter().enumerate() {
        let (search_name, specific_tld) = parse_domain_input(raw_domain);
        let effective_tlds = build_effective_tlds(&base_tlds, specific_tld.as_deref());

        if effective_tlds.is_empty() {
            eprintln!("No TLDs to search. Check your filter settings.");
            return;
        }

        let results = lookup::lookup_all(
            &search_name,
            &effective_tlds,
            args.effective_delay(),
            args.effective_retry(),
            args.verbose,
            cache_file.as_deref(),
            refresh_remaining,
            args.effective_jobs(),
            overrides,
            &noretry,
            |current, _total| {
                output::print_progress(completed_lookups + current, total_lookups.max(1));
            },
        )
        .await;

        refresh_remaining = false;
        completed_lookups += effective_tlds.len();

        for result in results {
            // MAX - 1 keeps unmatched results ahead of suggestion rows, which
            // use usize::MAX below
            let tld_idx = effective_tlds
                .iter()
                .position(|tld| *tld == result.tld)
                .unwrap_or(usize::MAX - 1);
            aggregated_results.push(AggregatedResult {
                domain_idx,
                tld_idx,
                result,
            });
        }

        // Suggestions only kick in when directly searching a single full domain
        if args.domains.len() == 1 && args.effective_suggestions() > 0 {
            if let Some(exact_tld) = specific_tld.as_deref() {
                let exact_registered = aggregated_results.iter().any(|item| {
                    item.result.name == search_name
                        && item.result.tld == exact_tld
                        && !item.result.is_available()
                });

                if exact_registered {
                    // probe the remaining base TLDs for available alternatives
                    let suggestion_tlds: Vec<&'static str> = base_tlds
                        .iter()
                        .copied()
                        .filter(|tld| *tld != exact_tld)
                        .collect();

                    if !suggestion_tlds.is_empty() {
                        let suggestion_results = lookup::lookup_all(
                            &search_name,
                            &suggestion_tlds,
                            args.effective_delay(),
                            args.effective_retry(),
                            args.verbose,
                            cache_file.as_deref(),
                            false,
                            args.effective_jobs(),
                            overrides,
                            &noretry,
                            |_current, _total| {},
                        )
                        .await;

                        for result in suggestion_results
                            .into_iter()
                            .filter(|result| result.is_available())
                            .take(args.effective_suggestions())
                        {
                            let tld_idx = base_tlds
                                .iter()
                                .position(|tld| *tld == result.tld)
                                .unwrap_or(usize::MAX);
                            aggregated_results.push(AggregatedResult {
                                domain_idx,
                                tld_idx,
                                result,
                            });
                        }
                    }
                }
            }
        }
    }

    let results = sort_aggregated_results(aggregated_results);

    // save cache timestamp if we refreshed
    if force_refresh && paths.can_save {
        config.mark_cache_updated();
        let _ = config.save(&paths.config_file);
    }

    // print errors first
    output::print_errors(&results, args.verbose);

    // CSV output
    if let Some(csv_opt) = &args.csv {
        match csv_opt {
            Some(path) => {
                // write to file
                match output::write_csv_file(&results, path) {
                    Ok(()) => eprintln!("CSV written to {}", path.display()),
                    Err(e) => eprintln!("Error writing CSV: {}", e),
                }
            }
            None => {
                // print to stdout, no logs
                output::print_csv(&results);
            }
        }
        return;
    }

    // table output
    if args.show_all {
        output::print_full_table(&results, args.no_color, args.no_unicode);
    } else {
        output::print_available_table(&results, args.no_color, args.no_unicode);
    }
}
+
/// Batch mode: read search terms from a file (one per line) and look them all
/// up in one pass.
///
/// Lines whose first character is not alphanumeric are skipped (comment
/// lines), as are blank lines. Terms for the same name are merged into one
/// batch so "zapplex.de" and "zapplex.nl" share a single search.
async fn run_autosearch(
    args: &Args,
    file_path: &std::path::PathBuf,
    cache_path: Option<&std::path::Path>,
    force_refresh: bool,
    overrides: &tlds::WhoisOverrides,
    noretry: &[ErrorKind],
) {
    let content = match std::fs::read_to_string(file_path) {
        Ok(c) => c,
        Err(e) => {
            eprintln!("Could not read autosearch file: {}", e);
            return;
        }
    };

    let base_tlds = build_base_tlds(args);

    // collect all search entries, grouping by name so "zapplex.de" + "zapplex.nl" become one batch
    let mut batches: Vec<(String, Vec<String>)> = Vec::new();

    for line in content.lines() {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        if let Some(first) = trimmed.chars().next() {
            if !first.is_alphanumeric() {
                continue;
            }
        }

        // "name.tld" pins one TLD; a bare name fans out over the base list
        let (search_name, specific_tld) = if trimmed.contains('.') {
            let parts: Vec<&str> = trimmed.splitn(2, '.').collect();
            (parts[0].to_string(), Some(parts[1].to_string()))
        } else {
            (trimmed.to_string(), None)
        };

        let effective_tlds = build_effective_tlds(&base_tlds, specific_tld.as_deref());

        // append to the existing batch for this name, or start a fresh one
        let entry = if let Some(pos) = batches.iter().position(|(name, _)| *name == search_name) {
            &mut batches[pos].1
        } else {
            batches.push((search_name, Vec::new()));
            &mut batches.last_mut().unwrap().1
        };
        for tld in effective_tlds {
            if !entry.contains(&tld.to_string()) {
                entry.push(tld.to_string());
            }
        }
    }

    if batches.is_empty() {
        eprintln!("No valid search terms in file");
        return;
    }

    let total_lookups: usize = batches.iter().map(|(_, tlds)| tlds.len()).sum();
    let mut completed = 0usize;
    let mut all_results: Vec<DomainResult> = Vec::new();
    // only the first batch may trigger a bootstrap refresh
    let mut refresh_remaining = force_refresh;

    for (search_name, tlds) in &batches {
        let tld_refs: Vec<&str> = tlds.iter().map(|s| s.as_str()).collect();

        let results = lookup::lookup_all(
            search_name,
            &tld_refs,
            args.effective_delay(),
            args.effective_retry(),
            args.verbose,
            cache_path,
            refresh_remaining,
            args.effective_jobs(),
            overrides,
            noretry,
            |current, _total| {
                output::print_progress(completed + current, total_lookups.max(1));
            },
        )
        .await;

        refresh_remaining = false;
        completed += tlds.len();
        all_results.extend(results);
    }

    output::print_errors(&all_results, args.verbose);

    if args.show_all {
        output::print_full_table(&all_results, args.no_color, args.no_unicode);
    } else {
        output::print_available_table(&all_results, args.no_color, args.no_unicode);
    }
}
+
/// Build the base TLD list for this invocation: the selected named list,
/// optionally replaced wholesale by `--only-top`, then optionally reordered
/// by `--top-tlds`.
fn build_base_tlds(args: &Args) -> Vec<&'static str> {
    let tld_list = args.effective_list();
    let mut tld_vec = get_tlds_or_default(&tld_list);

    if let Some(ref only) = args.only_top {
        // user strings are leaked to satisfy &'static str; bounded by the CLI
        // argument count, so the leak is a few bytes per process run
        tld_vec = only
            .iter()
            .filter(|s| !s.is_empty())
            .map(|s| -> &'static str { Box::leak(s.clone().into_boxed_str()) })
            .collect();
    }

    if let Some(ref top) = args.top_tlds {
        tld_vec = apply_top_tlds(tld_vec, top);
    }

    tld_vec
}
+
/// Split raw CLI input at the first dot: "foo.co.uk" -> ("foo", Some("co.uk")),
/// "foo" -> ("foo", None).
fn parse_domain_input(raw_domain: &str) -> (String, Option<String>) {
    match raw_domain.split_once('.') {
        Some((name, tld)) => (name.to_string(), Some(tld.to_string())),
        None => (raw_domain.to_string(), None),
    }
}
+
/// TLDs to actually query for one search term: either the single TLD the user
/// spelled out, or the whole base list.
fn build_effective_tlds(base_tlds: &[&'static str], specific_tld: Option<&str>) -> Vec<&'static str> {
    if let Some(tld) = specific_tld {
        // leak to satisfy the &'static str list type; one tiny leak per query
        vec![Box::leak(tld.to_string().into_boxed_str()) as &'static str]
    } else {
        base_tlds.to_vec()
    }
}
+
/// Progress-bar total: a fully-qualified domain counts as one lookup, a bare
/// name fans out to one lookup per base TLD.
fn estimate_total_lookups(domains: &[String], base_tlds: &[&'static str]) -> usize {
    let mut total = 0;
    for domain in domains {
        total += if domain.contains('.') { 1 } else { base_tlds.len() };
    }
    total
}
+
+fn sort_aggregated_results(mut aggregated: Vec<AggregatedResult>) -> Vec<DomainResult> {
+ aggregated.sort_by(|a, b| {
+ a.tld_idx
+ .cmp(&b.tld_idx)
+ .then(a.domain_idx.cmp(&b.domain_idx))
+ });
+ aggregated.into_iter().map(|item| item.result).collect()
+}
+
diff --git a/src/output.rs b/src/output.rs
new file mode 100644
index 0000000..fe3b141
--- /dev/null
+++ b/src/output.rs
@@ -0,0 +1,209 @@
use crate::types::{DomainResult, DomainStatus, ErrorKind};
use colored::*;
use std::io::Write;
use std::path::{Path, PathBuf};
+
+pub fn print_available_table(results: &[DomainResult], no_color: bool, no_unicode: bool) {
+ let available: Vec<&DomainResult> = results.iter().filter(|r| r.is_available()).collect();
+
+ if available.is_empty() {
+ println!("No available domains found.");
+ return;
+ }
+
+ let max_len = available.iter().map(|r| r.full.len()).max().unwrap_or(20);
+ let width = max_len + 4; // padding
+
+ let title = "Available Domains";
+ let title_padded = format!("{:^width$}", title, width = width);
+
+ if no_unicode {
+ // ASCII box
+ let border = format!("+{}+", "-".repeat(width));
+ println!("{}", border);
+ if no_color {
+ println!("|{}|", title_padded);
+ } else {
+ println!("|{}|", title_padded.green());
+ }
+ println!("+{}+", "-".repeat(width));
+ for r in &available {
+ println!("| {:<pad$} |", r.full, pad = width - 2);
+ }
+ println!("{}", border);
+ } else {
+ // Unicode box
+ let top = format!("┌{}┐", "─".repeat(width));
+ let sep = format!("├{}┤", "─".repeat(width));
+ let bot = format!("└{}┘", "─".repeat(width));
+ println!("{}", top);
+ if no_color {
+ println!("│{}│", title_padded);
+ } else {
+ println!("│{}│", title_padded.green());
+ }
+ println!("{}", sep);
+ for r in &available {
+ println!("│ {:<pad$} │", r.full, pad = width - 2);
+ }
+ println!("{}", bot);
+ }
+}
+
+pub fn print_full_table(results: &[DomainResult], no_color: bool, no_unicode: bool) {
+ if results.is_empty() {
+ println!("No results.");
+ return;
+ }
+
+ // calc column widths
+ let domain_w = results.iter().map(|r| r.full.len()).max().unwrap_or(10).max(7);
+ let status_w = 10; // "registered" is the longest
+ let note_w = results.iter().map(|r| r.note_str().len()).max().unwrap_or(4).max(4);
+
+ let domain_col = domain_w + 2;
+ let status_col = status_w + 2;
+ let note_col = note_w + 2;
+
+ if no_unicode {
+ print_full_table_ascii(results, domain_col, status_col, note_col, no_color);
+ } else {
+ print_full_table_unicode(results, domain_col, status_col, note_col, no_color);
+ }
+}
+
/// Unicode (box-drawing) renderer behind [`print_full_table`].
///
/// `dc`/`sc`/`nc` are the full domain/status/note column widths (content plus
/// two padding cells each).
fn print_full_table_unicode(
    results: &[DomainResult],
    dc: usize,
    sc: usize,
    nc: usize,
    no_color: bool,
) {
    let top = format!("┌{}┬{}┬{}┐", "─".repeat(dc), "─".repeat(sc), "─".repeat(nc));
    let sep = format!("├{}┼{}┼{}┤", "─".repeat(dc), "─".repeat(sc), "─".repeat(nc));
    let bot = format!("└{}┴{}┴{}┘", "─".repeat(dc), "─".repeat(sc), "─".repeat(nc));

    println!("{}", top);
    println!(
        "│{:^dc$}│{:^sc$}│{:^nc$}│",
        "Domains",
        "Status",
        "Note",
        dc = dc,
        sc = sc,
        nc = nc,
    );
    println!("{}", sep);

    for r in results {
        // left-aligned cell content inside one space of padding per side
        let domain_str = format!(" {:<width$} ", r.full, width = dc - 2);
        let status_str = format!(" {:<width$} ", r.status_str(), width = sc - 2);
        let note_str = format!(" {:<width$} ", r.note_str(), width = nc - 2);

        if no_color {
            println!("│{}│{}│{}│", domain_str, status_str, note_str);
        } else {
            // only the domain cell is colored; status/note stay plain
            let colored_domain = color_domain(&domain_str, &r.status);
            println!("│{}│{}│{}│", colored_domain, status_str, note_str);
        }
    }

    println!("{}", bot);
}
+
/// ASCII renderer behind [`print_full_table`] (for `--no-unicode` terminals).
///
/// `dc`/`sc`/`nc` are the full domain/status/note column widths (content plus
/// two padding cells each).
fn print_full_table_ascii(
    results: &[DomainResult],
    dc: usize,
    sc: usize,
    nc: usize,
    no_color: bool,
) {
    // the same border line doubles as top, header separator and bottom
    let border = format!("+{}+{}+{}+", "-".repeat(dc), "-".repeat(sc), "-".repeat(nc));

    println!("{}", border);
    println!(
        "|{:^dc$}|{:^sc$}|{:^nc$}|",
        "Domains",
        "Status",
        "Note",
        dc = dc,
        sc = sc,
        nc = nc,
    );
    println!("{}", border);

    for r in results {
        // left-aligned cell content inside one space of padding per side
        let domain_str = format!(" {:<width$} ", r.full, width = dc - 2);
        let status_str = format!(" {:<width$} ", r.status_str(), width = sc - 2);
        let note_str = format!(" {:<width$} ", r.note_str(), width = nc - 2);

        if no_color {
            println!("|{}|{}|{}|", domain_str, status_str, note_str);
        } else {
            // only the domain cell is colored; status/note stay plain
            let colored_domain = color_domain(&domain_str, &r.status);
            println!("|{}|{}|{}|", colored_domain, status_str, note_str);
        }
    }

    println!("{}", border);
}
+
+fn color_domain(domain: &str, status: &DomainStatus) -> ColoredString {
+ match status {
+ DomainStatus::Available => domain.green(),
+ DomainStatus::Registered { .. } => domain.red(),
+ DomainStatus::Error { kind, .. } => match kind {
+ ErrorKind::InvalidTld => domain.yellow(),
+ _ => domain.blue(),
+ },
+ }
+}
+
/// Print results as CSV on stdout: a header row plus one row per result.
///
/// NOTE(review): fields are not quoted or escaped; domain names cannot contain
/// commas, but confirm `status_str()`/`note_str()` never can either, or rows
/// would break.
pub fn print_csv(results: &[DomainResult]) {
    println!("Domains, Status, Note");
    for r in results {
        println!("{}, {}, {}", r.full, r.status_str(), r.note_str());
    }
}
+
+pub fn write_csv_file(results: &[DomainResult], path: &PathBuf) -> Result<(), String> {
+ let mut file = std::fs::File::create(path)
+ .map_err(|e| format!("Could not create CSV file: {}", e))?;
+ writeln!(file, "Domains, Status, Note")
+ .map_err(|e| format!("Write error: {}", e))?;
+ for r in results {
+ writeln!(file, "{}, {}, {}", r.full, r.status_str(), r.note_str())
+ .map_err(|e| format!("Write error: {}", e))?;
+ }
+ Ok(())
+}
+
+pub fn print_errors(results: &[DomainResult], verbose: bool) {
+ for r in results {
+ if let DomainStatus::Error { kind, message } = &r.status {
+ match kind {
+ ErrorKind::InvalidTld => {
+ eprintln!("Error for {}, tld does not seem to exist", r.full);
+ }
+ _ => {
+ if verbose {
+ eprintln!("Error for {}, {} (raw: {})", r.full, message, message);
+ } else {
+ eprintln!(
+ "Error for {}, unknown error (enable verbose to see raw error)",
+ r.full
+ );
+ }
+ }
+ }
+ }
+ }
+}
+
/// Render an in-place percentage line on stderr; prints "Done" on the final
/// item. Uses `\r` so successive calls overwrite the same line.
pub fn print_progress(current: usize, total: usize) {
    // guard a zero total locally instead of relying on every caller's
    // `.max(1)` clamp; keeps the percentage well defined
    let total = total.max(1);
    let percent = (current as f64 / total as f64 * 100.0) as u32;
    eprint!("\rParsing results : {}%", percent);
    if current == total {
        eprintln!("\rParsing results : Done ");
    }
}
diff --git a/src/tlds.rs b/src/tlds.rs
new file mode 100644
index 0000000..2835c24
--- /dev/null
+++ b/src/tlds.rs
@@ -0,0 +1,179 @@
+use std::collections::HashMap;
+use std::sync::OnceLock;
+
/// WHOIS server overrides parsed from Lists.toml ("io:whois.nic.io" entries).
#[derive(Debug, Clone, Default)]
pub struct WhoisOverrides {
    map: HashMap<String, String>, // lowercased tld -> whois server host
}

impl WhoisOverrides {
    /// Look up the override server for `tld`, case-insensitively.
    pub fn get_server(&self, tld: &str) -> Option<&str> {
        self.map.get(&tld.to_lowercase()).map(String::as_str)
    }
}
+
/// a named TLD list from Lists.toml (e.g. `standard = ["com", "net", ...]`)
struct NamedList {
    name: String,      // table key from Lists.toml, in build.rs discovery order
    tlds: Vec<String>, // TLDs in file order, ":server" override suffixes stripped
}

// Everything parsed out of the embedded Lists.toml, built once lazily.
struct ParsedLists {
    lists: Vec<NamedList>,
    whois_overrides: WhoisOverrides,
}
+
/// Split a list entry at the first colon into its TLD and optional WHOIS
/// override: "com" -> ("com", None); "io:whois.nic.io" -> ("io", Some(...)).
fn parse_entry(entry: &str) -> (String, Option<String>) {
    match entry.split_once(':') {
        Some((tld, server)) => (tld.to_string(), Some(server.to_string())),
        None => (entry.to_string(), None),
    }
}
+
+/// parse entries, pull out TLD names and whois overrides
+fn parse_list(entries: &[toml::Value], overrides: &mut HashMap<String, String>) -> Vec<String> {
+ entries
+ .iter()
+ .filter_map(|v| v.as_str())
+ .map(|entry| {
+ let (tld, server) = parse_entry(entry);
+ if let Some(s) = server {
+ overrides.insert(tld.to_lowercase(), s);
+ }
+ tld
+ })
+ .collect()
+}
+
// Lazily-parsed Lists.toml contents; built on first access, then shared.
static PARSED_LISTS: OnceLock<ParsedLists> = OnceLock::new();

/// Parse the embedded Lists.toml exactly once; later calls are free.
///
/// List order follows HOARDOM_LIST_NAMES (emitted by build.rs), not TOML
/// table order, so the first list in the file stays the default.
fn parsed_lists() -> &'static ParsedLists {
    PARSED_LISTS.get_or_init(|| {
        // Lists.toml is compiled into the binary; a malformed file is a
        // build-time defect, so panicking here is acceptable
        let raw: toml::Value = toml::from_str(include_str!("../Lists.toml"))
            .expect("Lists.toml must be valid TOML");

        let table = raw.as_table().expect("Lists.toml must be a TOML table");

        // Build list names in the order build.rs discovered them
        let ordered_names: Vec<&str> = env!("HOARDOM_LIST_NAMES").split(',').collect();

        let mut overrides = HashMap::new();
        let mut lists = Vec::new();

        for name in &ordered_names {
            // names without a matching TOML array are skipped silently
            if let Some(toml::Value::Array(arr)) = table.get(*name) {
                let tlds = parse_list(arr, &mut overrides);
                lists.push(NamedList {
                    name: name.to_string(),
                    tlds,
                });
            }
        }

        ParsedLists {
            lists,
            whois_overrides: WhoisOverrides { map: overrides },
        }
    })
}
+
+/// list names from Lists.toml, in order
+pub fn list_names() -> Vec<&'static str> {
+ parsed_lists()
+ .lists
+ .iter()
+ .map(|l| l.name.as_str())
+ .collect()
+}
+
+/// first list name (the default)
+pub fn default_list_name() -> &'static str {
+ list_names().first().copied().unwrap_or("standard")
+}
+
+/// get TLDs for a list name (case insensitive), None if not found
+pub fn get_tlds(name: &str) -> Option<Vec<&'static str>> {
+ let lower = name.to_lowercase();
+ parsed_lists()
+ .lists
+ .iter()
+ .find(|l| l.name == lower)
+ .map(|l| l.tlds.iter().map(String::as_str).collect())
+}
+
+/// get TLDs for a list name, falls back to default if not found
+pub fn get_tlds_or_default(name: &str) -> Vec<&'static str> {
+ get_tlds(name).unwrap_or_else(|| get_tlds(default_list_name()).unwrap_or_default())
+}
+
/// The builtin WHOIS server overrides baked in from Lists.toml.
pub fn whois_overrides() -> &'static WhoisOverrides {
    &parsed_lists().whois_overrides
}
+
/// Move the TLDs named in `top` to the front of `tlds`, in the order the user
/// listed them (matched case-insensitively); everything else keeps its
/// original relative order. Unknown `top` entries are ignored and duplicates
/// are dropped.
pub fn apply_top_tlds(tlds: Vec<&'static str>, top: &[String]) -> Vec<&'static str> {
    let mut ordered = Vec::with_capacity(tlds.len());

    // promoted entries first, in user order
    for wanted in top {
        let wanted_lower = wanted.to_lowercase();
        let hit = tlds.iter().find(|tld| tld.to_lowercase() == wanted_lower);
        if let Some(&tld) = hit {
            if !ordered.contains(&tld) {
                ordered.push(tld);
            }
        }
    }

    // then everything not already promoted, keeping list order
    for &tld in &tlds {
        if !ordered.contains(&tld) {
            ordered.push(tld);
        }
    }

    ordered
}
+
#[cfg(test)]
mod tests {
    use super::*;

    // entry without an override: just the TLD, no server
    #[test]
    fn test_parse_entry_bare() {
        let (tld, server) = parse_entry("com");
        assert_eq!(tld, "com");
        assert_eq!(server, None);
    }

    // "tld:server" entry: both halves returned
    #[test]
    fn test_parse_entry_with_override() {
        let (tld, server) = parse_entry("io:whois.nic.io");
        assert_eq!(tld, "io");
        assert_eq!(server, Some("whois.nic.io".to_string()));
    }

    // NOTE: depends on the shipped Lists.toml contents, not just this module
    #[test]
    fn test_whois_overrides_populated() {
        let overrides = whois_overrides();
        // io should have an override since our Lists.toml has "io:whois.nic.io"
        assert!(overrides.get_server("io").is_some());
        // com should not (it has RDAP)
        assert!(overrides.get_server("com").is_none());
    }

    // requested top TLDs move to the front, rest keep their order
    #[test]
    fn test_top_tlds_reorder() {
        let tlds = vec!["com", "net", "org", "ch", "de"];
        let top = vec!["ch".to_string(), "de".to_string()];
        let result = apply_top_tlds(tlds, &top);
        assert_eq!(result, vec!["ch", "de", "com", "net", "org"]);
    }

    // top entries not present in the list are simply skipped
    #[test]
    fn test_top_tlds_missing_ignored() {
        let tlds = vec!["com", "net"];
        let top = vec!["swiss".to_string()];
        let result = apply_top_tlds(tlds, &top);
        assert_eq!(result, vec!["com", "net"]);
    }
}
diff --git a/src/tui.rs b/src/tui.rs
new file mode 100644
index 0000000..f6d9238
--- /dev/null
+++ b/src/tui.rs
@@ -0,0 +1,2870 @@
+use crossterm::{
+ event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind, KeyModifiers, MouseButton, MouseEvent, MouseEventKind},
+ execute,
+ terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
+};
+use ratatui::{
+ backend::CrosstermBackend,
+ layout::{Constraint, Direction, Layout, Rect},
+ style::{Color, Modifier, Style},
+ text::{Line, Span},
+ widgets::{Block, Borders, Clear, List, ListItem, ListState, Paragraph},
+ Frame, Terminal,
+};
+use std::io::{self, Write};
+use std::path::{Path, PathBuf};
+use std::time::{Duration, Instant};
+
+use crate::cli::Args;
+use crate::config::Config;
+use crate::config::FavoriteEntry;
+use crate::lookup;
+use crate::tlds::{apply_top_tlds, get_tlds_or_default, list_names, default_list_name};
+use crate::types::{DomainResult, DomainStatus, ErrorKind};
+
+// NOTE: fair warning — this module is the messiest part of the codebase.
+// It works and is reasonably stable, but it has not been split into tidy
+// submodules. Have fun.
+
// names and labels shown in the top bar / buttons (keys in parentheses)
const APP_NAME: &str = "hoardom";
const APP_DESC: &str = "Domain hoarding made less painful";
const CLOSE_BUTTON_LABEL: &str = "[X]";
const EXPORT_BUTTON_LABEL: &str = "[Export](F2)";
const HELP_BUTTON_LABEL: &str = "[Help](F1)";
const SEARCH_BUTTON_LABEL: &str = "[Search]";
const STOP_BUTTON_LABEL: &str = "[Stop](s)";
const CLEAR_BUTTON_LABEL: &str = "[Clear](C)";


// Layout tuning constants
// (widths/heights are terminal cells; "percent" values are of the frame width)
const TOPBAR_HEIGHT: u16 = 1;
const SEARCH_PANEL_HEIGHT: u16 = 3;
const CONTENT_MIN_HEIGHT: u16 = 5;

const SIDEBAR_TARGET_WIDTH_PERCENT: u16 = 30;
const SIDEBAR_MIN_WIDTH: u16 = 24;
const SIDEBAR_MAX_WIDTH: u16 = 26;

const SCRATCHPAD_TARGET_WIDTH_PERCENT: u16 = 30;
const SCRATCHPAD_MIN_WIDTH: u16 = 20;
const SCRATCHPAD_MAX_WIDTH: u16 = 32;

const RESULTS_MIN_WIDTH: u16 = 24;

const FAVORITES_MIN_HEIGHT: u16 = 4;
// settings panel is fixed-size: min == max pins it at SETTINGS_PANEL_HEIGHT
const SETTINGS_PANEL_HEIGHT: u16 = 8;
const SETTINGS_PANEL_MIN_HEIGHT: u16 = SETTINGS_PANEL_HEIGHT;
const SETTINGS_PANEL_MAX_HEIGHT: u16 = SETTINGS_PANEL_HEIGHT;

const DROPDOWN_MAX_WIDTH: u16 = 26;
const DROPDOWN_MAX_HEIGHT: u16 = 10;
const EXPORT_POPUP_WIDTH: u16 = 82;
const EXPORT_POPUP_HEIGHT: u16 = 10;

// minimum frame size, derived from the per-panel minimums above
const MIN_UI_WIDTH: u16 = SIDEBAR_MIN_WIDTH + RESULTS_MIN_WIDTH + SCRATCHPAD_MIN_WIDTH;
const MIN_UI_HEIGHT: u16 = TOPBAR_HEIGHT + SEARCH_PANEL_HEIGHT + CONTENT_MIN_HEIGHT + 5;
+
/// Which panel currently receives keyboard input (cycled with Tab/BackTab).
#[derive(Debug, Clone, PartialEq)]
enum Focus {
    Search,
    Scratchpad,
    Results,
    Favorites,
    Settings,
}
+
/// Quote a CSV field per RFC 4180: wrap in double quotes (doubling any
/// embedded quotes) when the value contains a comma, a quote, or a line
/// break; otherwise return it unchanged.
///
/// Fix: also quote on carriage return — a bare `\r` (or `\r\n`) inside a
/// field would otherwise break the row structure of the exported file.
fn escape_csv(value: &str) -> String {
    if value.contains([',', '"', '\n', '\r']) {
        format!("\"{}\"", value.replace('"', "\"\""))
    } else {
        value.to_string()
    }
}
+
+fn export_favorites_txt(path: &Path, favorites: &[FavoriteEntry]) -> Result<(), String> {
+ if let Some(parent) = path.parent() {
+ std::fs::create_dir_all(parent)
+ .map_err(|e| format!("Failed to create export directory: {}", e))?;
+ }
+ let text: Vec<&str> = favorites.iter().map(|f| f.domain.as_str()).collect();
+ std::fs::write(path, text.join("\n"))
+ .map_err(|e| format!("Failed to export favorites: {}", e))
+}
+
+fn export_results_csv(path: &Path, results: &[&DomainResult]) -> Result<(), String> {
+ if let Some(parent) = path.parent() {
+ std::fs::create_dir_all(parent)
+ .map_err(|e| format!("Failed to create export directory: {}", e))?;
+ }
+
+ let mut lines = vec!["domain,status,details".to_string()];
+ for result in results {
+ lines.push(format!(
+ "{},{},{}",
+ escape_csv(&result.full),
+ escape_csv(result.status_str()),
+ escape_csv(&result.note_str()),
+ ));
+ }
+
+ std::fs::write(path, lines.join("\n"))
+ .map_err(|e| format!("Failed to export results: {}", e))
+}
+
/// State of the TLD-list dropdown in the settings panel.
#[derive(Debug, Clone, PartialEq)]
enum DropdownState {
    Closed,
    Open(usize), // which option is highlighted
}
+
/// What the export popup writes: the favorites as plain text, or the
/// currently visible results as CSV.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ExportMode {
    FavoritesTxt,
    ResultsCsv,
}
+
+impl ExportMode {
+
+ fn default_file_name(self) -> &'static str {
+ match self {
+ ExportMode::FavoritesTxt => "hoardom-favorites.txt",
+ ExportMode::ResultsCsv => "hoardom-results.csv",
+ }
+ }
+
+ fn toggled(self) -> Self {
+ match self {
+ ExportMode::FavoritesTxt => ExportMode::ResultsCsv,
+ ExportMode::ResultsCsv => ExportMode::FavoritesTxt,
+ }
+ }
+}
+
/// Transient state of the export popup (opened with F2).
#[derive(Debug, Clone)]
struct ExportPopup {
    mode: ExportMode,
    // focused popup row: 0 = mode toggle, 1 = path field,
    // 2 = cancel, 3 = save (see handle_export_popup_key)
    selected_row: usize,
    path: String,
    // byte offset of the text cursor within `path`
    cursor_pos: usize,
    status: Option<String>,
    status_success: bool,
    // armed after the first Enter on an existing file; a second Enter overwrites
    confirm_overwrite: bool,
    // when set, the popup auto-closes once this instant passes
    close_at: Option<Instant>,
}
+
/// All mutable state for one TUI session.
struct App {
    // search input line; cursor_pos is a byte offset into search_input
    search_input: String,
    cursor_pos: usize,
    // lookup results paired with their stable sort index, plus list selection
    results: Vec<(usize, DomainResult)>,
    results_state: ListState,
    favorites: Vec<FavoriteEntry>,
    favorites_state: ListState,
    focus: Focus,
    // user-facing toggles (persisted via save_config)
    show_unavailable: bool,
    clear_on_search: bool,
    tld_list_name: String,
    settings_selected: Option<usize>,
    show_notes_panel: bool,
    last_fav_export_path: String,
    last_res_export_path: String,
    // free-form notes; scratchpad_cursor is a byte offset into scratchpad
    scratchpad: String,
    scratchpad_cursor: usize,
    dropdown: DropdownState,
    // in-flight search bookkeeping
    searching: bool,
    search_progress: (usize, usize),
    search_started_at: Option<Instant>,
    last_search_duration: Option<Duration>,
    status_msg: Option<String>,
    // persistence + lookup configuration
    config_path: PathBuf,
    can_save: bool,
    cache_path: Option<PathBuf>,
    force_refresh: bool,
    mouse_enabled: bool,
    top_tlds: Option<Vec<String>>,
    only_top: Option<Vec<String>>,
    imported_lists: Vec<crate::config::ImportedFilter>,
    cache_settings: crate::config::CacheSettings,
    verbose: bool,
    delay: f64,
    retries: u32,
    jobs: u8,
    panel_rects: PanelRects, // updated each frame for mouse hit detection
    // streaming channel + task of the active search (None when idle)
    stream_rx: Option<tokio::sync::mpsc::Receiver<lookup::StreamMsg>>,
    stream_task: Option<tokio::task::JoinHandle<()>>,
    patch: crate::tlds::WhoisOverrides,
    noretry: Vec<ErrorKind>,
    should_quit: bool,
    show_help: bool,
    export_popup: Option<ExportPopup>,
    // background favorites re-check channel + task (None when idle)
    fav_check_rx: Option<tokio::sync::mpsc::Receiver<lookup::StreamMsg>>,
    fav_check_task: Option<tokio::task::JoinHandle<()>>,
    checking_favorites: bool,
    backups_enabled: bool,
    backup_count: u32,
}
+
/// Screen rectangles of every clickable element, recorded during the last
/// draw so mouse events can be hit-tested against them.
/// `None` means the element was not drawn in the last frame.
#[derive(Debug, Clone, Default)]
struct PanelRects {
    topbar: Option<Rect>,
    export_button: Option<Rect>,
    help_button: Option<Rect>,
    help_popup: Option<Rect>,
    dropdown: Option<Rect>,
    search: Option<Rect>,
    search_button: Option<Rect>,
    cancel_button: Option<Rect>,
    clear_button: Option<Rect>,
    export_popup: Option<Rect>,
    export_mode_favorites: Option<Rect>,
    export_mode_results: Option<Rect>,
    export_path: Option<Rect>,
    export_cancel: Option<Rect>,
    export_save: Option<Rect>,
    scratchpad: Option<Rect>,
    results: Option<Rect>,
    favorites: Option<Rect>,
    fav_check_button: Option<Rect>,
    settings: Option<Rect>,
}
+
impl App {
    /// Build the initial TUI state from the CLI args and the loaded config.
    /// Where both supply a value, the CLI wins.
    fn new(args: &Args, config: &Config, config_path: PathBuf, can_save: bool, cache_path: Option<PathBuf>, force_refresh: bool, whois_overrides: crate::tlds::WhoisOverrides, noretry: Vec<ErrorKind>) -> Self {
        // CLI --tld-list (lowercased) overrides the persisted setting
        let tld_list_name = args
            .tld_list
            .as_ref()
            .map(|s| s.to_lowercase())
            .unwrap_or_else(|| config.settings.tld_list.clone());

        Self {
            search_input: String::new(),
            cursor_pos: 0,
            results: Vec::new(),
            results_state: ListState::default(),
            favorites: config.favorites.clone(),
            favorites_state: ListState::default(),
            focus: Focus::Search,
            show_unavailable: config.settings.show_all || args.show_all,
            clear_on_search: config.settings.clear_on_search,
            tld_list_name,
            settings_selected: Some(0),
            show_notes_panel: config.settings.show_notes_panel,
            last_fav_export_path: config.settings.last_fav_export_path.clone(),
            last_res_export_path: config.settings.last_res_export_path.clone(),
            scratchpad: config.scratchpad.clone(),
            // cursor starts at the end of the saved scratchpad text (byte offset)
            scratchpad_cursor: config.scratchpad.len(),
            dropdown: DropdownState::Closed,
            searching: false,
            search_progress: (0, 0),
            search_started_at: None,
            last_search_duration: None,
            status_msg: None,
            config_path,
            can_save,
            cache_path,
            force_refresh,
            mouse_enabled: !args.no_mouse,
            top_tlds: args.top_tlds.clone(),
            only_top: args.only_top.clone(),
            imported_lists: config.imported_filters.clone(),
            cache_settings: config.cache.clone(),
            verbose: args.verbose,
            delay: args.effective_delay(),
            retries: args.effective_retry(),
            // an explicit --jobs wins; otherwise fall back to config (min 1)
            jobs: if args.jobs.is_some() { args.effective_jobs() } else { config.settings.jobs.max(1) },
            panel_rects: PanelRects::default(),
            stream_rx: None,
            stream_task: None,
            patch: whois_overrides,
            noretry,
            should_quit: false,
            show_help: false,
            export_popup: None,
            fav_check_rx: None,
            fav_check_task: None,
            checking_favorites: false,
            backups_enabled: config.settings.backups,
            backup_count: config.settings.backup_count,
        }
    }

    /// TLDs a search will actually use: the selected list, replaced entirely
    /// by --only-top when given, then reordered by --top-tlds.
    fn get_effective_tlds(&self) -> Vec<&'static str> {
        let mut tld_vec = self.base_tlds_for_selection();

        if let Some(ref only) = self.only_top {
            // Box::leak gives the CLI-supplied strings a 'static lifetime;
            // the leak is bounded by the (small) number of --only-top values
            tld_vec = only
                .iter()
                .filter(|s| !s.is_empty())
                .map(|s| -> &'static str { Box::leak(s.clone().into_boxed_str()) })
                .collect();
        }

        if let Some(ref top) = self.top_tlds {
            tld_vec = apply_top_tlds(tld_vec, top);
        }

        tld_vec
    }

    /// Resolve the currently selected list name: builtin list first, then an
    /// imported list from the config, then the builtin default.
    fn base_tlds_for_selection(&self) -> Vec<&'static str> {
        if let Some(tlds) = crate::tlds::get_tlds(&self.tld_list_name) {
            tlds
        } else if let Some(imported) = self
            .imported_lists
            .iter()
            .find(|list| list.name == self.tld_list_name)
        {
            // leak imported names to match the &'static str list type;
            // bounded by the size of the imported list
            imported
                .tlds
                .iter()
                .map(|s| -> &'static str { Box::leak(s.clone().into_boxed_str()) })
                .collect()
        } else {
            get_tlds_or_default(default_list_name())
        }
    }

    /// All selectable list names: builtin lists plus imported ones
    /// (deduplicated by name).
    fn list_options(&self) -> Vec<String> {
        let mut options: Vec<String> = list_names().iter().map(|s| s.to_string()).collect();
        for imported in &self.imported_lists {
            if !options.contains(&imported.name) {
                options.push(imported.name.clone());
            }
        }
        options
    }

    /// Split the search input into individual queries on whitespace/commas,
    /// dropping empty fragments.
    fn parsed_queries(&self) -> Vec<String> {
        self.search_input
            .split(|c: char| c.is_whitespace() || c == ',')
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(ToOwned::to_owned)
            .collect()
    }

    /// Directory suggested for exports: cwd in debug builds (convenient
    /// during development), otherwise the home directory.
    fn default_export_dir() -> PathBuf {
        #[cfg(debug_assertions)]
        {
            if let Ok(dir) = std::env::current_dir() {
                return dir;
            }
        }

        dirs::home_dir().unwrap_or_else(|| PathBuf::from("."))
    }

    /// Initial path shown in the export popup: the per-mode last-used path,
    /// falling back to the default directory + default file name. A
    /// remembered *directory* gets the default file name appended.
    fn suggested_export_path(&self, mode: ExportMode) -> String {
        let last_export_path = match mode {
            ExportMode::FavoritesTxt => &self.last_fav_export_path,
            ExportMode::ResultsCsv => &self.last_res_export_path,
        };

        let base = if last_export_path.is_empty() {
            Self::default_export_dir().join(mode.default_file_name())
        } else {
            let last = PathBuf::from(last_export_path);
            if last.is_dir() {
                last.join(mode.default_file_name())
            } else {
                last
            }
        };
        base.display().to_string()
    }

    /// Open the export popup in favorites mode with a suggested path and the
    /// cursor at the end of it.
    fn open_export_popup(&mut self) {
        let mode = ExportMode::FavoritesTxt;
        let path = self.suggested_export_path(mode);
        self.export_popup = Some(ExportPopup {
            mode,
            selected_row: 0,
            cursor_pos: path.len(),
            path,
            status: None,
            status_success: false,
            confirm_overwrite: false,
            close_at: None,
        });
    }

    /// Switch the export popup to `mode`, resetting path, cursor, status and
    /// the overwrite confirmation.
    fn set_export_mode(&mut self, mode: ExportMode) {
        let suggested_path = self.suggested_export_path(mode);
        if let Some(popup) = &mut self.export_popup {
            popup.mode = mode;
            popup.path = suggested_path;
            popup.cursor_pos = popup.path.len();
            popup.status = None;
            popup.status_success = false;
            popup.confirm_overwrite = false;
            popup.close_at = None;
        }
    }

    /// Drop all results, selection and progress, and announce it in the
    /// status line.
    fn clear_results(&mut self) {
        self.results.clear();
        self.results_state.select(None);
        self.search_progress = (0, 0);
        self.status_msg = Some("Results cleared".to_string());
    }
    /// Persist the current settings/favorites/scratchpad to the config file.
    /// Silently a no-op when saving is disabled; write errors are ignored
    /// (best-effort persistence from UI event handlers).
    fn save_config(&self) {
        if !self.can_save {
            return;
        }
        let config = Config {
            settings: crate::config::Settings {
                tld_list: self.tld_list_name.clone(),
                show_all: self.show_unavailable,
                clear_on_search: self.clear_on_search,
                show_notes_panel: self.show_notes_panel,
                last_fav_export_path: self.last_fav_export_path.clone(),
                last_res_export_path: self.last_res_export_path.clone(),
                top_tlds: self.top_tlds.clone().unwrap_or_default(),
                jobs: self.jobs,
                noretry: self.noretry.iter().map(|k| k.to_config_str().to_string()).collect(),
                backups: self.backups_enabled,
                backup_count: self.backup_count,
            },
            favorites: self.favorites.clone(),
            imported_filters: self.imported_lists.clone(),
            cache: self.cache_settings.clone(),
            scratchpad: self.scratchpad.clone(),
        };
        let _ = config.save(&self.config_path);
    }

    /// Add `domain` (lowercased) to favorites unless already present,
    /// inheriting the status from a matching result when we have one, and
    /// persist the change.
    fn add_favorite(&mut self, domain: &str) {
        let d = domain.to_lowercase();
        if !self.favorites.iter().any(|f| f.domain == d) {
            // check if we just looked this domain up - inherit its status
            let status = self.results.iter()
                .find(|(_, r)| r.full.to_lowercase() == d)
                .map(|(_, r)| r.status_str().to_string())
                .unwrap_or_else(|| "unknown".to_string());
            // only stamp a check time when we actually know the status
            let checked = if status != "unknown" {
                chrono::Utc::now().to_rfc3339()
            } else {
                String::new()
            };
            self.favorites.push(FavoriteEntry {
                domain: d,
                status,
                checked,
                changed: false,
            });
            self.save_config();
        }
    }

    /// Remove the currently selected favorite (if any), keep the selection
    /// in range, and persist the change.
    fn remove_focused_favorite(&mut self) {
        if let Some(idx) = self.favorites_state.selected() {
            if idx < self.favorites.len() {
                self.favorites.remove(idx);
                // adjust selection
                if !self.favorites.is_empty() {
                    if idx >= self.favorites.len() {
                        self.favorites_state.select(Some(self.favorites.len() - 1));
                    }
                } else {
                    self.favorites_state.select(None);
                }
                self.save_config();
            }
        }
    }

    /// Results to display: everything, or only available domains when the
    /// show-unavailable toggle is off.
    fn visible_results(&self) -> Vec<&DomainResult> {
        if self.show_unavailable {
            self.results.iter().map(|(_, r)| r).collect()
        } else {
            self.results.iter().filter(|(_, r)| r.is_available()).map(|(_, r)| r).collect()
        }
    }

    /// Select the TLD list at `idx` in `list_options()`, falling back to the
    /// default list for out-of-range indices, and persist the choice.
    fn set_tld_list_by_index(&mut self, idx: usize) {
        let options = self.list_options();
        if let Some(selected) = options.get(idx) {
            self.tld_list_name = selected.clone();
        } else {
            self.tld_list_name = default_list_name().to_string();
        }
        self.save_config();
    }

    /// Index of the current TLD list within `list_options()` (0 if the name
    /// is not found).
    fn tld_list_index(&self) -> usize {
        self.list_options()
            .iter()
            .position(|option| option == &self.tld_list_name)
            .unwrap_or(0)
    }

    /// Move keyboard focus, making sure the settings panel always has a
    /// selected row.
    fn set_focus(&mut self, focus: Focus) {
        self.focus = focus;
        if self.focus == Focus::Settings && self.settings_selected.is_none() {
            self.settings_selected = Some(0);
        }
    }

    /// Hit-test a mouse position against the panel rectangles recorded in
    /// the last draw; returns the panel under (col, row) if any.
    fn panel_at(&self, col: u16, row: u16) -> Option<Focus> {
        let pos = (col, row);
        if let Some(r) = self.panel_rects.search {
            if contains_pos(r, pos) {
                return Some(Focus::Search);
            }
        }
        if let Some(r) = self.panel_rects.scratchpad {
            if contains_pos(r, pos) {
                return Some(Focus::Scratchpad);
            }
        }
        if let Some(r) = self.panel_rects.results {
            if contains_pos(r, pos) {
                return Some(Focus::Results);
            }
        }
        if let Some(r) = self.panel_rects.favorites {
            if contains_pos(r, pos) {
                return Some(Focus::Favorites);
            }
        }
        if let Some(r) = self.panel_rects.settings {
            if contains_pos(r, pos) {
                return Some(Focus::Settings);
            }
        }
        None
    }
}
+
+fn contains_pos(rect: Rect, pos: (u16, u16)) -> bool {
+ pos.0 >= rect.x
+ && pos.0 < rect.x + rect.width
+ && pos.1 >= rect.y
+ && pos.1 < rect.y + rect.height
+}
+
/// Clamp a panel dimension into `[min, max]`.
///
/// Uses `u16::clamp`, the idiomatic form of `value.max(min).min(max)`
/// (clippy: `manual_clamp`). All call sites pass compile-time constants
/// with `min <= max`; unlike the old expression, `clamp` panics if that
/// invariant is ever violated instead of silently returning `max`.
fn clamp_panel_size(value: u16, min: u16, max: u16) -> u16 {
    value.clamp(min, max)
}
+
+fn export_popup_rect(area: Rect) -> Rect {
+ let width = EXPORT_POPUP_WIDTH.min(area.width.saturating_sub(4));
+ let height = EXPORT_POPUP_HEIGHT.min(area.height.saturating_sub(4));
+ Rect {
+ x: area.x + (area.width.saturating_sub(width)) / 2,
+ y: area.y + (area.height.saturating_sub(height)) / 2,
+ width,
+ height,
+ }
+}
+
+fn help_popup_rect(area: Rect) -> Rect {
+ let width = if area.width > 54 {
+ area.width.saturating_sub(6).min(58)
+ } else {
+ area.width.saturating_sub(2)
+ };
+ let height = if area.height > 22 {
+ 20
+ } else {
+ area.height.saturating_sub(2)
+ };
+
+ Rect {
+ x: area.x + (area.width.saturating_sub(width)) / 2,
+ y: area.y + (area.height.saturating_sub(height)) / 2,
+ width,
+ height,
+ }
+}
+
/// Launch the interactive TUI: set up the terminal (raw mode, alternate
/// screen, optional mouse capture), build the `App`, run the event loop,
/// and restore the terminal afterwards — even when `run_app` errors.
pub async fn run_tui(
    args: &Args,
    config: &Config,
    paths: crate::config::HoardomPaths,
    cache_file: Option<PathBuf>,
    force_refresh: bool,
    whois_overrides: crate::tlds::WhoisOverrides,
    noretry: Vec<ErrorKind>,
) -> io::Result<()> {
    // terminal setup
    enable_raw_mode()?;
    let mut stdout = io::stdout();
    if !args.no_mouse {
        execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
    } else {
        execute!(stdout, EnterAlternateScreen)?;
    }
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;

    let mut app = App::new(args, config, paths.config_file.clone(), paths.can_save, cache_file, force_refresh, whois_overrides, noretry);

    if !paths.can_save {
        app.status_msg = Some("Warning: favorites and settings wont be saved".to_string());
    }

    let result = run_app(&mut terminal, &mut app).await;

    // put the terminal back to normal
    if !args.no_mouse {
        execute!(
            terminal.backend_mut(),
            DisableMouseCapture
        )?;

        // drain any still-queued events so they don't leak into the shell
        while event::poll(std::time::Duration::from_millis(0))? {
            let _ = event::read();
        }

        execute!(terminal.backend_mut(), LeaveAlternateScreen)?;
    } else {
        execute!(terminal.backend_mut(), LeaveAlternateScreen)?;
    }
    terminal.backend_mut().flush()?;
    disable_raw_mode()?;
    terminal.show_cursor()?;

    result
}
+
/// Main event loop: each iteration drains any streaming lookup/favorite-check
/// messages, redraws the UI, then polls (with a 50ms timeout) for keyboard and
/// mouse events. Returns when `app.should_quit` is set.
async fn run_app(
    terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    app: &mut App,
) -> io::Result<()> {
    loop {
        if app.should_quit {
            return Ok(());
        }

        // auto-dismiss the export popup once its close deadline has passed
        if let Some(popup) = app.export_popup.as_ref() {
            if popup.close_at.is_some_and(|deadline| Instant::now() >= deadline) {
                app.export_popup = None;
            }
        }

        // poll streaming results if a search is in progress
        if let Some(ref mut rx) = app.stream_rx {
            // drain all available messages without blocking
            loop {
                match rx.try_recv() {
                    Ok(lookup::StreamMsg::Result { result, sort_index }) => {
                        // insert in sorted position to maintain list order
                        let pos = app.results.partition_point(|(idx, _)| *idx < sort_index);
                        app.results.insert(pos, (sort_index, result));
                        // auto-select first result
                        if app.results.len() == 1 {
                            app.results_state.select(Some(0));
                        }
                    }
                    Ok(lookup::StreamMsg::Progress { current, total }) => {
                        app.search_progress = (current, total);
                    }
                    Ok(lookup::StreamMsg::Error(msg)) => {
                        app.status_msg = Some(format!("Error: {}", msg));
                    }
                    Ok(lookup::StreamMsg::Done) => {
                        // search finished: record duration, tear down the channel
                        app.searching = false;
                        app.last_search_duration = app.search_started_at.map(|t| t.elapsed());
                        app.search_started_at = None;
                        app.status_msg = None;
                        app.stream_rx = None;
                        app.stream_task = None;
                        break;
                    }
                    Err(tokio::sync::mpsc::error::TryRecvError::Empty) => break,
                    // sender dropped without Done: treat it like a finished search
                    Err(tokio::sync::mpsc::error::TryRecvError::Disconnected) => {
                        app.searching = false;
                        app.last_search_duration = app.search_started_at.map(|t| t.elapsed());
                        app.search_started_at = None;
                        app.status_msg = None;
                        app.stream_rx = None;
                        app.stream_task = None;
                        break;
                    }
                }
            }
        }

        // poll favorites check results
        if let Some(ref mut rx) = app.fav_check_rx {
            loop {
                match rx.try_recv() {
                    Ok(lookup::StreamMsg::Result { result, .. }) => {
                        // Update the matching favorite's status
                        let domain_lower = result.full.to_lowercase();
                        if let Some(fav) = app.favorites.iter_mut().find(|f| f.domain == domain_lower) {
                            let new_status = result.status_str().to_string();
                            // only flag a change when there was a real previous status
                            if fav.status != new_status && fav.status != "unknown" {
                                fav.changed = true;
                            }
                            fav.status = new_status;
                            fav.checked = chrono::Utc::now().to_rfc3339();
                        }
                    }
                    Ok(lookup::StreamMsg::Done) => {
                        app.checking_favorites = false;
                        app.fav_check_rx = None;
                        app.fav_check_task = None;
                        app.save_config();
                        break;
                    }
                    Ok(_) => {} // progress/error - dont care for fav checks
                    Err(tokio::sync::mpsc::error::TryRecvError::Empty) => break,
                    Err(tokio::sync::mpsc::error::TryRecvError::Disconnected) => {
                        app.checking_favorites = false;
                        app.fav_check_rx = None;
                        app.fav_check_task = None;
                        app.save_config();
                        break;
                    }
                }
            }
        }

        terminal.draw(|f| draw_ui(f, app))?;

        // poll for events with a timeout so we can update during searches
        if event::poll(std::time::Duration::from_millis(50))? {
            match event::read()? {
                Event::Key(key) if key.kind == KeyEventKind::Press => {
                    // F1 toggles help from any focus
                    if matches!(key.code, KeyCode::F(1)) {
                        app.show_help = !app.show_help;
                        continue;
                    }

                    // F2 toggles the export popup from any focus
                    if matches!(key.code, KeyCode::F(2)) {
                        if app.export_popup.is_some() {
                            app.export_popup = None;
                        } else {
                            app.open_export_popup();
                        }
                        continue;
                    }

                    // Ctrl-C always quits
                    if key.modifiers.contains(KeyModifiers::CONTROL)
                        && matches!(key.code, KeyCode::Char('c'))
                    {
                        quit_app(app);
                        continue;
                    }

                    // the export popup is modal: it swallows all remaining keys
                    if app.export_popup.is_some() {
                        handle_export_popup_key(app, key.code);
                        continue;
                    }

                    // 's' cancels a running search
                    if app.searching && matches!(key.code, KeyCode::Char('s' | 'S')) {
                        cancel_search(app);
                        continue;
                    }

                    // 'C' clears results (only relevant when clear-on-search is off)
                    if !app.searching
                        && !app.clear_on_search
                        && matches!(key.code, KeyCode::Char('C'))
                    {
                        app.clear_results();
                        continue;
                    }

                    // close dropdown on any key if its open (unless its the dropdown interaction)
                    if app.dropdown != DropdownState::Closed && app.focus != Focus::Settings {
                        app.dropdown = DropdownState::Closed;
                    }

                    match key.code {
                        KeyCode::Esc => {
                            // Esc closes help first, then falls through to the panel
                            if app.show_help {
                                app.show_help = false;
                                continue;
                            }
                            handle_escape_for_panel(app);
                            continue;
                        }
                        // tab between panels
                        KeyCode::Tab => {
                            app.dropdown = DropdownState::Closed;
                            app.set_focus(match app.focus {
                                Focus::Search => {
                                    if app.show_notes_panel { Focus::Scratchpad } else { Focus::Results }
                                }
                                Focus::Scratchpad => Focus::Results,
                                Focus::Results => Focus::Favorites,
                                Focus::Favorites => Focus::Settings,
                                Focus::Settings => Focus::Search,
                            });
                        }
                        KeyCode::BackTab => {
                            app.dropdown = DropdownState::Closed;
                            app.set_focus(match app.focus {
                                Focus::Search => Focus::Settings,
                                Focus::Scratchpad => Focus::Search,
                                Focus::Results => {
                                    if app.show_notes_panel { Focus::Scratchpad } else { Focus::Search }
                                }
                                Focus::Favorites => Focus::Results,
                                Focus::Settings => Focus::Favorites,
                            });
                        }
                        _ => {
                            handle_key_for_panel(app, key.code).await;
                        }
                    }
                }
                Event::Mouse(mouse) => {
                    if app.mouse_enabled {
                        handle_mouse(app, mouse);
                    }
                }
                _ => {}
            }
        }
    }
}
+
/// Route a key press to the handler of the currently focused panel.
/// (Async only because the search handler may start an async lookup.)
async fn handle_key_for_panel(app: &mut App, key: KeyCode) {
    match app.focus {
        Focus::Search => handle_search_key(app, key).await,
        Focus::Scratchpad => handle_scratchpad_key(app, key),
        Focus::Results => handle_results_key(app, key),
        Focus::Favorites => handle_favorites_key(app, key),
        Focus::Settings => handle_settings_key(app, key),
    }
}
+
/// Perform the export described by the open popup (no-op if none is open).
///
/// The first Enter on an already-existing file only arms
/// `confirm_overwrite`; a second Enter actually overwrites. On success the
/// chosen path is remembered per-mode and the popup is scheduled to close.
fn run_export(app: &mut App) {
    let Some(popup) = app.export_popup.as_ref() else {
        return;
    };

    // copy what we need out of the popup so we can re-borrow app mutably below
    let mode = popup.mode;
    let path_text = popup.path.clone();
    let path = PathBuf::from(&path_text);

    // overwrite guard: require a second confirmation for existing files
    if path.is_file() && !popup.confirm_overwrite {
        if let Some(popup) = app.export_popup.as_mut() {
            popup.status = Some("File exists. Press Enter again to overwrite.".to_string());
            popup.status_success = false;
            popup.confirm_overwrite = true;
            popup.close_at = None;
        }
        return;
    }

    let result = match mode {
        ExportMode::FavoritesTxt => export_favorites_txt(&path, &app.favorites),
        ExportMode::ResultsCsv => {
            // only the currently visible (filtered) results are exported
            let visible = app.visible_results();
            if visible.is_empty() {
                Err("No results to export".to_string())
            } else {
                export_results_csv(&path, &visible)
            }
        }
    };

    match result {
        Ok(()) => {
            // remember the path separately per export mode
            match mode {
                ExportMode::FavoritesTxt => app.last_fav_export_path = path_text.clone(),
                ExportMode::ResultsCsv => app.last_res_export_path = path_text.clone(),
            }
            if let Some(popup) = app.export_popup.as_mut() {
                popup.status = Some("Success".to_string());
                popup.status_success = true;
                popup.confirm_overwrite = false;
                // auto-close the popup shortly after a successful export
                popup.close_at = Some(Instant::now() + Duration::from_secs(2));
            }
            app.save_config();
        }
        Err(err) => {
            if let Some(popup) = app.export_popup.as_mut() {
                popup.status = Some(err);
                popup.status_success = false;
                popup.confirm_overwrite = false;
                popup.close_at = None;
            }
        }
    }
}
+
/// Key handling while the export popup is open (it is modal).
///
/// Rows: 0 = mode toggle, 1 = path text field, 2 = cancel button,
/// 3 = save button. `cursor_pos` is a byte offset into `path`. Any edit to
/// the path clears the status line and disarms the overwrite confirmation.
fn handle_export_popup_key(app: &mut App, key: KeyCode) {
    let Some(popup) = app.export_popup.as_mut() else {
        return;
    };

    match key {
        KeyCode::Esc => {
            app.export_popup = None;
        }
        // Tab/arrows cycle through the four rows
        KeyCode::Tab | KeyCode::Down => {
            popup.selected_row = (popup.selected_row + 1) % 4;
        }
        KeyCode::BackTab | KeyCode::Up => {
            popup.selected_row = if popup.selected_row == 0 { 3 } else { popup.selected_row - 1 };
        }
        KeyCode::Left => {
            // row 0: toggle mode; row 3: jump to cancel; row 1: move the cursor
            if popup.selected_row == 0 {
                let mode = popup.mode.toggled();
                app.set_export_mode(mode);
            } else if popup.selected_row == 3 {
                popup.selected_row = 2;
            } else if popup.selected_row == 1 && popup.cursor_pos > 0 {
                popup.cursor_pos -= 1;
            }
        }
        KeyCode::Right => {
            // row 0: toggle mode; row 2: jump to save; row 1: move the cursor
            if popup.selected_row == 0 {
                let mode = popup.mode.toggled();
                app.set_export_mode(mode);
            } else if popup.selected_row == 2 {
                popup.selected_row = 3;
            } else if popup.selected_row == 1 && popup.cursor_pos < popup.path.len() {
                popup.cursor_pos += 1;
            }
        }
        KeyCode::Home => {
            if popup.selected_row == 1 {
                popup.cursor_pos = 0;
            }
        }
        KeyCode::End => {
            if popup.selected_row == 1 {
                popup.cursor_pos = popup.path.len();
            }
        }
        KeyCode::Backspace => {
            if popup.selected_row == 1 && popup.cursor_pos > 0 {
                popup.cursor_pos -= 1;
                popup.path.remove(popup.cursor_pos);
                popup.status = None;
                popup.status_success = false;
                popup.confirm_overwrite = false;
                popup.close_at = None;
            }
        }
        KeyCode::Delete => {
            if popup.selected_row == 1 && popup.cursor_pos < popup.path.len() {
                popup.path.remove(popup.cursor_pos);
                popup.status = None;
                popup.status_success = false;
                popup.confirm_overwrite = false;
                popup.close_at = None;
            }
        }
        KeyCode::Char(c) => {
            if popup.selected_row == 1 {
                popup.path.insert(popup.cursor_pos, c);
                popup.cursor_pos += 1;
                popup.status = None;
                popup.status_success = false;
                popup.confirm_overwrite = false;
                popup.close_at = None;
            }
        }
        // Enter activates the focused row
        KeyCode::Enter => match popup.selected_row {
            0 => {
                let mode = popup.mode.toggled();
                app.set_export_mode(mode);
            }
            1 => run_export(app),
            2 => app.export_popup = None,
            3 => run_export(app),
            _ => {}
        },
        _ => {}
    }
}
+
+fn handle_scratchpad_key(app: &mut App, key: KeyCode) {
+ match key {
+ KeyCode::Char(c) => {
+ app.scratchpad.insert(app.scratchpad_cursor, c);
+ app.scratchpad_cursor += 1;
+ app.save_config();
+ }
+ KeyCode::Enter => {
+ app.scratchpad.insert(app.scratchpad_cursor, '\n');
+ app.scratchpad_cursor += 1;
+ app.save_config();
+ }
+ KeyCode::Backspace => {
+ if app.scratchpad_cursor > 0 {
+ app.scratchpad_cursor -= 1;
+ app.scratchpad.remove(app.scratchpad_cursor);
+ app.save_config();
+ }
+ }
+ KeyCode::Delete => {
+ if app.scratchpad_cursor < app.scratchpad.len() {
+ app.scratchpad.remove(app.scratchpad_cursor);
+ app.save_config();
+ }
+ }
+ KeyCode::Left => {
+ if app.scratchpad_cursor > 0 {
+ app.scratchpad_cursor -= 1;
+ }
+ }
+ KeyCode::Right => {
+ if app.scratchpad_cursor < app.scratchpad.len() {
+ app.scratchpad_cursor += 1;
+ }
+ }
+ KeyCode::Up => {
+ move_scratchpad_cursor_vertical(app, -1);
+ }
+ KeyCode::Down => {
+ move_scratchpad_cursor_vertical(app, 1);
+ }
+ KeyCode::Home => {
+ app.scratchpad_cursor = 0;
+ }
+ KeyCode::End => {
+ app.scratchpad_cursor = app.scratchpad.len();
+ }
+ _ => {}
+ }
+}
+
+async fn handle_search_key(app: &mut App, key: KeyCode) {
+ if app.searching {
+ return;
+ }
+ match key {
+ KeyCode::Enter => {
+ if !app.search_input.is_empty() && !app.searching {
+ start_search(app);
+ }
+ }
+ KeyCode::Char(c) => {
+ // only allow valid domain chars (alphanumeric, hyphen, dot, space for multi query)
+ if c.is_ascii_alphanumeric() || c == '-' || c == '.' || c == ' ' {
+ app.search_input.insert(app.cursor_pos, c);
+ app.cursor_pos += c.len_utf8();
+ }
+ }
+ KeyCode::Backspace => {
+ if app.cursor_pos > 0 {
+ // Find the previous char boundary
+ let prev = app.search_input[..app.cursor_pos]
+ .char_indices()
+ .next_back()
+ .map(|(i, _)| i)
+ .unwrap_or(0);
+ app.search_input.remove(prev);
+ app.cursor_pos = prev;
+ }
+ }
+ KeyCode::Delete => {
+ if app.cursor_pos < app.search_input.len() && app.search_input.is_char_boundary(app.cursor_pos) {
+ app.search_input.remove(app.cursor_pos);
+ }
+ }
+ KeyCode::Left => {
+ if app.cursor_pos > 0 {
+ app.cursor_pos = app.search_input[..app.cursor_pos]
+ .char_indices()
+ .next_back()
+ .map(|(i, _)| i)
+ .unwrap_or(0);
+ }
+ }
+ KeyCode::Right => {
+ if app.cursor_pos < app.search_input.len() {
+ app.cursor_pos = app.search_input[app.cursor_pos..]
+ .char_indices()
+ .nth(1)
+ .map(|(i, _)| app.cursor_pos + i)
+ .unwrap_or(app.search_input.len());
+ }
+ }
+ KeyCode::Home => {
+ app.cursor_pos = 0;
+ }
+ KeyCode::End => {
+ app.cursor_pos = app.search_input.len();
+ }
+ _ => {}
+ }
+}
+
+fn handle_results_key(app: &mut App, key: KeyCode) {
+ let visible = app.visible_results();
+ let len = visible.len();
+ if len == 0 {
+ return;
+ }
+
+ match key {
+ KeyCode::Up => {
+ let i = match app.results_state.selected() {
+ Some(i) => {
+ if i > 0 { i - 1 } else { 0 }
+ }
+ None => 0,
+ };
+ app.results_state.select(Some(i));
+ }
+ KeyCode::Down => {
+ let i = match app.results_state.selected() {
+ Some(i) => {
+ if i + 1 < len { i + 1 } else { i }
+ }
+ None => 0,
+ };
+ app.results_state.select(Some(i));
+ }
+ KeyCode::Enter => {
+ // add focused domain to favorites
+ if let Some(idx) = app.results_state.selected() {
+ let visible = app.visible_results();
+ if let Some(result) = visible.get(idx) {
+ let domain = result.full.clone();
+ app.add_favorite(&domain);
+ }
+ }
+ }
+ _ => {}
+ }
+}
+
+fn handle_favorites_key(app: &mut App, key: KeyCode) {
+ let len = app.favorites.len();
+ if len == 0 {
+ return;
+ }
+
+ match key {
+ KeyCode::Up => {
+ let i = match app.favorites_state.selected() {
+ Some(i) => {
+ if i > 0 { i - 1 } else { 0 }
+ }
+ None => 0,
+ };
+ app.favorites_state.select(Some(i));
+ }
+ KeyCode::Down => {
+ let i = match app.favorites_state.selected() {
+ Some(i) => {
+ if i + 1 < len { i + 1 } else { i }
+ }
+ None => 0,
+ };
+ app.favorites_state.select(Some(i));
+ }
+ KeyCode::Enter => {
+ // acknowledge status change - clear the ! marker
+ if let Some(idx) = app.favorites_state.selected() {
+ if let Some(fav) = app.favorites.get_mut(idx) {
+ if fav.changed {
+ fav.changed = false;
+ app.save_config();
+ }
+ }
+ }
+ }
+ KeyCode::Backspace | KeyCode::Delete => {
+ app.remove_focused_favorite();
+ }
+ KeyCode::Char('c') | KeyCode::Char('C') => {
+ start_fav_check(app);
+ }
+ _ => {}
+ }
+}
+
/// Key handling for the settings panel, which is a two-level state machine:
/// while the TLD-list dropdown is open, keys navigate/commit it; otherwise
/// they move between settings rows and toggle/adjust the focused one.
///
/// Rows: 0 = TLD list (Enter opens the dropdown), 1 = show unavailable,
/// 2 = notes panel, 3 = clear-on-search, 4 = parallel jobs count.
fn handle_settings_key(app: &mut App, key: KeyCode) {
    match &app.dropdown {
        DropdownState::Open(current) => {
            let current = *current;
            let option_count = app.list_options().len();
            match key {
                // Up/Down wrap around the option list
                KeyCode::Up => {
                    let new = if option_count == 0 {
                        0
                    } else if current > 0 {
                        current - 1
                    } else {
                        option_count - 1
                    };
                    app.dropdown = DropdownState::Open(new);
                }
                KeyCode::Down => {
                    let new = if option_count == 0 {
                        0
                    } else if current + 1 < option_count {
                        current + 1
                    } else {
                        0
                    };
                    app.dropdown = DropdownState::Open(new);
                }
                // Enter commits the highlighted list and closes the dropdown
                KeyCode::Enter => {
                    app.set_tld_list_by_index(current);
                    app.dropdown = DropdownState::Closed;
                    app.settings_selected = Some(0);
                }
                KeyCode::Esc => {
                    app.dropdown = DropdownState::Closed;
                }
                _ => {}
            }
        }
        DropdownState::Closed => {
            match key {
                // Up/Down wrap around the five settings rows
                KeyCode::Up => {
                    app.settings_selected = Some(match app.settings_selected.unwrap_or(0) {
                        0 => 4,
                        n => n - 1,
                    });
                }
                KeyCode::Down => {
                    app.settings_selected = Some(match app.settings_selected.unwrap_or(0) {
                        0 => 1,
                        1 => 2,
                        2 => 3,
                        3 => 4,
                        _ => 0,
                    });
                }
                // Enter activates the focused row (opens dropdown / toggles /
                // increments jobs, wrapping 99 -> 1); each change is persisted
                KeyCode::Enter => {
                    match app.settings_selected.unwrap_or(0) {
                        0 => {
                            app.dropdown = DropdownState::Open(app.tld_list_index());
                        }
                        1 => {
                            app.show_unavailable = !app.show_unavailable;
                            app.save_config();
                        }
                        2 => {
                            app.show_notes_panel = !app.show_notes_panel;
                            // don't leave focus on a panel that is now hidden
                            if !app.show_notes_panel && app.focus == Focus::Scratchpad {
                                app.set_focus(Focus::Results);
                            }
                            app.save_config();
                        }
                        3 => {
                            app.clear_on_search = !app.clear_on_search;
                            app.save_config();
                        }
                        4 => {
                            // increment jobs (wrap at 99 -> 1)
                            app.jobs = if app.jobs >= 99 { 1 } else { app.jobs + 1 };
                            app.save_config();
                        }
                        _ => {}
                    }
                }
                // Space toggles the boolean rows only
                KeyCode::Char(' ') => {
                    match app.settings_selected.unwrap_or(0) {
                        1 => {
                            app.show_unavailable = !app.show_unavailable;
                            app.save_config();
                        }
                        2 => {
                            app.show_notes_panel = !app.show_notes_panel;
                            if !app.show_notes_panel && app.focus == Focus::Scratchpad {
                                app.set_focus(Focus::Results);
                            }
                            app.save_config();
                        }
                        3 => {
                            app.clear_on_search = !app.clear_on_search;
                            app.save_config();
                        }
                        _ => {}
                    }
                }
                // +/-/Left/Right adjust the jobs row, clamped to 1..=99
                KeyCode::Char('+') | KeyCode::Char('=') => {
                    if app.settings_selected == Some(4) {
                        app.jobs = if app.jobs >= 99 { 99 } else { app.jobs + 1 };
                        app.save_config();
                    }
                }
                KeyCode::Char('-') => {
                    if app.settings_selected == Some(4) {
                        app.jobs = if app.jobs <= 1 { 1 } else { app.jobs - 1 };
                        app.save_config();
                    }
                }
                KeyCode::Left => {
                    if app.settings_selected == Some(4) {
                        app.jobs = if app.jobs <= 1 { 1 } else { app.jobs - 1 };
                        app.save_config();
                    }
                }
                KeyCode::Right => {
                    if app.settings_selected == Some(4) {
                        app.jobs = if app.jobs >= 99 { 99 } else { app.jobs + 1 };
                        app.save_config();
                    }
                }
                _ => {}
            }
        }
    }
}
+
/// Dispatch a mouse event against the panel rects cached by the last draw.
///
/// Left-press does hit-testing in priority order: the help and export popups
/// are modal and swallow all clicks while open; then the TLD dropdown
/// overlay, top bar, search bar, scratchpad, settings panel, results list,
/// fav-check button, favorites list, and finally a generic focus switch.
/// Left-release on the close button quits; wheel events scroll the focused
/// list. The ordering of these checks is load-bearing — each earlier region
/// must win over anything it overlaps.
fn handle_mouse(app: &mut App, mouse: MouseEvent) {
    match mouse.kind {
        MouseEventKind::Down(MouseButton::Left) => {
            let col = mouse.column;
            let row = mouse.row;

            // help popup is modal: a click outside closes it, a click
            // inside is ignored
            if let Some(help_popup) = app.panel_rects.help_popup {
                if !contains_pos(help_popup, (col, row)) {
                    app.show_help = false;
                }
                return;
            }

            // export popup is modal too: outside closes it, inside hit-tests
            // the mode toggles, path field, and Cancel/Save buttons
            if let Some(export_popup) = app.panel_rects.export_popup {
                if !contains_pos(export_popup, (col, row)) {
                    app.export_popup = None;
                    return;
                }

                if let Some(mode_rect) = app.panel_rects.export_mode_favorites {
                    if contains_pos(mode_rect, (col, row)) {
                        app.set_export_mode(ExportMode::FavoritesTxt);
                        if let Some(popup) = app.export_popup.as_mut() {
                            popup.selected_row = 0;
                        }
                        return;
                    }
                }

                if let Some(mode_rect) = app.panel_rects.export_mode_results {
                    if contains_pos(mode_rect, (col, row)) {
                        app.set_export_mode(ExportMode::ResultsCsv);
                        if let Some(popup) = app.export_popup.as_mut() {
                            popup.selected_row = 0;
                        }
                        return;
                    }
                }

                if let Some(path_rect) = app.panel_rects.export_path {
                    if contains_pos(path_rect, (col, row)) {
                        if let Some(popup) = app.export_popup.as_mut() {
                            popup.selected_row = 1;
                            // place the text cursor where the user clicked;
                            // +1 skips the field's left border column
                            let clicked = col.saturating_sub(path_rect.x + 1) as usize;
                            popup.cursor_pos = clicked.min(popup.path.len());
                        }
                        return;
                    }
                }

                if let Some(cancel_rect) = app.panel_rects.export_cancel {
                    if contains_pos(cancel_rect, (col, row)) {
                        app.export_popup = None;
                        return;
                    }
                }

                if let Some(save_rect) = app.panel_rects.export_save {
                    if contains_pos(save_rect, (col, row)) {
                        if let Some(popup) = app.export_popup.as_mut() {
                            popup.selected_row = 3;
                        }
                        run_export(app);
                        return;
                    }
                }

                // click landed on popup chrome; swallow it
                return;
            }

            // open TLD dropdown overlay: clicking an entry selects it,
            // clicking anywhere else just closes the dropdown
            if let Some(dropdown_rect) = app.panel_rects.dropdown {
                if contains_pos(dropdown_rect, (col, row)) {
                    // +1 skips the dropdown's top border row
                    let item_row = row.saturating_sub(dropdown_rect.y + 1) as usize;
                    let options = app.list_options();
                    if item_row < options.len() {
                        app.set_tld_list_by_index(item_row);
                    }
                    app.dropdown = DropdownState::Closed;
                    return;
                }

                // click outside: close and fall through to the panel checks
                app.dropdown = DropdownState::Closed;
                app.panel_rects.dropdown = None;
            }

            // top bar: press inside the close-button area does nothing
            // (quit fires on release instead); export/help buttons toggle
            // their popups
            if let Some(topbar) = app.panel_rects.topbar {
                if contains_pos(topbar, (col, row)) {
                    let close_end = topbar.x + 2;
                    if col <= close_end {
                        return;
                    }
                    if let Some(export_button) = app.panel_rects.export_button {
                        if contains_pos(export_button, (col, row)) {
                            app.open_export_popup();
                            return;
                        }
                    }
                    if let Some(help_button) = app.panel_rects.help_button {
                        if contains_pos(help_button, (col, row)) {
                            app.show_help = !app.show_help;
                            return;
                        }
                    }
                    return;
                }
            }

            // search bar: take focus, then hit-test the inline buttons
            if let Some(search_rect) = app.panel_rects.search {
                if contains_pos(search_rect, (col, row)) {
                    app.set_focus(Focus::Search);

                    if let Some(search_button) = app.panel_rects.search_button {
                        if contains_pos(search_button, (col, row)) {
                            // only start when idle and there is input
                            if !app.searching && !app.search_input.is_empty() {
                                start_search(app);
                            }
                            return;
                        }
                    }

                    if let Some(cancel_button) = app.panel_rects.cancel_button {
                        if contains_pos(cancel_button, (col, row)) {
                            if app.searching {
                                cancel_search(app);
                            }
                            return;
                        }
                    }

                    if let Some(clear_button) = app.panel_rects.clear_button {
                        if contains_pos(clear_button, (col, row)) {
                            app.clear_results();
                            return;
                        }
                    }

                    return;
                }
            }

            // scratchpad: focus it and move the cursor to the end of the text
            if let Some(scratchpad_rect) = app.panel_rects.scratchpad {
                if contains_pos(scratchpad_rect, (col, row)) {
                    app.set_focus(Focus::Scratchpad);
                    app.scratchpad_cursor = app.scratchpad.len();
                    return;
                }
            }

            // check if clicking inside settings panel for interactive elements
            if let Some(settings_rect) = app.panel_rects.settings {
                if contains_pos(settings_rect, (col, row)) {
                    app.set_focus(Focus::Settings);

                    // row offsets within settings panel (1 = border)
                    let local_row = row.saturating_sub(settings_rect.y + 1);

                    if app.dropdown == DropdownState::Closed {
                        // row 0 = TLD list line, row 1 = checkbox line
                        if local_row == 0 {
                            app.settings_selected = Some(0);
                            // open TLD dropdown
                            app.dropdown = DropdownState::Open(app.tld_list_index());
                        } else if local_row == 1 {
                            app.settings_selected = Some(1);
                            // toggle show unavailable checkbox
                            app.show_unavailable = !app.show_unavailable;
                            app.save_config();
                        } else if local_row == 2 {
                            app.settings_selected = Some(2);
                            app.show_notes_panel = !app.show_notes_panel;
                            // hiding the notes panel while it has focus
                            // would strand focus on an invisible panel
                            if !app.show_notes_panel && app.focus == Focus::Scratchpad {
                                app.set_focus(Focus::Results);
                            }
                            app.save_config();
                        } else if local_row == 3 {
                            app.settings_selected = Some(3);
                            app.clear_on_search = !app.clear_on_search;
                            app.save_config();
                        } else if local_row == 4 {
                            app.settings_selected = Some(4);
                            // clicking on jobs row increments (use keyboard -/+ for fine control)
                            app.jobs = if app.jobs >= 99 { 1 } else { app.jobs + 1 };
                            app.save_config();
                        }
                    }
                    return;
                }
            }

            // check if clicking inside results panel if so select that row
            if let Some(results_rect) = app.panel_rects.results {
                if contains_pos(results_rect, (col, row)) {
                    app.set_focus(Focus::Results);
                    let visible_len = app.visible_results().len();
                    let content_start = results_rect.y + 1;
                    // the list is pushed down by the progress gauge (while
                    // searching with a known total) and by the column header
                    // (whenever rows are visible) — mirror draw_results
                    let progress_offset = if app.searching && app.search_progress.1 > 0 { 1 } else { 0 };
                    let header_offset = if visible_len > 0 { 1 } else { 0 };
                    let list_start = content_start + progress_offset + header_offset;

                    if row < list_start {
                        return;
                    }

                    let clicked_idx = row.saturating_sub(list_start) as usize;
                    if clicked_idx < visible_len {
                        app.results_state.select(Some(clicked_idx));
                    }
                    return;
                }
            }

            // check if clicking the fav check button
            if let Some(btn_rect) = app.panel_rects.fav_check_button {
                if contains_pos(btn_rect, (col, row)) {
                    start_fav_check(app);
                    return;
                }
            }

            // check if clicking inside favorites panel - select that row
            if let Some(fav_rect) = app.panel_rects.favorites {
                if contains_pos(fav_rect, (col, row)) {
                    app.set_focus(Focus::Favorites);
                    let content_start = fav_rect.y + 1;
                    let clicked_idx = (row.saturating_sub(content_start)) as usize;
                    if clicked_idx < app.favorites.len() {
                        app.favorites_state.select(Some(clicked_idx));
                    }
                    return;
                }
            }

            // default: just switch focus to clicked panel
            if let Some(panel) = app.panel_at(col, row) {
                app.set_focus(panel);
            }
        }
        MouseEventKind::Up(MouseButton::Left) => {
            let col = mouse.column;
            let row = mouse.row;

            // quitting fires on *release* of the close button (top-left of
            // the top bar) so a stray press alone can't exit the app
            if let Some(topbar) = app.panel_rects.topbar {
                if contains_pos(topbar, (col, row)) {
                    let close_end = topbar.x + 2;
                    if col <= close_end {
                        quit_app(app);
                        return;
                    }
                }
            }
        }
        MouseEventKind::ScrollUp => {
            // scroll up in focused list
            match app.focus {
                Focus::Results => {
                    if let Some(i) = app.results_state.selected() {
                        if i > 0 {
                            app.results_state.select(Some(i - 1));
                        }
                    }
                }
                Focus::Favorites => {
                    if let Some(i) = app.favorites_state.selected() {
                        if i > 0 {
                            app.favorites_state.select(Some(i - 1));
                        }
                    }
                }
                _ => {}
            }
        }
        MouseEventKind::ScrollDown => {
            // scroll down in focused list
            match app.focus {
                Focus::Results => {
                    let len = app.visible_results().len();
                    if let Some(i) = app.results_state.selected() {
                        if i + 1 < len {
                            app.results_state.select(Some(i + 1));
                        }
                    } else if len > 0 {
                        app.results_state.select(Some(0));
                    }
                }
                Focus::Favorites => {
                    let len = app.favorites.len();
                    if let Some(i) = app.favorites_state.selected() {
                        if i + 1 < len {
                            app.favorites_state.select(Some(i + 1));
                        }
                    } else if len > 0 {
                        app.favorites_state.select(Some(0));
                    }
                }
                _ => {}
            }
        }
        _ => {}
    }
}
+
+fn cancel_search(app: &mut App) {
+ if let Some(handle) = app.stream_task.take() {
+ handle.abort();
+ }
+ app.stream_rx = None;
+ app.searching = false;
+ app.search_progress = (0, 0);
+ app.search_started_at = None;
+ app.last_search_duration = None;
+ app.status_msg = Some("Search canceled".to_string());
+}
+
+fn quit_app(app: &mut App) {
+ if let Some(handle) = app.stream_task.take() {
+ handle.abort();
+ }
+ if let Some(handle) = app.fav_check_task.take() {
+ handle.abort();
+ }
+ app.stream_rx = None;
+ app.fav_check_rx = None;
+
+ // backup on shutdown if enabled
+ if app.can_save && app.backups_enabled {
+ let _ = Config::create_backup(&app.config_path, app.backup_count);
+ }
+
+ app.should_quit = true;
+}
+
+/// kick off checking all favorites availability in the bg
+fn start_fav_check(app: &mut App) {
+ if app.checking_favorites || app.favorites.is_empty() {
+ return;
+ }
+
+ // Cancel any previous fav check
+ if let Some(handle) = app.fav_check_task.take() {
+ handle.abort();
+ }
+ app.fav_check_rx = None;
+
+ app.checking_favorites = true;
+
+ // Build a batch: each favorite is "name.tld" -> lookup (name, [tld])
+ let batches: lookup::LookupBatch = app.favorites.iter().filter_map(|fav| {
+ let parts: Vec<&str> = fav.domain.splitn(2, '.').collect();
+ if parts.len() == 2 {
+ Some((parts[0].to_string(), vec![parts[1].to_string()]))
+ } else {
+ None
+ }
+ }).collect();
+
+ if batches.is_empty() {
+ app.checking_favorites = false;
+ return;
+ }
+
+ let stream = lookup::lookup_many_streaming(
+ batches,
+ app.delay,
+ app.retries,
+ app.verbose,
+ app.cache_path.clone(),
+ false, // dont force refresh
+ app.jobs,
+ app.patch.clone(),
+ app.noretry.clone(),
+ );
+
+ app.fav_check_task = Some(stream.handle);
+ app.fav_check_rx = Some(stream.receiver);
+}
+
+fn handle_escape_for_panel(app: &mut App) {
+ if app.dropdown != DropdownState::Closed {
+ app.dropdown = DropdownState::Closed;
+ return;
+ }
+
+ match app.focus {
+ Focus::Scratchpad => {}
+ Focus::Results => {
+ app.results_state.select(None);
+ }
+ Focus::Favorites => {
+ app.favorites_state.select(None);
+ }
+ Focus::Settings => {
+ app.settings_selected = None;
+ }
+ Focus::Search => {}
+ }
+}
+
+fn start_search(app: &mut App) {
+ if app.searching {
+ cancel_search(app);
+ }
+
+ let search_terms = app.parsed_queries();
+ if search_terms.is_empty() {
+ return;
+ }
+
+ app.searching = true;
+ if app.clear_on_search {
+ app.results.clear();
+ app.results_state.select(None);
+ }
+ app.search_progress = (0, 0);
+ app.search_started_at = Some(Instant::now());
+ app.last_search_duration = None;
+ app.status_msg = Some("Searching...".to_string());
+
+ let default_tlds = app.get_effective_tlds();
+ if default_tlds.is_empty() {
+ app.status_msg = Some("No TLDs to search".to_string());
+ app.searching = false;
+ return;
+ }
+
+ let search_batches: lookup::LookupBatch = search_terms
+ .into_iter()
+ .map(|term| {
+ if term.contains('.') {
+ let mut parts = term.splitn(2, '.');
+ let name = parts.next().unwrap_or_default().to_string();
+ let tld = parts.next().unwrap_or_default().to_string();
+ (name, vec![tld])
+ } else {
+ (
+ term,
+ default_tlds.iter().map(|tld| (*tld).to_string()).collect(),
+ )
+ }
+ })
+ .filter(|(name, tlds)| !name.is_empty() && !tlds.is_empty())
+ .collect();
+
+ if search_batches.is_empty() {
+ app.status_msg = Some("No valid search terms".to_string());
+ app.searching = false;
+ return;
+ }
+
+ let stream = lookup::lookup_many_streaming(
+ search_batches,
+ app.delay,
+ app.retries,
+ app.verbose,
+ app.cache_path.clone(),
+ app.force_refresh,
+ app.jobs,
+ app.patch.clone(),
+ app.noretry.clone(),
+ );
+
+ // only force refresh on first search
+ app.force_refresh = false;
+ app.stream_task = Some(stream.handle);
+ app.stream_rx = Some(stream.receiver);
+}
+
/// Lay out and render the whole UI for one frame, and cache every panel's
/// rect in `app.panel_rects` so `handle_mouse` can hit-test later.
///
/// Layout: top bar / content / search bar stacked vertically; within the
/// content row, an optional scratchpad on the left, results in the middle,
/// and a right sidebar split into favorites (top) and settings (bottom).
fn draw_ui(f: &mut Frame, app: &mut App) {
    let size = f.area();

    // bail out to a placeholder screen when the terminal is too small;
    // resetting the rects disables all mouse hit-testing for this frame
    if terminal_too_small(size) {
        app.panel_rects = PanelRects::default();
        draw_terminal_too_small(f, size);
        return;
    }

    // main layout: top bar + content area + search bar at bottom
    let main_chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(TOPBAR_HEIGHT),
            Constraint::Min(CONTENT_MIN_HEIGHT),
            Constraint::Length(SEARCH_PANEL_HEIGHT),
        ])
        .split(size);

    // sidebar width: target percentage of the content width, clamped to
    // [min, max], and never so wide that results drop below their minimum
    let content_area = main_chunks[1];
    let desired_sidebar = content_area.width.saturating_mul(SIDEBAR_TARGET_WIDTH_PERCENT) / 100;
    let mut sidebar_width = clamp_panel_size(desired_sidebar, SIDEBAR_MIN_WIDTH, SIDEBAR_MAX_WIDTH)
        .min(content_area.width.saturating_sub(RESULTS_MIN_WIDTH));
    if sidebar_width == 0 {
        // degenerate terminal width: give the sidebar whatever fits
        sidebar_width = SIDEBAR_MIN_WIDTH.min(content_area.width);
    }

    // sidebar is pinned to the right edge of the content area
    let sidebar_chunk = Rect {
        x: content_area.x + content_area.width.saturating_sub(sidebar_width),
        y: content_area.y,
        width: sidebar_width,
        height: content_area.height,
    };

    // with the notes panel on: scratchpad | results | sidebar;
    // without it: results | sidebar (scratchpad rect is None)
    let (scratchpad_chunk, results_chunk) = if app.show_notes_panel {
        let center_width = content_area.width.saturating_sub(sidebar_width);
        let desired_scratchpad = content_area.width.saturating_mul(SCRATCHPAD_TARGET_WIDTH_PERCENT) / 100;
        let mut scratchpad_width = clamp_panel_size(
            desired_scratchpad,
            SCRATCHPAD_MIN_WIDTH,
            SCRATCHPAD_MAX_WIDTH,
        )
        .min(center_width.saturating_sub(RESULTS_MIN_WIDTH));
        if scratchpad_width == 0 {
            scratchpad_width = SCRATCHPAD_MIN_WIDTH.min(center_width);
        }

        (
            Some(Rect {
                x: content_area.x,
                y: content_area.y,
                width: scratchpad_width,
                height: content_area.height,
            }),
            Rect {
                x: content_area.x + scratchpad_width,
                y: content_area.y,
                width: center_width.saturating_sub(scratchpad_width),
                height: content_area.height,
            },
        )
    } else {
        (
            None,
            Rect {
                x: content_area.x,
                y: content_area.y,
                width: content_area.width.saturating_sub(sidebar_width),
                height: content_area.height,
            },
        )
    };

    // sidebar vertical split: settings anchored at the bottom, favorites
    // take the rest; settings never squeeze favorites below their minimum
    let settings_height = clamp_panel_size(
        SETTINGS_PANEL_HEIGHT,
        SETTINGS_PANEL_MIN_HEIGHT,
        SETTINGS_PANEL_MAX_HEIGHT,
    )
    .min(sidebar_chunk.height.saturating_sub(FAVORITES_MIN_HEIGHT));
    let settings_chunk = Rect {
        x: sidebar_chunk.x,
        y: sidebar_chunk.y + sidebar_chunk.height.saturating_sub(settings_height),
        width: sidebar_chunk.width,
        height: settings_height,
    };
    let favorites_chunk = Rect {
        x: sidebar_chunk.x,
        y: sidebar_chunk.y,
        width: sidebar_chunk.width,
        height: sidebar_chunk.height.saturating_sub(settings_height),
    };

    // store rects for mouse detection
    app.panel_rects.topbar = Some(main_chunks[0]);
    // export/help button rects are right-aligned within the top bar,
    // separated by one space
    let help_width = HELP_BUTTON_LABEL.chars().count() as u16;
    let export_width = EXPORT_BUTTON_LABEL.chars().count() as u16;
    let help_x = main_chunks[0].x + main_chunks[0].width.saturating_sub(help_width);
    let export_x = help_x.saturating_sub(export_width + 1);
    app.panel_rects.export_button = Some(Rect {
        x: export_x,
        y: main_chunks[0].y,
        width: export_width,
        height: 1,
    });
    app.panel_rects.help_button = Some(Rect {
        x: help_x,
        y: main_chunks[0].y,
        width: help_width,
        height: 1,
    });
    // overlay rects are cleared each frame; the draw_* calls below
    // repopulate the ones that are actually visible this frame
    app.panel_rects.dropdown = None;
    app.panel_rects.help_popup = None;
    app.panel_rects.search_button = None;
    app.panel_rects.cancel_button = None;
    app.panel_rects.clear_button = None;
    app.panel_rects.export_popup = None;
    app.panel_rects.export_mode_favorites = None;
    app.panel_rects.export_mode_results = None;
    app.panel_rects.export_path = None;
    app.panel_rects.export_cancel = None;
    app.panel_rects.export_save = None;
    app.panel_rects.scratchpad = scratchpad_chunk;
    app.panel_rects.results = Some(results_chunk);
    app.panel_rects.favorites = Some(favorites_chunk);
    app.panel_rects.settings = Some(settings_chunk);
    app.panel_rects.search = Some(main_chunks[2]);

    // draw each panel
    draw_topbar(f, main_chunks[0]);
    if let Some(scratchpad_rect) = scratchpad_chunk {
        draw_scratchpad(f, app, scratchpad_rect);
    }
    draw_results(f, app, results_chunk);
    draw_favorites(f, app, favorites_chunk);
    draw_settings(f, app, settings_chunk);
    draw_search(f, app, main_chunks[2]);

    // draw dropdown overlay if open
    if let DropdownState::Open(selected) = &app.dropdown {
        draw_dropdown(f, app, settings_chunk, *selected);
    }

    // popups are drawn last so they sit on top of everything else
    if app.show_help {
        draw_help_overlay(f, app, size);
    }

    if app.export_popup.is_some() {
        draw_export_popup(f, app, size);
    }
}
+
+fn terminal_too_small(area: Rect) -> bool {
+ area.width < MIN_UI_WIDTH || area.height < MIN_UI_HEIGHT
+}
+
+fn draw_terminal_too_small(f: &mut Frame, area: Rect) {
+ let block = Block::default()
+ .borders(Borders::ALL)
+ .border_style(Style::default().fg(Color::Red))
+ .title(" hoardom ");
+
+ let inner = block.inner(area);
+ f.render_widget(block, area);
+ let content_width = inner.width as usize;
+
+ let text = vec![
+ Line::from(Span::styled(
+ fit_cell_center("HELP ! HELP ! HELP !", content_width),
+ Style::default().fg(Color::White).add_modifier(Modifier::BOLD),
+ )),
+ Line::from(Span::styled(
+ fit_cell_center("I AM BEING CRUSHED!", content_width),
+ Style::default().fg(Color::White).add_modifier(Modifier::BOLD),
+ )),
+ Line::from(fit_cell_center("", content_width)),
+ Line::from(Span::styled(
+ fit_cell_center("Im claustrophobic! :'(", content_width),
+ Style::default().fg(Color::White),
+ )),
+ Line::from(Span::styled(
+ fit_cell_center(&format!("Need {}x{} of space", MIN_UI_WIDTH, MIN_UI_HEIGHT), content_width),
+ Style::default().fg(Color::White),
+ )),
+ Line::from(Span::styled(
+ fit_cell_center(&format!("Current: {}x{}", area.width, area.height), content_width),
+ Style::default().fg(Color::DarkGray),
+ )),
+ Line::from(fit_cell_center("", content_width)),
+ Line::from(Span::styled(
+ fit_cell_center("REFUSING TO WORK TILL YOU", content_width),
+ Style::default().fg(Color::White).add_modifier(Modifier::BOLD),
+ )),
+ Line::from(Span::styled(
+ fit_cell_center("GIVE ME BACK MY SPACE! >:(", content_width),
+ Style::default().fg(Color::White).add_modifier(Modifier::BOLD),
+ )),
+ ];
+
+ f.render_widget(Paragraph::new(text), inner);
+}
+
+fn draw_topbar(f: &mut Frame, area: Rect) {
+ let title = format!("{} - {}", APP_NAME, APP_DESC);
+ let width = area.width as usize;
+ let left = format!("{} {}", CLOSE_BUTTON_LABEL, title);
+ let right = format!("{} {}", EXPORT_BUTTON_LABEL, HELP_BUTTON_LABEL);
+ let gap = width.saturating_sub(left.chars().count() + right.chars().count());
+ let paragraph = Paragraph::new(Line::from(vec![
+ Span::styled(CLOSE_BUTTON_LABEL, Style::default().fg(Color::Red).bg(Color::Gray).add_modifier(Modifier::BOLD)),
+ Span::styled(format!(" {}", title), Style::default().fg(Color::White).bg(Color::Red).add_modifier(Modifier::BOLD)),
+ Span::styled(" ".repeat(gap), Style::default().bg(Color::Red).add_modifier(Modifier::BOLD)),
+ Span::styled(EXPORT_BUTTON_LABEL, Style::default().fg(Color::LightGreen).bg(Color::Red).add_modifier(Modifier::BOLD)),
+ Span::styled(" ", Style::default().bg(Color::Red).add_modifier(Modifier::BOLD)),
+ Span::styled(HELP_BUTTON_LABEL, Style::default().fg(Color::LightGreen).bg(Color::Red).add_modifier(Modifier::BOLD)),
+ ]))
+ .style(Style::default().fg(Color::White).bg(Color::Red).add_modifier(Modifier::BOLD));
+ f.render_widget(paragraph, area);
+}
+
+// Help Overlay size is here you goofus
+fn draw_help_overlay(f: &mut Frame, app: &mut App, area: Rect) {
+ let popup = help_popup_rect(area);
+ app.panel_rects.help_popup = Some(popup);
+
+ let text = vec![
+ Line::from(Span::styled(" ", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Global :", Style::default().fg(Color::White))),
+ Line::from(Span::styled("F1 or Help button Toggle this help", Style::default().fg(Color::White))),
+ Line::from(Span::styled("F2 or Export button Open export popup", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Ctrl+C Quit the app", Style::default().fg(Color::White))),
+ Line::from(Span::styled("s Stop/cancel running search", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Esc Clear selection or close help", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Tab or Shift+Tab Move between panels", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Up and Down arrows Navigate results", Style::default().fg(Color::White))),
+ Line::from(Span::styled(" ", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Mouse Click Elements duh", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Scrolling Scroll through elements (yea)", Style::default().fg(Color::White))),
+
+ Line::from(Span::styled(" ", Style::default().fg(Color::White))),
+ Line::from(Span::styled("In Results :", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Enter Add highlighted result to Favorites", Style::default().fg(Color::White))),
+ Line::from(Span::styled("In Favorites :", Style::default().fg(Color::White))),
+ Line::from(Span::styled("Backspace or Delete Remove focused favorite", Style::default().fg(Color::White))),
+ ];
+
+ let block = Block::default()
+ .borders(Borders::ALL)
+ .border_style(Style::default().fg(Color::Red))
+ .title(" Help ");
+
+ f.render_widget(Clear, popup);
+ f.render_widget(Paragraph::new(text).block(block), popup);
+}
+
/// Render the export popup (mode toggles, path field, status line,
/// Cancel/Save buttons) and cache every interactive sub-rect in
/// `app.panel_rects` for mouse hit-testing.
///
/// `selected_row` mapping used throughout: 0 = mode toggles, 1 = path field,
/// 2 = Cancel, 3 = Save. No-op if the popup state is absent.
fn draw_export_popup(f: &mut Frame, app: &mut App, area: Rect) {
    let Some(popup_state) = app.export_popup.as_ref() else {
        return;
    };

    let popup = export_popup_rect(area);
    app.panel_rects.export_popup = Some(popup);

    let block = Block::default()
        .borders(Borders::ALL)
        .border_style(Style::default().fg(Color::Red))
        .title(" Export ");

    let inner = block.inner(popup);
    f.render_widget(Clear, popup);
    f.render_widget(block, popup);

    // inner rows: subtitle / mode toggles / path field (3 high, bordered) /
    // status line / buttons / leftover
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(1),
            Constraint::Length(1),
            Constraint::Length(3),
            Constraint::Length(1),
            Constraint::Length(1),
            Constraint::Min(0),
        ])
        .split(inner);

    // active mode is reversed+bold; both labels are bold while the mode
    // row itself is the selected row
    let mode_style = |mode: ExportMode| {
        let mut style = Style::default().fg(Color::White);
        if popup_state.mode == mode {
            style = style.add_modifier(Modifier::REVERSED | Modifier::BOLD);
        } else if popup_state.selected_row == 0 {
            style = style.add_modifier(Modifier::BOLD);
        }
        style
    };

    let subtitle = fit_cell_center("Choose what to export and where to save it.", chunks[0].width as usize);
    f.render_widget(
        Paragraph::new(subtitle).style(Style::default().fg(Color::DarkGray)),
        chunks[0],
    );

    // center the two mode labels and record each label's rect for clicks
    let favorites_label = "[Favorites TXT]";
    let results_label = "[Results CSV]";
    let mode_spacing = "   ";
    let mode_text = format!("{}{}{}", favorites_label, mode_spacing, results_label);
    let mode_pad = chunks[1]
        .width
        .saturating_sub(mode_text.chars().count() as u16)
        / 2;
    let mode_x = chunks[1].x + mode_pad;
    app.panel_rects.export_mode_favorites = Some(Rect {
        x: mode_x,
        y: chunks[1].y,
        width: favorites_label.chars().count() as u16,
        height: 1,
    });
    app.panel_rects.export_mode_results = Some(Rect {
        x: mode_x + favorites_label.chars().count() as u16 + mode_spacing.chars().count() as u16,
        y: chunks[1].y,
        width: results_label.chars().count() as u16,
        height: 1,
    });
    let mode_line = Line::from(vec![
        Span::raw(" ".repeat(mode_pad as usize)),
        Span::styled(favorites_label, mode_style(ExportMode::FavoritesTxt)),
        Span::raw(mode_spacing),
        Span::styled(results_label, mode_style(ExportMode::ResultsCsv)),
    ]);
    f.render_widget(Paragraph::new(mode_line), chunks[1]);

    // path input field; red border marks it as the selected row
    let path_block = Block::default()
        .borders(Borders::ALL)
        .border_style(if popup_state.selected_row == 1 {
            Style::default().fg(Color::Red)
        } else {
            Style::default().fg(Color::DarkGray)
        })
        .title(" Save to ");
    let path_inner = path_block.inner(chunks[2]);
    app.panel_rects.export_path = Some(chunks[2]);
    f.render_widget(path_block, chunks[2]);
    f.render_widget(
        Paragraph::new(popup_state.path.as_str()).style(Style::default().fg(Color::White)),
        path_inner,
    );

    // status line color: green = success, yellow = overwrite confirmation
    // pending, red = error message present, gray = idle
    let status_style = if popup_state.status_success {
        Style::default().fg(Color::Green).add_modifier(Modifier::BOLD)
    } else if popup_state.confirm_overwrite {
        Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD)
    } else if popup_state.status.is_some() {
        Style::default().fg(Color::Red)
    } else {
        Style::default().fg(Color::DarkGray)
    };
    let status_text = popup_state.status.as_deref().unwrap_or(" ");
    f.render_widget(
        Paragraph::new(fit_cell_center(status_text, chunks[3].width as usize)).style(status_style),
        chunks[3],
    );


    // right-align the Cancel/Save buttons and record their rects for clicks
    let cancel_label = "[Cancel]";
    let button_gap = "  ";
    let save_label = "[Save]";
    let button_text = format!("{}{}{}", cancel_label, button_gap, save_label);
    let button_pad = chunks[4]
        .width
        .saturating_sub(button_text.chars().count() as u16);
    let buttons_x = chunks[4].x + button_pad;
    app.panel_rects.export_cancel = Some(Rect {
        x: buttons_x,
        y: chunks[4].y,
        width: cancel_label.chars().count() as u16,
        height: 1,
    });
    app.panel_rects.export_save = Some(Rect {
        x: buttons_x + cancel_label.chars().count() as u16 + button_gap.chars().count() as u16,
        y: chunks[4].y,
        width: save_label.chars().count() as u16,
        height: 1,
    });
    let button_line = Line::from(vec![
        Span::raw(" ".repeat(button_pad as usize)),
        Span::styled(
            cancel_label,
            if popup_state.selected_row == 2 {
                Style::default().fg(Color::Green).bg(Color::DarkGray).add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(Color::Green).add_modifier(Modifier::BOLD)
            },
        ),
        Span::raw(button_gap),
        Span::styled(
            save_label,
            if popup_state.selected_row == 3 {
                Style::default().fg(Color::Green).bg(Color::DarkGray).add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(Color::Green).add_modifier(Modifier::BOLD)
            },
        ),
    ]);
    f.render_widget(Paragraph::new(button_line), chunks[4]);

    // show the terminal cursor inside the path field while it is selected,
    // clamped to the field's inner width
    if popup_state.selected_row == 1 {
        let max_x = path_inner.width.saturating_sub(1);
        let x = path_inner.x + (popup_state.cursor_pos as u16).min(max_x);
        let y = path_inner.y;
        f.set_cursor_position((x, y));
    }
}
+
+fn draw_results(f: &mut Frame, app: &mut App, area: Rect) {
+ let focused = app.focus == Focus::Results;
+ let border_style = if focused {
+ Style::default().fg(Color::Red)
+ } else {
+ Style::default().fg(Color::DarkGray)
+ };
+
+ // show progress in title when searching
+ let title = if app.searching {
+ let (cur, tot) = app.search_progress;
+ if tot > 0 {
+ format!(" Results [{}/{}] ", cur, tot)
+ } else {
+ " Results (loading...) ".to_string()
+ }
+ } else if app.results.is_empty() {
+ " Results ".to_string()
+ } else {
+ let avail = app.results.iter().filter(|(_, r)| r.is_available()).count();
+ let duration_str = match app.last_search_duration {
+ Some(d) => format!(" | Took: {:.1}s", d.as_secs_f64()),
+ None => String::new(),
+ };
+ format!(" Results ({} available / {} total{}) ", avail, app.results.len(), duration_str)
+ };
+
+ let block = Block::default()
+ .borders(Borders::ALL)
+ .border_style(border_style)
+ .title(title);
+
+ // If searching and have progress, show a gauge bar at top of results area
+ if app.searching && app.search_progress.1 > 0 {
+ let inner = block.inner(area);
+ f.render_widget(block, area);
+
+ // split: 1 line for progress bar, rest for results
+ let chunks = Layout::default()
+ .direction(Direction::Vertical)
+ .constraints([Constraint::Length(1), Constraint::Min(1)])
+ .split(inner);
+
+ // draw progress gauge
+ let (cur, tot) = app.search_progress;
+ let pct = (cur as f64 / tot as f64 * 100.0) as u16;
+ let filled = (chunks[0].width as u32 * cur as u32 / tot as u32) as u16;
+ let bar: String = "\u{2588}".repeat(filled as usize)
+ + &"\u{2591}".repeat((chunks[0].width.saturating_sub(filled)) as usize);
+ let bar_text = format!(" {}% ", pct);
+ let gauge_line = Line::from(vec![
+ Span::styled(bar, Style::default().fg(Color::Red)),
+ Span::styled(bar_text, Style::default().fg(Color::DarkGray)),
+ ]);
+ f.render_widget(Paragraph::new(gauge_line), chunks[0]);
+
+ // draw results list in remaining space
+ draw_results_list(f, app, chunks[1]);
+ } else {
+ let inner = block.inner(area);
+ f.render_widget(block, area);
+ draw_results_list(f, app, inner);
+ }
+}
+
+fn draw_results_list(f: &mut Frame, app: &mut App, area: Rect) {
+ let show_note_column = app.show_unavailable;
+ let selected_idx = app.results_state.selected();
+ let selected_bg = Color::Black;
+
+ // collect visible results
+ let visible_data: Vec<(String, String, String, DomainStatus)> = if app.show_unavailable {
+ app.results.iter().map(|(_, r)| (r.full.clone(), r.status_str().to_string(), r.note_str(), r.status.clone())).collect()
+ } else {
+ app.results.iter().filter(|(_, r)| r.is_available()).map(|(_, r)| (r.full.clone(), r.status_str().to_string(), r.note_str(), r.status.clone())).collect()
+ };
+
+ if visible_data.is_empty() && !app.searching {
+ let msg = if app.results.is_empty() {
+ "Type a domain suffix or full domain name then press Enter"
+ } else {
+ "No available domains found"
+ };
+ let p = Paragraph::new(msg).style(Style::default().fg(Color::DarkGray));
+ f.render_widget(p, area);
+ return;
+ }
+
+ // calculate adaptive column widths from available space
+ let total_w = area.width.saturating_sub(1) as usize;
+ let marker_w = 3usize;
+ let sep_w = if show_note_column { 9usize } else { 6usize };
+ let mut status_w = 10usize.min(total_w.saturating_sub(12)).max(6);
+ let mut note_w = if show_note_column {
+ (total_w / 3).clamp(8, 28)
+ } else {
+ 0
+ };
+ let mut domain_w = total_w.saturating_sub(status_w + note_w + marker_w + sep_w);
+
+ if domain_w < 10 {
+ let needed = 10 - domain_w;
+ if show_note_column {
+ let shrink_note = needed.min(note_w.saturating_sub(8));
+ note_w = note_w.saturating_sub(shrink_note);
+ }
+ domain_w = total_w.saturating_sub(status_w + note_w + marker_w + sep_w);
+ }
+ if domain_w < 10 {
+ let needed = 10 - domain_w;
+ let shrink_status = needed.min(status_w.saturating_sub(6));
+ status_w = status_w.saturating_sub(shrink_status);
+ domain_w = total_w.saturating_sub(status_w + note_w + marker_w + sep_w);
+ }
+ domain_w = domain_w.max(6);
+
+ let (list_area, header_area) = if !visible_data.is_empty() {
+ let chunks = Layout::default()
+ .direction(Direction::Vertical)
+ .constraints([Constraint::Length(1), Constraint::Min(1)])
+ .split(area);
+ (chunks[1], Some(chunks[0]))
+ } else {
+ (area, None)
+ };
+
+ if let Some(header_area) = header_area {
+ let mut header_spans = vec![
+ Span::styled(format!(" {}", fit_cell("Domain", domain_w)), Style::default().fg(Color::Gray).add_modifier(Modifier::BOLD)),
+ Span::styled(" │ ", Style::default().fg(Color::DarkGray)),
+ Span::styled(fit_cell("Status", status_w), Style::default().fg(Color::Gray).add_modifier(Modifier::BOLD)),
+ ];
+
+ if show_note_column {
+ header_spans.push(Span::styled(" │ ", Style::default().fg(Color::DarkGray)));
+ header_spans.push(Span::styled(fit_cell("Details", note_w), Style::default().fg(Color::Gray).add_modifier(Modifier::BOLD)));
+ }
+
+ header_spans.push(Span::styled(" │ ", Style::default().fg(Color::DarkGray)));
+ header_spans.push(Span::styled(" ✓ ", Style::default().fg(Color::Gray).add_modifier(Modifier::BOLD)));
+
+ f.render_widget(Paragraph::new(Line::from(header_spans)), header_area);
+ }
+
+ let items: Vec<ListItem> = visible_data
+ .iter()
+ .enumerate()
+ .map(|(idx, (full, status_str, note, status))| {
+ let is_selected = selected_idx == Some(idx);
+ let selection_bg = if is_selected { Some(selected_bg) } else { None };
+
+ let status_style = match status {
+ DomainStatus::Available => Style::default().fg(Color::Green),
+ DomainStatus::Registered { .. } => Style::default().fg(Color::Red),
+ DomainStatus::Error { kind, .. } => match kind {
+ ErrorKind::InvalidTld => Style::default().fg(Color::Yellow),
+ _ => Style::default().fg(Color::Blue),
+ },
+ };
+
+ let domain_style = match status {
+ DomainStatus::Available => Style::default().fg(Color::Green),
+ DomainStatus::Registered { .. } => Style::default().fg(Color::Red),
+ DomainStatus::Error { kind, .. } => match kind {
+ ErrorKind::InvalidTld => Style::default().fg(Color::Yellow),
+ _ => Style::default().fg(Color::Blue),
+ },
+ };
+
+ let apply_bg = |style: Style| {
+ if let Some(bg) = selection_bg {
+ style.bg(bg).add_modifier(Modifier::BOLD)
+ } else {
+ style
+ }
+ };
+
+ let mut spans = vec![
+ Span::styled(format!(" {}", fit_cell(full, domain_w)), apply_bg(domain_style)),
+ Span::styled(" \u{2502} ", apply_bg(Style::default().fg(Color::Gray))),
+ Span::styled(fit_cell(status_str, status_w), apply_bg(status_style)),
+ ];
+
+ if show_note_column {
+ spans.push(Span::styled(" \u{2502} ", apply_bg(Style::default().fg(Color::Gray))));
+ spans.push(Span::styled(fit_cell(note, note_w), apply_bg(Style::default().fg(Color::White))));
+ }
+
+ spans.push(Span::styled(" \u{2502} ", apply_bg(Style::default().fg(Color::Gray))));
+ spans.push(match status {
+ DomainStatus::Available => Span::styled(" ✓ ", apply_bg(Style::default().fg(Color::Green))),
+ DomainStatus::Registered { .. } => Span::styled(" ✗ ", apply_bg(Style::default().fg(Color::Red))),
+ DomainStatus::Error { kind, .. } => match kind {
+ ErrorKind::InvalidTld => Span::styled(" ? ", apply_bg(Style::default().fg(Color::Yellow))),
+ _ => Span::styled(" ! ", apply_bg(Style::default().fg(Color::Blue))),
+ },
+ });
+
+ let line = Line::from(spans);
+ ListItem::new(line)
+ })
+ .collect();
+
+ let list = List::new(items);
+
+ f.render_stateful_widget(list, list_area, &mut app.results_state);
+}
+
/// Left-align `value` in a field of exactly `width` characters (counted in
/// `char`s), truncating with a trailing `…` when it does not fit.
/// Returns an empty string for a zero-width field.
fn fit_cell(value: &str, width: usize) -> String {
    match width {
        0 => String::new(),
        w if value.chars().count() <= w => format!("{value:<w$}"),
        1 => "…".to_string(),
        w => {
            // keep width-1 chars and spend the last column on the ellipsis
            let mut cell: String = value.chars().take(w - 1).collect();
            cell.push('…');
            format!("{cell:<w$}")
        }
    }
}
+
/// Center `value` in a field of exactly `width` characters (counted in
/// `char`s), truncating with a trailing `…` when it does not fit. Extra
/// padding goes to the right when the split is uneven.
fn fit_cell_center(value: &str, width: usize) -> String {
    if width == 0 {
        return String::new();
    }

    let char_count = value.chars().count();
    let shown: String = if char_count <= width {
        value.to_string()
    } else if width == 1 {
        "…".to_string()
    } else {
        value.chars().take(width - 1).chain(std::iter::once('…')).collect()
    };

    let pad_total = width.saturating_sub(shown.chars().count());
    let pad_left = pad_total / 2;
    let pad_right = pad_total - pad_left;
    format!("{}{}{}", " ".repeat(pad_left), shown, " ".repeat(pad_right))
}
+
/// Hard-wrap `text` into display lines of at most `width` chars.
///
/// Wrapping is purely by `char` count (no word breaking). Each `\n` starts a
/// new line and an empty source line yields one empty display line. Returns
/// an empty vec when `width` is 0.
///
/// Note: `split('\n')` already yields a trailing empty segment for
/// newline-terminated text, so no extra end-of-text handling is needed —
/// the previous `ends_with('\n')` push produced a duplicate blank line
/// ("a\n" wrapped to three lines instead of two), disagreeing with the
/// cursor model in `scratchpad_cursor_positions`.
fn wrap_text_lines(text: &str, width: u16) -> Vec<String> {
    if width == 0 {
        return Vec::new();
    }

    let width = width as usize;
    let mut wrapped = Vec::new();

    for line in text.split('\n') {
        if line.is_empty() {
            wrapped.push(String::new());
            continue;
        }

        // chunk the line into width-sized runs of chars
        let chars: Vec<char> = line.chars().collect();
        for chunk in chars.chunks(width) {
            wrapped.push(chunk.iter().collect());
        }
    }

    wrapped
}
+
/// Enumerate every byte offset a cursor can occupy in `text` (including the
/// end-of-text position) as `(byte_index, display_line, display_column)`
/// under hard wrapping at `width` columns. A zero `width` yields only the
/// origin entry.
fn scratchpad_cursor_positions(text: &str, width: u16) -> Vec<(usize, u16, u16)> {
    if width == 0 {
        return vec![(0, 0, 0)];
    }

    let mut out = Vec::with_capacity(text.chars().count() + 1);
    let (mut row, mut column, mut offset) = (0u16, 0u16, 0usize);
    out.push((offset, row, column));

    for ch in text.chars() {
        offset += ch.len_utf8();
        match ch {
            '\n' => {
                row += 1;
                column = 0;
            }
            _ => {
                column += 1;
                // wrap onto the next display line once the column fills up
                if column >= width {
                    row += 1;
                    column = 0;
                }
            }
        }
        out.push((offset, row, column));
    }

    out
}
+
+fn cursor_line_col(text: &str, cursor: usize, width: u16) -> (u16, u16) {
+ let clamped_cursor = cursor.min(text.len());
+ scratchpad_cursor_positions(text, width)
+ .into_iter()
+ .take_while(|(idx, _, _)| *idx <= clamped_cursor)
+ .last()
+ .map(|(_, line, col)| (line, col))
+ .unwrap_or((0, 0))
+}
+
+fn cursor_index_for_line_col(text: &str, target_line: u16, target_col: u16, width: u16) -> usize {
+ let positions = scratchpad_cursor_positions(text, width);
+ let mut best_on_line = None;
+
+ for (idx, line, col) in positions {
+ if line == target_line {
+ best_on_line = Some(idx);
+ if col >= target_col {
+ return idx;
+ }
+ } else if line > target_line {
+ return best_on_line.unwrap_or(idx);
+ }
+ }
+
+ best_on_line.unwrap_or(text.len())
+}
+
+fn scratchpad_inner_width(app: &App) -> u16 {
+ app.panel_rects
+ .scratchpad
+ .map(|rect| rect.width.saturating_sub(2).max(1))
+ .unwrap_or(1)
+}
+
+fn move_scratchpad_cursor_vertical(app: &mut App, line_delta: i16) {
+ let width = scratchpad_inner_width(app);
+ let (line, col) = cursor_line_col(&app.scratchpad, app.scratchpad_cursor, width);
+ let target_line = if line_delta.is_negative() {
+ line.saturating_sub(line_delta.unsigned_abs())
+ } else {
+ line.saturating_add(line_delta as u16)
+ };
+ app.scratchpad_cursor = cursor_index_for_line_col(&app.scratchpad, target_line, col, width);
+}
+
/// Render the scratchpad panel: a bordered free-text area whose contents
/// are hard-wrapped to the inner width, plus the terminal cursor when the
/// panel has focus.
fn draw_scratchpad(f: &mut Frame, app: &mut App, area: Rect) {
    let focused = app.focus == Focus::Scratchpad;
    // red border marks the focused panel, dark gray otherwise
    let border_style = if focused {
        Style::default().fg(Color::Red)
    } else {
        Style::default().fg(Color::DarkGray)
    };

    let block = Block::default()
        .borders(Borders::ALL)
        .border_style(border_style)
        .title(" Scratchpad ");

    let inner = block.inner(area);
    let wrapped_lines = wrap_text_lines(&app.scratchpad, inner.width);
    // the paragraph needs at least one line; substitute a blank one
    let text: Vec<Line> = if wrapped_lines.is_empty() {
        vec![Line::raw(String::new())]
    } else {
        wrapped_lines.into_iter().map(Line::raw).collect()
    };
    f.render_widget(block, area);
    f.render_widget(
        Paragraph::new(text)
            .style(Style::default().fg(Color::White)),
        inner,
    );

    // place the hardware cursor at the logical cursor position, clamped to
    // the inner rect; suppressed while the export popup is open
    if focused && app.export_popup.is_none() {
        let (line, col) = cursor_line_col(&app.scratchpad, app.scratchpad_cursor, inner.width);
        let max_x = inner.width.saturating_sub(1);
        let max_y = inner.height.saturating_sub(1);
        f.set_cursor_position((inner.x + col.min(max_x), inner.y + line.min(max_y)));
    }
}
+
/// Render the favorites panel: saved domains colored by their cached
/// status string, with a centered "[c]heck all" button on the bottom row.
fn draw_favorites(f: &mut Frame, app: &mut App, area: Rect) {
    let focused = app.focus == Focus::Favorites;
    // red border marks the focused panel, dark gray otherwise
    let border_style = if focused {
        Style::default().fg(Color::Red)
    } else {
        Style::default().fg(Color::DarkGray)
    };

    // title doubles as a busy indicator while a bulk check runs
    let title = if app.checking_favorites {
        " Favorites (checking...) "
    } else {
        " Favorites "
    };

    let block = Block::default()
        .borders(Borders::ALL)
        .border_style(border_style)
        .title(title);

    let inner = block.inner(area);

    // Reserve 1 row at the bottom for the check button
    let list_area = Rect {
        x: inner.x,
        y: inner.y,
        width: inner.width,
        height: inner.height.saturating_sub(1),
    };
    let button_area = Rect {
        x: inner.x,
        y: inner.y + list_area.height,
        width: inner.width,
        height: 1.min(inner.height),
    };
    // remembered for mouse hit-testing
    app.panel_rects.fav_check_button = Some(button_area);

    let items: Vec<ListItem> = app
        .favorites
        .iter()
        .map(|fav| {
            // color by the stringly-typed cached status on the favorite
            let status_color = match fav.status.as_str() {
                "available" => Color::Green,
                "registered" => Color::Red,
                "error" => Color::DarkGray,
                _ => Color::White, // unknown
            };
            let mut spans = vec![Span::styled(
                fav.domain.as_str(),
                Style::default().fg(status_color),
            )];
            // yellow " !" marker for entries whose `changed` flag is set
            // (presumably: status differs from the previous check — confirm)
            if fav.changed {
                spans.push(Span::styled(" !", Style::default().fg(Color::Yellow)));
            }
            ListItem::new(Line::from(spans))
        })
        .collect();

    let list = List::new(items)
        .highlight_style(
            Style::default()
                .add_modifier(Modifier::REVERSED),
        );

    f.render_widget(block, area);
    f.render_stateful_widget(list, list_area, &mut app.favorites_state);

    // Draw the check button at the bottom
    let btn_label = if app.checking_favorites { "checking..." } else { "[c]heck all" };
    let btn_style = if app.checking_favorites {
        Style::default().fg(Color::DarkGray)
    } else {
        Style::default().fg(Color::Green)
    };
    f.render_widget(
        Paragraph::new(Line::from(Span::styled(btn_label, btn_style)))
            .alignment(ratatui::layout::Alignment::Center),
        button_area,
    );
}
+
/// Render the settings panel: five selectable rows — the TLD-list picker
/// (row 0), three checkboxes (rows 1-3) and the parallel-jobs counter
/// (row 4). Row highlighting only applies while the panel has focus.
fn draw_settings(f: &mut Frame, app: &mut App, area: Rect) {
    let focused = app.focus == Focus::Settings;
    // red border marks the focused panel, dark gray otherwise
    let border_style = if focused {
        Style::default().fg(Color::Red)
    } else {
        Style::default().fg(Color::DarkGray)
    };

    let block = Block::default()
        .borders(Borders::ALL)
        .border_style(border_style)
        .title(" Settings ");

    // checkbox glyphs reflecting the current toggle states
    let unavail_check = if app.show_unavailable { "[x]" } else { "[ ]" };
    let notes_check = if app.show_notes_panel { "[x]" } else { "[ ]" };
    let clear_check = if app.clear_on_search { "[x]" } else { "[ ]" };
    // right-aligned in 2 columns so single digits line up
    let jobs_str = format!("{:>2}", app.jobs);

    // selection highlight is suppressed entirely when unfocused
    let selected = if focused { app.settings_selected } else { None };
    // checkbox glyph style: green when checked, dimmed when not, reversed
    // and bold when its row is the selected one
    let checkbox_style = |row: usize, checked: bool| {
        let style = if checked {
            Style::default().fg(Color::Green)
        } else {
            Style::default().fg(Color::DarkGray)
        };

        if selected == Some(row) {
            style.add_modifier(Modifier::REVERSED | Modifier::BOLD)
        } else {
            style
        }
    };

    // label text style: reversed on the selected row, plain white otherwise
    let label_style = |row: usize| {
        if selected == Some(row) {
            Style::default().add_modifier(Modifier::REVERSED)
        } else {
            Style::default().fg(Color::White)
        }
    };

    // rows 0 and 4 are value rows (not checkboxes) and get a background
    // highlight instead of the reversed modifier
    let tld_row_style = if selected == Some(0) {
        Style::default().bg(Color::DarkGray).add_modifier(Modifier::BOLD)
    } else {
        Style::default()
    };

    let jobs_row_style = if selected == Some(4) {
        Style::default().bg(Color::DarkGray).add_modifier(Modifier::BOLD)
    } else {
        Style::default()
    };

    let text = vec![
        // row 0: TLD list picker; "V" hints the dropdown key
        Line::from(vec![
            Span::raw(" "),
            Span::styled("TLD List: [", tld_row_style.fg(Color::White)),
            Span::styled(app.tld_list_name.as_str(), tld_row_style.fg(Color::Cyan)),
            Span::styled("] ", tld_row_style.fg(Color::White)),
            Span::styled("V", tld_row_style.fg(Color::Green)),
        ]),
        // row 1: show-unavailable toggle
        Line::from(vec![
            Span::raw(" "),
            Span::styled(unavail_check, checkbox_style(1, app.show_unavailable)),
            Span::styled(" Show Unavailable", label_style(1)),
        ]),
        // row 2: notes-panel toggle
        Line::from(vec![
            Span::raw(" "),
            Span::styled(notes_check, checkbox_style(2, app.show_notes_panel)),
            Span::styled(" Show Notes Panel", label_style(2)),
        ]),
        // row 3: clear-on-search toggle
        Line::from(vec![
            Span::raw(" "),
            Span::styled(clear_check, checkbox_style(3, app.clear_on_search)),
            Span::styled(" Clear on Search", label_style(3)),
        ]),
        // row 4: jobs counter; "-/+" hints the adjustment keys
        Line::from(vec![
            Span::raw(" "),
            Span::styled("Jobs: [", jobs_row_style.fg(Color::White)),
            Span::styled(jobs_str, jobs_row_style.fg(Color::Cyan)),
            Span::styled("] ", jobs_row_style.fg(Color::White)),
            Span::styled("-/+", jobs_row_style.fg(Color::Green)),
        ]),
    ];

    let paragraph = Paragraph::new(text).block(block);
    f.render_widget(paragraph, area);
}
+
+fn draw_search(f: &mut Frame, app: &mut App, area: Rect) {
+ let focused = app.focus == Focus::Search;
+ let border_style = if focused {
+ Style::default().fg(Color::Red)
+ } else {
+ Style::default().fg(Color::DarkGray)
+ };
+
+ let title = match &app.status_msg {
+ Some(msg) => format!(" Search - {} ", msg),
+ None => " Search (Enter to lookup) ".to_string(),
+ };
+
+ let block = Block::default()
+ .borders(Borders::ALL)
+ .border_style(border_style)
+ .title(title);
+
+ let inner = block.inner(area);
+ f.render_widget(block, area);
+
+ let search_button_width = SEARCH_BUTTON_LABEL.chars().count() as u16;
+ let clear_button_width = CLEAR_BUTTON_LABEL.chars().count() as u16;
+ let stop_button_width = STOP_BUTTON_LABEL.chars().count() as u16;
+ let chunks = if app.clear_on_search {
+ Layout::default()
+ .direction(Direction::Horizontal)
+ .constraints([
+ Constraint::Min(1),
+ Constraint::Length(search_button_width),
+ Constraint::Length(1),
+ Constraint::Length(stop_button_width),
+ ])
+ .split(inner)
+ } else {
+ Layout::default()
+ .direction(Direction::Horizontal)
+ .constraints([
+ Constraint::Min(1),
+ Constraint::Length(search_button_width),
+ Constraint::Length(1),
+ Constraint::Length(stop_button_width),
+ Constraint::Length(1),
+ Constraint::Length(clear_button_width),
+ ])
+ .split(inner)
+ };
+
+ app.panel_rects.search_button = Some(chunks[1]);
+ if app.clear_on_search {
+ app.panel_rects.clear_button = None;
+ app.panel_rects.cancel_button = Some(chunks[3]);
+ } else {
+ app.panel_rects.cancel_button = Some(chunks[3]);
+ app.panel_rects.clear_button = Some(chunks[5]);
+ }
+
+ let input_chunk = chunks[0];
+ let visible_input = fit_cell(&app.search_input, input_chunk.width as usize);
+ let input = Paragraph::new(visible_input).style(Style::default().fg(Color::White));
+ f.render_widget(input, input_chunk);
+
+ let search_enabled = !app.searching && !app.search_input.is_empty();
+ let cancel_enabled = app.searching;
+
+ let search_style = if search_enabled {
+ Style::default().fg(Color::Black).bg(Color::Green).add_modifier(Modifier::BOLD)
+ } else {
+ Style::default().fg(Color::DarkGray).bg(Color::Black)
+ };
+ let stop_style = if cancel_enabled {
+ Style::default().fg(Color::Black).bg(Color::Yellow).add_modifier(Modifier::BOLD)
+ } else {
+ Style::default().fg(Color::DarkGray).bg(Color::Black)
+ };
+ let clear_style = Style::default().fg(Color::White).bg(Color::Red).add_modifier(Modifier::BOLD);
+
+ f.render_widget(Paragraph::new(SEARCH_BUTTON_LABEL).style(search_style), chunks[1]);
+ if app.clear_on_search {
+ f.render_widget(Paragraph::new(STOP_BUTTON_LABEL).style(stop_style), chunks[3]);
+ } else {
+ f.render_widget(Paragraph::new(STOP_BUTTON_LABEL).style(stop_style), chunks[3]);
+ f.render_widget(Paragraph::new(CLEAR_BUTTON_LABEL).style(clear_style), chunks[5]);
+ }
+
+ // show cursor in search bar when focused
+ if focused && app.export_popup.is_none() {
+ let max_cursor = input_chunk.width.saturating_sub(1) as usize;
+ let x = input_chunk.x + app.cursor_pos.min(max_cursor) as u16;
+ let y = input_chunk.y;
+ f.set_cursor_position((x, y));
+ }
+}
+
+fn draw_dropdown(f: &mut Frame, app: &mut App, settings_area: Rect, selected: usize) {
+ let options = app.list_options();
+
+ // position dropdown below the TLD list line in settings
+ let dropdown_full = Rect {
+ x: settings_area.x + 1,
+ y: settings_area.y + 1,
+ width: settings_area.width.saturating_sub(2).min(DROPDOWN_MAX_WIDTH),
+ height: (options.len() as u16 + 2).min(DROPDOWN_MAX_HEIGHT),
+ };
+
+ app.panel_rects.dropdown = Some(dropdown_full);
+
+ // clear the area behind the dropdown
+ f.render_widget(Clear, dropdown_full);
+
+ let items: Vec<ListItem> = options
+ .iter()
+ .map(|opt| {
+ ListItem::new(Line::from(Span::styled(
+ format!(" {} ", opt),
+ Style::default().fg(Color::White),
+ )))
+ })
+ .collect();
+
+ let block = Block::default()
+ .borders(Borders::ALL)
+ .border_style(Style::default().fg(Color::Red))
+ .title(" TLD List ");
+
+ f.render_widget(Clear, dropdown_full);
+ let list = List::new(items)
+ .block(block)
+ .highlight_style(Style::default().fg(Color::White).bg(Color::Red).add_modifier(Modifier::BOLD));
+ let mut state = ListState::default();
+ state.select(Some(selected));
+ f.render_stateful_widget(list, dropdown_full, &mut state);
+}
diff --git a/src/types.rs b/src/types.rs
new file mode 100644
index 0000000..9a496c2
--- /dev/null
+++ b/src/types.rs
@@ -0,0 +1,93 @@
+use serde::{Deserialize, Serialize};
+
/// Outcome of a lookup for a single domain.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum DomainStatus {
    /// The domain can be registered.
    Available,
    /// Taken; `expiry` carries the expiry date when the lookup exposed one
    /// (rendered as "until <date>" by `DomainResult::note_str`).
    Registered { expiry: Option<String> },
    /// The lookup failed; `message` holds the raw error text.
    Error { kind: ErrorKind, message: String },
}
+
/// Category of a failed lookup; round-tripped through config strings via
/// `from_config_str` / `to_config_str` and used for per-kind styling in
/// the TUI.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ErrorKind {
    /// The TLD was not recognized.
    InvalidTld,
    /// Anything that does not fit the other categories.
    Unknown,
    /// The query timed out.
    Timeout,
    /// The server rate-limited the query.
    RateLimit,
    /// The server refused the query.
    Forbidden,
}
+
+impl ErrorKind {
+ /// parse from config string (case insensitive, underscores and hyphens both work)
+ pub fn from_config_str(s: &str) -> Option<Self> {
+ match s.to_lowercase().replace('-', "_").as_str() {
+ "invalid_tld" | "invalidtld" => Some(ErrorKind::InvalidTld),
+ "unknown" => Some(ErrorKind::Unknown),
+ "timeout" => Some(ErrorKind::Timeout),
+ "rate_limit" | "ratelimit" => Some(ErrorKind::RateLimit),
+ "forbidden" => Some(ErrorKind::Forbidden),
+ _ => None,
+ }
+ }
+
+ /// back to config string
+ pub fn to_config_str(&self) -> &'static str {
+ match self {
+ ErrorKind::InvalidTld => "invalid_tld",
+ ErrorKind::Unknown => "unknown",
+ ErrorKind::Timeout => "timeout",
+ ErrorKind::RateLimit => "rate_limit",
+ ErrorKind::Forbidden => "forbidden",
+ }
+ }
+}
+
/// One looked-up domain and its status, as shown in the results list and
/// written by the exporters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DomainResult {
    /// Label part, e.g. "example".
    pub name: String,
    /// Suffix part, e.g. "com".
    pub tld: String,
    /// Precomputed "name.tld" (built by `DomainResult::new`).
    pub full: String,
    /// Lookup outcome for this domain.
    pub status: DomainStatus,
}
+
+impl DomainResult {
+ pub fn new(name: &str, tld: &str, status: DomainStatus) -> Self {
+ Self {
+ name: name.to_string(),
+ tld: tld.to_string(),
+ full: format!("{}.{}", name, tld),
+ status,
+ }
+ }
+
+ pub fn is_available(&self) -> bool {
+ matches!(self.status, DomainStatus::Available)
+ }
+
+ pub fn is_error(&self) -> bool {
+ matches!(self.status, DomainStatus::Error { .. })
+ }
+
+ pub fn status_str(&self) -> &str {
+ match &self.status {
+ DomainStatus::Available => "available",
+ DomainStatus::Registered { .. } => "registered",
+ DomainStatus::Error { .. } => "error",
+ }
+ }
+
+ pub fn note_str(&self) -> String {
+ match &self.status {
+ DomainStatus::Available => "-".to_string(),
+ DomainStatus::Registered { expiry } => match expiry {
+ Some(date) => format!("until {}", date),
+ None => "no expiry info".to_string(),
+ },
+ DomainStatus::Error { kind, message } => match kind {
+ ErrorKind::InvalidTld => "invalid tld".to_string(),
+ _ => message.clone(),
+ },
+ }
+ }
+}
+
+