diff --git a/Build-Rules.ps1 b/Build-Rules.ps1 new file mode 100644 index 0000000..6de037c --- /dev/null +++ b/Build-Rules.ps1 @@ -0,0 +1,46 @@ +# Build-Rules.ps1 +$ErrorActionPreference = "Stop" +$urlsFile = ".\urls.txt" +$out = ".\malicious_rules.txt" +"" | Out-File $out -Encoding utf8 +Add-Content $out "# Aggregated malicious_rules.txt — generated on $(Get-Date -AsUTC -Format 'yyyy-MM-ddTHH:mm:ssZ')" + +Get-Content $urlsFile | ForEach-Object { + $line = $_.Trim() + if ($line -eq "" -or $line.StartsWith("#")) { return } + Add-Content $out "# from $line" + try { + $resp = Invoke-RestMethod -Uri $line -UseBasicParsing -ErrorAction Stop + } catch { + Write-Warning "Failed to fetch $line" + return + } + $resp -split "`n" | ForEach-Object { + $ltrim = $_.Trim() + if ($ltrim -eq "" -or $ltrim.StartsWith("#")) { return } + # split fields + $parts = -split $ltrim + if ($parts.Count -ge 2 -and ($parts[0] -match '^\d+\.\d+\.\d+\.\d+$' -or $parts[0] -eq "0.0.0.0")) { + $host = $parts[-1] + } else { + $host = $parts[0] + } + $host = $host.ToLower().Trim() + if ($host -match "[/:@]") { return } + if ($host -match '^\d+\.\d+\.\d+\.\d+$') { return } + $host = $host.TrimStart('.') + if ($host -notmatch '\.') { return } + "$host|-;" | Out-File -Append -Encoding utf8 $out + } +} + +# dedupe while keeping comments +$lines = Get-Content $out +$seen = @{} +$outLines = New-Object System.Collections.Generic.List[string] +foreach ($ln in $lines) { + if ($ln -match '^\s*#') { $outLines.Add($ln); continue } + if (-not $seen.ContainsKey($ln)) { $seen[$ln] = $true; $outLines.Add($ln) } +} +$outLines | Set-Content $out -Encoding utf8 +Write-Output "Generated $out with $((Get-Content $out | Select-String '\|-;').Count) rule lines. Review before importing." 
diff --git a/DnsRules.txt b/DnsRules.txt new file mode 100644 index 0000000..c0fd510 --- /dev/null +++ b/DnsRules.txt @@ -0,0 +1,203 @@ +// ------------------------------------------------------------- +// DnsRules_populated.txt — DNSveil / SDC rule file (curated sources) +// Created: 2025-10-20 +// Notes: These are pointers to trusted public sources used by white-hat researchers. +// I could not fetch/verify the exact raw contents here — fetch + convert +// the remote files before importing (see scripts at end). +// ------------------------------------------------------------- + +// --------------------------- +// Variables (trusted DoH / DoT / DNS providers) +// --------------------------- +SmartDns1 = https://cloudflare-dns.com/dns-query; // Cloudflare DoH (1.1.1.1) +SmartDns2 = https://dns.google/dns-query; // Google DoH (8.8.8.8) +SmartDns3 = https://dns.quad9.net/dns-query; // Quad9 DoH (protects against malicious domains) +SmartDns4 = https://dns.adguard.com/dns-query; // AdGuard DoH (ad-blocking option) +SmartDns5 = https://doh.cleanbrowsing.org/doh/family-filter; // CleanBrowsing (family / malware filters) +LocalDNS = udp://127.0.0.1:53; + +// --------------------------- +// Defaults +// --------------------------- +blockport:53,80,443; +*|+; + +// --------------------------- +// Trusted remote blocklist sources (white-hat / researcher-referenced) +// --------------------------- +// NOTE: These links point to the project pages or raw file endpoints used by many researchers. +// Verify the exact raw path and integrity (HTTPS, checksums, signatures) before automating. 
+// +// Aggregated hosts / combined lists +# Steven Black — community-curated aggregated hosts (ad/tracker/malware) +# raw hosts (example): +# https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts +# project page: +https://github.com/StevenBlack/hosts + +# Abuse.ch — URLhaus (malicious URL / host distribution) +# Hostfile download: +https://urlhaus.abuse.ch/downloads/hostfile/ +# project page: +https://urlhaus.abuse.ch/ + +# OISD — comprehensive host/blocklist project popular with researchers +# project page & downloads (check official download links): +https://oisd.nl/ +https://github.com/oisd + +# AdAway — widely used hosts file for Android / research +https://adaway.org/hosts.txt +https://github.com/AdAway/adaway.github.io + +# EasyList / EasyPrivacy — widely-used ad and tracker filters (maintained by community) +# project page / GitHub: +https://easylist.to/ +https://github.com/easylist/easylist + +# AdGuard filter lists (DNS filters / SDNS filters) +https://github.com/AdguardTeam/AdGuardSDNSFilters +https://github.com/AdguardTeam/AdGuardHome + +# PhishTank — community-curated phishing feed (requires API/consent for raw dumps) +https://www.phishtank.com/developer_info.php + +# Spamhaus DROP / PBL (IP blocklists, useful for CIDR blocking) +https://www.spamhaus.org/drop/ + +# Emerging Threats / Proofpoint ET Open — IDS / malware indicators (feeds) +https://rules.emergingthreats.net/ +https://github.com/emergingthreats + +# MalwareBazaar / MalwareDomains / other indicator aggregators (use with care) +https://bazaar.abuse.ch/ +# Project pages for additional curated lists: +https://github.com/mitchellkrogza/Ultimate.Hosts.Blacklist +https://github.com/StevenBlack/hosts + +// --------------------------- +// How to reference these in your workflow +// --------------------------- +// Preferred approach: +// 1) Maintain a local file `urls.txt` listing one remote source URL per line (use the raw endpoints where available). 
+// 2) Use the aggregation script (bash / PowerShell) to fetch, normalize and convert to `domain|-;` lines. +// 3) Deduplicate, verify, then import the single `malicious_rules.txt` into DNSveil. +// +// Example urls.txt (place raw URLs here — replace any that require API keys with their raw-export endpoints): +# raw.githubusercontent.com/StevenBlack/hosts/master/hosts +# https://urlhaus.abuse.ch/downloads/hostfile/ +# https://adaway.org/hosts.txt +# https://filters.adtidy.org/extension/chromium/filters/11.txt (example Adblock filter) +# https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilters/master/Filters/filter.txt + +// --------------------------- +// Example protective rules (keep/adjust) +// --------------------------- +# Provider exemptions (no DPI bypass for core providers) +google.com|--; +*.google.com|--; +github.com|--; +*.github.com|--; +githubusercontent.com|--; +*.githubusercontent.com|--; +stackexchange.com|--; +*.stackoverflow.com|--; +openai.com|--; +*.openai.com|--; + +# YouTube / Google Video handling +youtube.com|dnsdomain:google.com;sni:google.com; +ytimg.com|dnsdomain:google.com; +*.googlevideo.com|dnsdomain:*.c.docs.google.com;sni:google.com; + +# Local exemptions +192.168.0.0/16|--; +10.0.0.0/8|--; +172.16.0.0/12|--; +127.0.0.0/8|--; +::1/128|--; + +// --------------------------- +// Placeholder for aggregated rules (generated after fetching remote lists) +// --------------------------- +// After running the fetch/convert script you will replace this block with the converted lines, +// e.g.: +// +// adserver.example|-; +// tracking.example|-; +// maliciousdomain.example|-; +// +// (Automatically generated -> import-ready: every rule line ends with ';') +// --------------------------- + +// --------------------------- +// Where to find DNSCrypt stamps & public resolver lists +// --------------------------- +// DNSCrypt public servers & stamps (canonical list maintained by community) +https://dnscrypt.info/public-servers/ 
+https://github.com/DNSCrypt/dnscrypt-resolvers + +// --------------------------- +// DOH / DOT endpoints (human readable references — insert as variables above) +// Cloudflare DoH: https://cloudflare-dns.com/dns-query +// Google DoH: https://dns.google/dns-query +// Quad9 DoH: https://dns.quad9.net/dns-query +// AdGuard DoH: https://dns.adguard.com/dns-query +// CleanBrowsing DoH: https://doh.cleanbrowsing.org/doh/family-filter +// NextDNS (per-config): https://dns.nextdns.io/ (replace ) +// --------------------------- + +// --------------------------- +// Automated fetch + convert examples (use locally; validate outputs) +// --------------------------- + +# Bash (recommended on Linux/macOS): +# 1) create urls.txt with raw endpoints (one per line) +# 2) run: +# out="malicious_rules.txt" +# : > "$out" +# while read -r url; do +# echo "# from $url" >> "$out" +# curl -fsSL "$url" | sed 's/\r//g' | awk ' +# /^[[:space:]]*#/ {next} +# NF==0 {next} +# { +# # hosts style (e.g., "0.0.0.0 domain") -> take last field, otherwise first +# host = ($1 ~ /^[0-9]/) ? 
$NF : $1 +# # basic sanity filters +# if (host ~ /[:\/]/) next +# print host "|-;" +# }' >> "$out" +# done < urls.txt +# # dedupe +# awk '!seen[$0]++' "$out" > "${out}.uniq" && mv "${out}.uniq" "$out" +# echo "Generated $out" + +# PowerShell (Windows): +# $urls = Get-Content .\urls.txt +# $out = "malicious_rules.txt" +# "" | Out-File $out -Encoding utf8 +# foreach ($u in $urls) { +# try { $text = Invoke-RestMethod -Uri $u -UseBasicParsing -ErrorAction Stop } catch { Write-Warning "fail $u"; continue } +# Add-Content -Path $out -Value "# from $u" +# $text -split "`n" | ForEach-Object { +# $l = $_.Trim() +# if ($l -eq "" -or $l.StartsWith("#")) { continue } +# $p = -split $l +# $host = if ($p.Count -ge 2) { $p[-1] } else { $p[0] } +# if ($host -match "[:/]" -or $host -match "^\d+(\.\d+){3}$") { continue } +# "$host|-;" | Out-File -Append -Encoding utf8 $out +# } +# } +# Get-Content $out | Select-Object -Unique | Set-Content $out -Encoding utf8 +# Write-Output "Generated $out" + +// --------------------------- +// Final notes & cautions +// --------------------------- +// - Always vet remote sources before automated inclusion; prefer HTTPS raw endpoints and pinned commits. +// - Test the aggregated set in a controlled environment (small subset) before deploying network-wide. +// - Keep backups and use ordering: place the most specific rules above general ones. +// - Consider adding integrity checks (hashes / signed lists) for fully automated ingestion. +// +// End of DnsRules_populated.txt diff --git a/Rules.txt b/Rules.txt new file mode 100644 index 0000000..592d2c0 --- /dev/null +++ b/Rules.txt @@ -0,0 +1,161 @@ +// ------------------------------------------------------------- +// DnsRules.txt — DNSveil / SDC rule file +// Created: 2025-10-20 +// Notes: Edit SmartDns variables to your preferred DoH/DoT/DNSCrypt endpoints. +// Wildcards (*) supported. Lines ending with ';' are rules. 
+// ------------------------------------------------------------- + +// --------------------------- +// Variables (set your own DoH/DoT/DNSCrypt endpoints) +// --------------------------- +SmartDns1 = https://dns1.example.com/dns-query; +SmartDns2 = https://dns2.example.com/dns-query; +SmartDns3 = https://dns3.example.com/dns-query; +LocalDNS = udp://127.0.0.1:53; + +// --------------------------- +// Defaults +// --------------------------- +// blockport: specify ports to block by default (comma separated) +blockport:53,80,443; + +// Default action for everything else: use system/default behaviour (+ = apply defaults) +*|+; + +// --------------------------- +// Privacy / Ad / Tracker blocks +// (blocks domains and subdomains used for ads, analytics, trackers) +// Edit to remove/add lists you trust or need. +// --------------------------- +doubleclick.net|-; +googleadservices.com|-; +adservice.google.com|-; +ads.youtube.com|-; +googlesyndication.com|-; +pagead2.googlesyndication.com|-; +amazon-adsystem.com|-; +adnxs.com|-; +adsafeprotected.com|-; +rubiconproject.com|-; +scorecardresearch.com|-; +analytics.google.com|-; +google-analytics.com|-; +facebook.com|-; // block Facebook entire domain (remove if you need FB) +*.facebook.com|-; +connect.facebook.net|-; +pixel.facebook.com|-; +track.adform.net|-; +*.tracking.example|-; // placeholder: add private tracking hosts + +// --------------------------- +// System / Developer / CDN exemptions +// Don't apply DPI/proxy/fake SNI for these, let system handle them directly +// Use the '--' rule to skip DPI bypass and proxies +google.com|--; +*.google.com|--; +github.com|--; +*.github.com|--; +githubusercontent.com|--; +*.githubusercontent.com|--; +stackexchange.com|--; +*.stackoverflow.com|--; +*.sstatic.net|--; +php.net|--; +openai.com|--; +*.openai.com|--; + +// --------------------------- +// YouTube / Google video handling +// Use google DNS domain mapping to preserve playback, and set SNI where needed +// Adjust based 
on network behavior; SNI fake helps DPI bypass on googlevideo hosts +youtube.com|dnsdomain:google.com;sni:google.com; +ytimg.com|dnsdomain:google.com; +*.ytimg.com|dnsdomain:google.com; +ggpht.com|dnsdomain:google.com; +*.ggpht.com|dnsdomain:*.googleusercontent.com; +*.googlevideo.com|dnsdomain:*.c.docs.google.com;sni:google.com; + +// --------------------------- +// Force specific DoH/DoT per high-value domains +// Example: send traffic for selected domains to SmartDns1 / SmartDns2 +// --------------------------- +developers.google.com|--;dns:SmartDns1,SmartDns2,SmartDns3; +*.googleusercontent.com|--;dns:SmartDns1,SmartDns2,SmartDns3; +developer.android.com|--;dns:SmartDns1,SmartDns2,SmartDns3; +spotify.com|--;dns:SmartDns1,SmartDns2,SmartDns3; +*.spotify.com|--;dns:SmartDns1,SmartDns2,SmartDns3; + +// --------------------------- +// Fake DNS (map domain to a local IP or block by redirecting to 127.0.0.1) +// Useful for blocking telemetry or unwanted hosts. +// --------------------------- +telemetry.example.com|127.0.0.1; +diagnostics.example.net|127.0.0.1; + +// --------------------------- +// Proxy / Upstream proxy per-domain +// Route traffic for selected domains through a SOCKS5 or HTTP proxy. +// Example: route a domain via local SOCKS5 running on 127.0.0.1:1080 +// Format: domain|proxy:socks5://127.0.0.1:1080; +// Add user/password if required: &user:UserName&pass:PassWord +// --------------------------- +example-blocked-site.com|proxy:socks5://127.0.0.1:1080; +*.blocked-video-site.com|proxy:http://127.0.0.1:3128; + +// --------------------------- +// Anonymized DNSCrypt examples (stamp + relay) +// (Only required if you use DNSCrypt relays) +// Example pattern (stamp + relay) is supported by DNSveil per README. +// --------------------------- +sdns://...sdns... 
sdns://relay-stamp-or-ip:port + +// --------------------------- +// DNS Domain translation +// Use dnsdomain to get IP from one domain and use it for another +// Useful if upstream blocks direct domain but allows other domain +// --------------------------- +youtube-nocdn.example|dnsdomain:google.com; + +// --------------------------- +// Example: Set a custom/fake SNI for a domain (helps DPI bypass) +// --------------------------- +*.googlevideo.com|sni:google.com; +*.example-streaming.com|sni:cdn.example.com; + +// --------------------------- +// Example: Block entire TLDs or country zones (use with caution) +// --------------------------- +.cn|-; +.ru|-; + +// --------------------------- +// Local network & LAN exemptions +// Allow local LAN and DNS to operate directly +// --------------------------- +192.168.0.0/16|--; +10.0.0.0/8|--; +172.16.0.0/12|--; +127.0.0.0/8|--; +::1/128|--; + +// --------------------------- +// Export / Advanced scanner hints +// You can export servers based on conditions from the advanced DNS scanner +// e.g., export servers that pass IPv6 test or avoid Google Safe Search +// --------------------------- + +// --------------------------- +// Custom: Use Local DNS for development/test domains +// --------------------------- +dev.local|dns:LocalDNS; +*.staging.local|dns:LocalDNS; + +// --------------------------- +// End of file — additional tips: +// - To block a domain and all subdomains use: example.com|-; and *.example.com|-; +// - Place the most specific rules above more generic ones (rules are matched in order). +// - Test changes with the Advanced DNS Scanner and DNS Lookup tools in DNSveil. +// - If using "SSL Decryption" or fake SNI, you may need to install the generated root CA +// and restart browsers for changes to take effect. +// - Keep copies of original rules and incrementally change to avoid accidental lockout. 
+// ---------------------------v diff --git a/build_rules.sh b/build_rules.sh new file mode 100644 index 0000000..3c3e593 --- /dev/null +++ b/build_rules.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash +set -euo pipefail + +urls_file="urls.txt" +out="malicious_rules.txt" +tmpdir="$(mktemp -d)" +trap 'rm -rf "$tmpdir"' EXIT + +: > "$out" +echo "# Aggregated malicious_rules.txt — generated on $(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> "$out" + +while IFS= read -r url || [ -n "$url" ]; do + # skip empty or comment lines + [[ -z "${url//[[:space:]]/}" ]] && continue + [[ "$url" =~ ^[[:space:]]*# ]] && continue + + echo "# from $url" >> "$out" + # Fetch (curl). Try to follow redirects and time out if unreachable. + if ! curl -fsSL --max-time 60 "$url" -o "$tmpdir/cur"; then + echo "Warning: failed to fetch $url" >&2 + continue + fi + + # Normalize and extract hostnames: + # - remove CR + # - remove comments + # - handle hosts format (0.0.0.0 domain) and plain domain lists + # - skip IP-only, URLs, mailto:, entries with slash/colon + sed 's/\r//g' "$tmpdir/cur" \ + | sed 's/#.*$//' \ + | awk ' + /^[[:space:]]*$/ { next } + { + # split into fields + n = split($0, f, /[[:space:]]+/) + if (n == 0) next + # choose last field if first is an IP, else first field + if (f[1] ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/ || f[1] ~ /^0\.0\.0\.0$/) { + host = f[n] + } else { + host = f[1] + } + # lower-case + for(i=1;i<=length(host);i++) host=tolower(host) + # skip if contains slash, colon, @, or looks like an IP + if (host ~ /[\/:]/) next + if (host ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) next + # strip leading dots + gsub(/^\\.+/, "", host) + # basic sanity: host must contain a dot + if (host !~ /\\./) next + print host "|-;" + }' >> "$out" + +done < "$urls_file" + +# dedupe while preserving comment/context blocks +awk ' + /^#/ { print; next } + { if (!seen[$0]++) print } +' "$out" > "${out}.uniq" && mv "${out}.uniq" "$out" + +echo "Generated $out with $(grep -c \"|-;\" "$out") rule lines." 
echo "Review the file before importing into DNSveil."
diff --git a/maldomains_Block.txt b/maldomains_Block.txt new file mode 100644 index 0000000..3f89620 --- /dev/null +++ b/maldomains_Block.txt @@ -0,0 +1,14 @@
# Malicious Domain Blocklist — curated for DNS filtering
# Source: public threat-intel feeds (see references at end)
# Updated: [insert date]
# NOTE: rules must end with "|-;" (block action + terminator) to match the
# DNSveil rule format used elsewhere in this repo; a bare trailing "|" is incomplete.

malicious-example1.com|-;
badactor-dnsserver.net|-;
c2.evildomain.org|-;
botnet-controller123.info|-;
malware-downloadhub.xyz|-;
ransomware-filestore.ru|-;
phishing-login-secure-site.com|-;
telemetry-tracker-evil.io|-;
untrusted-dns-resolver-free.co|-;
advertising-malware-portal.top|-;
diff --git a/malicious_rules.txt b/malicious_rules.txt new file mode 100644 index 0000000..e1c2006 --- /dev/null +++ b/malicious_rules.txt @@ -0,0 +1,3 @@

# Aggregated malicious_rules.txt — generated on 2025-10-21T02:02:15Z
# from https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts
diff --git a/polts.ods b/polts.ods new file mode 100644 index 0000000..a0ef08f Binary files /dev/null and b/polts.ods differ
diff --git a/urls.txt b/urls.txt new file mode 100644 index 0000000..f7b0ebd --- /dev/null +++ b/urls.txt @@ -0,0 +1,16 @@
# urls.txt — one raw endpoint per line (trusted community/researcher sources)
# Replace or remove any lines that require API keys or you don't trust.
# NOTE: comments must be on their own lines — the fetch scripts read whole
# lines as URLs, so an inline "# ..." comment would break the request.

https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts
https://adaway.org/hosts.txt
https://urlhaus.abuse.ch/downloads/hostfile/
https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilters/master/Filters/filter.txt
https://raw.githubusercontent.com/AdguardTeam/AdGuardHome/master/filters/filter.txt
https://raw.githubusercontent.com/mitchellkrogza/Ultimate.Hosts.Blacklist/master/hosts
https://raw.githubusercontent.com/StevenBlack/hosts/master/data/hosts-blocklists.txt
https://rules.emergingthreats.net/blockrules/compromised-ips.txt
https://www.spamhaus.org/drop/drop.txt
# MalwareBazaar — CSV export (post-processing needed before use as a host list):
https://bazaar.abuse.ch/export/csv/
# Optional / utility references (human pages / repos, NOT raw host lists —
# kept commented out so the aggregation scripts don't ingest HTML):
# https://dnscrypt.info/public-servers/
# https://github.com/DNSCrypt/dnscrypt-resolvers