diff --git a/gravity.sh b/gravity.sh
index 852356d7..b222886b 100755
--- a/gravity.sh
+++ b/gravity.sh
@@ -12,7 +12,7 @@
 # Please see LICENSE file for your rights under this license.
 
 coltable="/opt/pihole/COL_TABLE"
-source ${coltable}
+source "${coltable}"
 
 basename="pihole"
 PIHOLE_COMMAND="/usr/local/bin/${basename}"
@@ -35,13 +35,16 @@
 localList="${piholeDir}/local.list"
 
 domainsExtension="domains"
 matterAndLight="${basename}.0.matterandlight.txt"
-supernova="${basename}.1.supernova.txt"
-preEventHorizon="list.preEventHorizon"
-eventHorizon="${basename}.2.supernova.txt"
+parsedMatter="${basename}.1.parsedmatter.txt"
+whitelistMatter="${basename}.2.whitelistmatter.txt"
 accretionDisc="${basename}.3.accretionDisc.txt"
+preEventHorizon="list.preEventHorizon"
 
 skipDownload="false"
 
+# Use "force-reload" when restarting dnsmasq for everything but Wildcards
+dnsRestart="force-reload"
+
 # Source setupVars from install script
 setupVars="${piholeDir}/setupVars.conf"
 if [[ -f "${setupVars}" ]];then
@@ -61,17 +64,39 @@
 if [[ -r "${piholeDir}/pihole.conf" ]]; then
   echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
 fi
 
-helpFunc() {
-  echo "Usage: pihole -g
-Update domains from blocklists specified in adlists.list
+# Attempt to resolve DNS before proceeding with blocklist generation
+gravity_DNSLookup() {
+  local plu
 
-Options:
-  -f, --force Force the download of all specified blocklists
-  -h, --help Show this help dialog"
-  exit 0
+  # Determine if github.com can be resolved
+  if ! timeout 2 nslookup github.com &> /dev/null; then
+    if [[ -n "${secs}" ]]; then
+      echo -e "${OVER} ${CROSS} DNS resolution is still unavailable, cancelling"
+      exit 1
+    fi
+
+    # Determine error output message
+    if pidof dnsmasq &> /dev/null; then
+      echo -e " ${CROSS} DNS resolution is temporarily unavailable"
+    else
+      echo -e " ${CROSS} DNS service is not running"
+      "${PIHOLE_COMMAND}" restartdns
+    fi
+
+    secs="15"
+    while [[ "${secs}" -ne 0 ]]; do
+      [[ "${secs}" -ne 1 ]] && plu="s" || plu=""
+      echo -ne "${OVER} ${INFO} Waiting $secs second${plu} before continuing..."
+      sleep 1
+      : $((secs--))
+    done
+
+    # Try again
+    gravity_DNSLookup
+  fi
 }
 
-# Retrieve blocklist URLs from adlists.list
+# Retrieve blocklist URLs and parse domains from adlists.list
 gravity_Collapse() {
   echo -e " ${INFO} Neutrino emissions detected..."
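
The gravity_DNSLookup() function added above blocks until DNS resolution works before any downloads begin. A minimal standalone sketch of the same wait-and-retry pattern (the function name is illustrative, and unlike the real function, which aborts after one failed retry, this sketch retries indefinitely):

    # Sketch: block until DNS resolution succeeds, pausing 15s between attempts
    wait_for_dns() {
      local secs
      until timeout 2 nslookup github.com &> /dev/null; do
        for ((secs = 15; secs > 0; secs--)); do
          printf '\r  Waiting %2d seconds before retrying...' "${secs}"
          sleep 1
        done
        printf '\n'
      done
    }
    wait_for_dns
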
@@ -94,7 +119,7 @@ gravity_Collapse() {
   mapfile -t sources < <(awk '!/^[#@;!\[]/ {gsub(/\r$/, "", $0); if ($1) { print $1 } }' "${adListFile}" 2> /dev/null)
 
   # Parse source domains from $sources
-  # Logic: Split by folder/port and remove URL protocol/password
+  # Logic: Split by folder/port, remove URL protocol & optional username:password@
   mapfile -t sourceDomains < <(
     awk -F '[/:]' '{
       gsub(/(.*:\/\/|.*:.*@)/, "", $0)
@@ -102,7 +127,7 @@ gravity_Collapse() {
   }' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
   )
 
-  if [[ -n "${sources[*]}" ]] || [[ -n "${sourceDomains[*]}" ]]; then
+  if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
     echo -e "${OVER} ${TICK} ${str}"
   else
     echo -e "${OVER} ${CROSS} ${str}"
@@ -110,46 +135,144 @@
   fi
 }
 
-# Parse source file into domains-only format
-gravity_ParseFileAsDomains() {
-  local source destination hostsFilter firstLine abpFilter
+# Define options for when retrieving blocklists
+gravity_Supernova() {
+  local url domain agent cmd_ext str
+
+  echo ""
+
+  # Loop through $sources to download each one
+  for ((i = 0; i < "${#sources[@]}"; i++)); do
+    url="${sources[$i]}"
+    domain="${sourceDomains[$i]}"
+
+    # Save the file as list.#.domain
+    saveLocation="${piholeDir}/list.${i}.${domain}.${domainsExtension}"
+    activeDomains[$i]="${saveLocation}"
+
+    agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36"
+
+    # Use a case statement to download lists that need special commands
+    case "${domain}" in
+      "pgl.yoyo.org") cmd_ext="-d mimetype=plaintext -d hostformat=hosts";;
+      *) cmd_ext="";;
+    esac
+
+    if [[ "${skipDownload}" == false ]]; then
+      str="Target: ${domain} (${url##*/})"
+      echo -e " ${INFO} ${str}"
+
+      gravity_Pull "${url}" "${cmd_ext}" "${agent}" "${str}"
+
+      echo ""
+    fi
+  done
+}
+
+# Download specified URL and perform QA
+gravity_Pull() {
+  local url cmd_ext agent heisenbergCompensator patternBuffer str httpCode success
+
+  url="${1}"
+  cmd_ext="${2}"
+  agent="${3}"
+
+  # Store downloaded content to temp file instead of RAM
+  patternBuffer=$(mktemp)
+  heisenbergCompensator=""
+  if [[ -r "${saveLocation}" ]]; then
+    # If domain has been saved, add file for date check to only download newer
+    heisenbergCompensator="-z ${saveLocation}"
+  fi
+
+  str="Status:"
+  echo -ne " ${INFO} ${str} Pending..."
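
The heisenbergCompensator set up above feeds curl's -z flag, which sends an If-Modified-Since header derived from the cached file's timestamp; combined with -w "%{http_code}", the curl call that follows can distinguish a fresh 200 from a 304 "cache is current". A minimal sketch of the same conditional-download technique, with a hypothetical cache path and URL:

    # Sketch: re-download only if the remote copy is newer than the cached one
    cached="/tmp/list.0.example.com.domains"   # hypothetical cache location
    buffer=$(mktemp)
    if [[ -r "${cached}" ]]; then
      # -z sends If-Modified-Since using the cached file's modification time
      code=$(curl -s -L -z "${cached}" -w "%{http_code}" -o "${buffer}" "https://example.com/hosts.txt")
    else
      code=$(curl -s -L -w "%{http_code}" -o "${buffer}" "https://example.com/hosts.txt")
    fi
    echo "HTTP ${code}"   # 200: fresh content in ${buffer}; 304: cache is current
    rm -f "${buffer}"
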
+  # shellcheck disable=SC2086
+  httpCode=$(curl -s -L ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${patternBuffer}" 2> /dev/null)
+
+  # Determine "Status:" output based on HTTP response
+  case "$httpCode" in
+    "200" ) echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success="true";;
+    "304" ) echo -e "${OVER} ${TICK} ${str} No changes detected"; success="true";;
+    "403" ) echo -e "${OVER} ${CROSS} ${str} Forbidden"; success="false";;
+    "404" ) echo -e "${OVER} ${CROSS} ${str} Not found"; success="false";;
+    "408" ) echo -e "${OVER} ${CROSS} ${str} Time-out"; success="false";;
+    "451" ) echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons"; success="false";;
+    "521" ) echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)"; success="false";;
+    "522" ) echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)"; success="false";;
+    "500" ) echo -e "${OVER} ${CROSS} ${str} Internal Server Error"; success="false";;
+    *     ) echo -e "${OVER} ${CROSS} ${str} Status $httpCode"; success="false";;
+  esac
+
+  # Determine if the blocklist was downloaded and saved correctly
+  if [[ "${success}" == "true" ]]; then
+    if [[ "${httpCode}" == "304" ]]; then
+      : # Do nothing
+    # Check if patternBuffer is a non-zero length file
+    elif [[ -s "${patternBuffer}" ]]; then
+      # Determine if blocklist is non-standard and parse as appropriate
+      gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}"
+    else
+      # Fall back to previously cached list if patternBuffer is empty
+      echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
+    fi
+  else
+    # Determine if cached list exists
+    if [[ -r "${saveLocation}" ]]; then
+      echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
+    else
+      echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
+    fi
+  fi
+
+  # Delete temp file if it has not been moved
+  if [[ -f "${patternBuffer}" ]]; then
+    rm "${patternBuffer}" 2> /dev/null || \
+      echo -e " ${CROSS} Unable to remove ${patternBuffer}"
+  fi
+}
+
+# Parse non-standard source files into domains-only format
+gravity_ParseFileIntoDomains() {
+  local source destination commentPattern firstLine abpFilter
   source="${1}"
   destination="${2}"
 
   # Determine how to parse source file
   if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
-    # Symbols used as comments: "#;@![/"
+    # Consolidated list parsing: Remove comments and hosts IPs
+
+    # Symbols used as comments: #;@![/
     commentPattern="[#;@![\\/]"
 
-    # Parse consolidated file by removing comments and hosts IP's
     # Logic: Process lines which do not begin with comments
     awk '!/^'"${commentPattern}"'/ {
       # If there are multiple words separated by space
      if (NF>1) {
        # Remove comments (Inc. prefixed spaces/tabs)
-        if ($0 ~ /'"${commentPattern}"'/) { gsub("( | )'"${commentPattern}"'.*", "", $0) }
+        if ($0 ~ /'"${commentPattern}"'/) { gsub("( |\t)'"${commentPattern}"'.*", "", $0) }
         # Print consecutive domains
         if ($3) {
           $1=""
+          # Remove space which is left in $0 when removing $1
           gsub("^ ", "", $0)
           print $0
         # Print single domain
         } else if ($2) { print $2 }
-      # Print single domain
+      # If there are no words separated by space
       } else if($1) { print $1 }
     }' "${source}" 2> /dev/null > "${destination}"
   else
-    # Individual file parsing
-    # Logic: comments are kept, and domains are extracted from each line
+    # Individual file parsing: Keep comments, while parsing domains from each line
     read -r firstLine < "${source}"
 
-    # Determine how to parse individual source file
+    # Determine how to parse individual source file formats
     if [[ "${firstLine,,}" =~ "adblock" ]] || [[ "${firstLine,,}" =~ "ublock" ]] || [[ "${firstLine,,}" =~ "! checksum" ]]; then
-      # Parse Adblock domains & comments: https://adblockplus.org/filter-cheatsheet
+      # Logic: Parse Adblock domains & comments: https://adblockplus.org/filter-cheatsheet
       abpFilter="/^(\\[|!)|^(\\|\\|.*\\^)/"
       awk ''"${abpFilter}"' {
         # Remove valid adblock type options
@@ -158,12 +281,10 @@ gravity_ParseFileAsDomains() {
         gsub(/(\|\||\^\$?$)/, "", $0)
         # Remove lines which are only IPv4 addresses or contain "^/*"
         if ($0 ~ /(^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$|[\\^\/\*])/) { $0="" }
-        # Print if not empty
         if ($0) { print $0 }
       }' "${source}" 2> /dev/null > "${destination}"
-      echo -e " ${TICK} Format: Adblock"
-    elif grep -q -E "^(https?://|([0-9]{1,3}\.){3}[0-9]{1,3}$)" "${source}" &> /dev/null; then
-      # Parse URLs
+    elif grep -q -E "^(https?://|([0-9]{1,3}\\.){3}[0-9]{1,3}$)" "${source}" &> /dev/null; then
+      # Parse URLs if source file contains http:// or IPv4
       awk '{
         # Remove URL protocol, optional "username:password@", and ":?/;"
         if ($0 ~ /[:?\/;]/) { gsub(/(^.*:\/\/(.*:.*@)?|[:?\/;].*)/, "", $0) }
@@ -171,7 +292,6 @@ gravity_ParseFileAsDomains() {
         if ($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" }
         if ($0) { print $0 }
       }' "${source}" 2> /dev/null > "${destination}"
-      echo -e " ${TICK} Format: URL"
     else
       # Keep hosts/domains file in same format as it was downloaded
       output=$( { mv "${source}" "${destination}"; } 2>&1 )
@@ -186,124 +306,20 @@ gravity_ParseFileAsDomains() {
   fi
 }
 
-# Determine output based on gravity_Transport() status
-gravity_AdvancedTransport() {
-  local patternBuffer success error output status
-  patternBuffer="${1}"
-  success="${2}"
-  error="${3}"
-
-  if [[ "${success}" = true ]]; then
-    if [[ "${error}" == "304" ]]; then
-      : # Print no output
-    # Check if the patternbuffer is a non-zero length file
-    elif [[ -s "${patternBuffer}" ]]; then
-      # Parse Adblock/URL format blocklists and include comments
-      # HOSTS format is moved as-is
-      gravity_ParseFileAsDomains "${patternBuffer}" "${saveLocation}" "1"
-    else
-      # Fall back to previously cached list if current $patternBuffer is empty
-      echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
-    fi
-  else
-    # Determine if cached list exists
-    if [[ -r "${saveLocation}" ]]; then
-      echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
-    else
-      echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
-    fi
-  fi
-}
-
-# Curl the specified URL with any necessary command extentions
-gravity_Transport() {
-  local url cmd_ext agent
-  url="${1}"
-  cmd_ext="${2}"
-  agent="${3}"
-
-  # Store downloaded content to temp file instead of RAM
-  patternBuffer=$(mktemp)
-  heisenbergCompensator=""
-  if [[ -r "${saveLocation}" ]]; then
-    # If domain has been saved, add file for date check to only download newer
-    heisenbergCompensator="-z ${saveLocation}"
-  fi
-
-  local str="Status:"
-  echo -ne " ${INFO} ${str} Pending..."
-  # shellcheck disable=SC2086
-  httpCode=$(curl -s -L ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${patternBuffer}" 2> /dev/null)
-
-  # Determine "Status:" output based on HTTP response
-  case "$httpCode" in
-    "200" ) echo -e "${OVER} ${TICK} ${str} Transport successful"; success=true;;
-    "304" ) echo -e "${OVER} ${TICK} ${str} No changes detected"; success=true;;
-    "403" ) echo -e "${OVER} ${CROSS} ${str} Forbidden"; success=false;;
-    "404" ) echo -e "${OVER} ${CROSS} ${str} Not found"; success=false;;
-    "408" ) echo -e "${OVER} ${CROSS} ${str} Time-out"; success=false;;
-    "451" ) echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons"; success=false;;
-    "521" ) echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)"; success=false;;
-    "522" ) echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)"; success=false;;
-    "500" ) echo -e "${OVER} ${CROSS} ${str} Internal Server Error"; success=false;;
-    *     ) echo -e "${OVER} ${CROSS} ${str} Status $httpCode"; success=false;;
-  esac
-
-  # Output additional info if success=false
-  gravity_AdvancedTransport "${patternBuffer}" "${success}" "${httpCode}"
-
-  # Delete temp file if it has not been moved
-  if [[ -f "${patternBuffer}" ]]; then
-    rm "${patternBuffer}"
-  fi
-}
-
-# Define User Agent and options for each blocklist
-gravity_Pull() {
-  local agent url domain cmd_ext str
-
-  echo ""
-
-  # Loop through $sources to download each one
-  for ((i = 0; i < "${#sources[@]}"; i++)); do
-    url="${sources[$i]}"
-    domain="${sourceDomains[$i]}"
-
-    # Save the file as list.#.domain
-    saveLocation="${piholeDir}/list.${i}.${domain}.${domainsExtension}"
-    activeDomains[$i]="${saveLocation}"
-
-    agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36"
-
-    # Use a case statement to download lists that need special commands
-    case "${domain}" in
-      "pgl.yoyo.org") cmd_ext="-d mimetype=plaintext -d hostformat=hosts";;
-      *) cmd_ext="";;
-    esac
-
-    if [[ "${skipDownload}" == false ]]; then
-      str="Target: $domain (${url##*/})"
-      echo -e " ${INFO} ${str}"
-
-      gravity_Transport "$url" "$cmd_ext" "$agent" "$str"
-
-      echo ""
-    fi
-  done
-}
-
-# Consolidate domains to one list and add blacklisted domains
+# Create unfiltered "Matter and Light" consolidated list
 gravity_Schwarzschild() {
   local str lastLine
 
   str="Consolidating blocklists"
   echo -ne " ${INFO} ${str}..."
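
The hunk continues below by swapping truncate -s 0 for the shell's ": >" redirection, which zeroes a file (or creates it if absent) using only the built-in no-op command. A small sketch of the idiom, using a hypothetical path:

    # Sketch: ": >" truncates a file to zero length, creating it if absent
    f="/tmp/demo.txt"            # hypothetical path
    echo "old contents" > "${f}"
    : > "${f}"                   # equivalent effect to: truncate -s 0 "${f}"
    wc -c < "${f}"               # prints 0
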
-  # Compile all blacklisted domains into one file and remove CRs
-  truncate -s 0 "${piholeDir}/${matterAndLight}"
+  # Empty $matterAndLight if it already exists; otherwise, create it
+  : > "${piholeDir}/${matterAndLight}"
+
   for i in "${activeDomains[@]}"; do
     # Only assimilate list if it is available (download might have failed permanently)
     if [[ -r "${i}" ]]; then
+      # Compile all blacklisted domains into one file and remove CRs
       tr -d '\r' < "${i}" >> "${piholeDir}/${matterAndLight}"
 
       # Ensure each source blocklist has a final newline
@@ -315,39 +331,39 @@ gravity_Schwarzschild() {
   echo -e "${OVER} ${TICK} ${str}"
 }
 
-# Append blacklist entries to eventHorizon if they exist
-gravity_Blacklist() {
-  local numBlacklisted plural str
+# Parse unfiltered consolidated blocklist into filtered domains-only format
+gravity_Filter() {
+  local str num
 
-  if [[ -f "${blacklistFile}" ]]; then
-    numBlacklisted=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")")
-    plural=; [[ "${numBlacklisted}" != "1" ]] && plural=s
-    str="Exact blocked domain${plural}: $numBlacklisted"
-    echo -e " ${INFO} ${str}"
-  else
-    echo -e " ${INFO} Nothing to blacklist!"
-  fi
+  str="Extracting domains from blocklists"
+  echo -ne " ${INFO} ${str}..."
+
+  # Parse files as hosts
+  gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}"
+
+  num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")")
+  echo -e "${OVER} ${TICK} ${str}
+ ${INFO} ${COL_LIGHT_BLUE}${num}${COL_NC} domains being pulled in by gravity"
+
+  gravity_Unique
 }
 
-# Return number of wildcards in output
-gravity_Wildcard() {
-  local numWildcards plural
+# Sort and remove duplicate blacklisted domains
+gravity_Unique() {
+  local str num
 
-  if [[ -f "${wildcardFile}" ]]; then
-    numWildcards=$(grep -c ^ "${wildcardFile}")
-    if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then
-      let numWildcards/=2
-    fi
-    plural=; [[ "${numWildcards}" != "1" ]] && plural=s
-    echo -e " ${INFO} Wildcard blocked domain${plural}: $numWildcards"
-  else
-    echo -e " ${INFO} No wildcards used!"
-  fi
+  str="Removing duplicate domains"
+  echo -ne " ${INFO} ${str}..."
+  sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}"
+  echo -e "${OVER} ${TICK} ${str}"
+
+  num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
+  echo -e " ${INFO} ${COL_LIGHT_BLUE}${num}${COL_NC} unique domains trapped in the Event Horizon"
 }
 
-# Prevent the domains of adlist sources from being blacklisted by other blocklists
-gravity_Whitelist() {
-  local plural str
+# Whitelist blocklist domain sources
+gravity_WhitelistBLD() {
+  local plural str uniqDomains
 
   echo ""
   plural=; [[ "${#sources[*]}" != "1" ]] && plural=s
@@ -361,17 +377,22 @@
   ${WHITELIST_COMMAND} -nr -q "${uniqDomains[*]}" > /dev/null
 
   echo -e "${OVER} ${TICK} ${str}"
+}
+
+# Whitelist user-defined domains
+gravity_Whitelist() {
+  local plural str num
 
   # Test existence of whitelist.txt
   if [[ -f "${whitelistFile}" ]]; then
     # Remove anything in whitelist.txt from the Event Horizon
-    numWhitelisted=$(wc -l < "${whitelistFile}")
-    plural=; [[ "${numWhitelisted}" != "1" ]] && plural=s
-    local str="Whitelisting $numWhitelisted domain${plural}"
+    num=$(wc -l < "${whitelistFile}")
+    plural=; [[ "${num}" != "1" ]] && plural=s
+    local str="Whitelisting ${num} domain${plural}"
    echo -ne " ${INFO} ${str}..."
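
gravity_Whitelist() continues just below with a grep -F -x -v -f call that subtracts exact whitelist entries from the consolidated list. A self-contained sketch of that subtraction, using hypothetical domains and temp paths:

    # Sketch: remove exact whitelist matches from a domain list
    printf '%s\n' "ads.example.com" "cdn.example.net" "tracker.example.org" > /tmp/domains
    printf '%s\n' "cdn.example.net" > /tmp/whitelist
    # -F fixed strings, -x whole-line matches, -v invert, -f patterns from a file
    grep -F -x -v -f /tmp/whitelist /tmp/domains
    # Prints:
    #   ads.example.com
    #   tracker.example.org
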
-    # Print everything from preEventHorizon into eventHorizon EXCEPT domains in whitelist.txt
-    grep -F -x -v -f "${whitelistFile}" "${piholeDir}/${preEventHorizon}" > "${piholeDir}/${eventHorizon}"
+    # Print everything from preEventHorizon into whitelistMatter EXCEPT domains in whitelist.txt
+    grep -F -x -v -f "${whitelistFile}" "${piholeDir}/${preEventHorizon}" > "${piholeDir}/${whitelistMatter}"
 
     echo -e "${OVER} ${TICK} ${str}"
   else
@@ -379,24 +400,40 @@
   fi
 }
 
-# Sort and remove duplicate blacklisted domains
-gravity_Unique() {
-  local str numberOf
+# Output count of blacklisted domains and wildcards
+gravity_ShowBlockCount() {
+  local num plural str
 
-  str="Removing duplicate domains"
-  echo -ne " ${INFO} ${str}..."
-  sort -u "${piholeDir}/${supernova}" > "${piholeDir}/${preEventHorizon}"
-  echo -e "${OVER} ${TICK} ${str}"
+  if [[ -f "${blacklistFile}" ]]; then
+    num=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")")
+    plural=; [[ "${num}" != "1" ]] && plural=s
+    str="Exact blocked domain${plural}: ${num}"
+    echo -e " ${INFO} ${str}"
+  else
+    echo -e " ${INFO} Nothing to blacklist!"
+  fi
 
-  numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
-  echo -e " ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon"
+  if [[ -f "${wildcardFile}" ]]; then
+    num=$(grep -c "^" "${wildcardFile}")
+    # If IPv4 and IPv6 are used, divide total wildcard count by 2
+    if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then
+      num=$(( num/2 ))
+    fi
+    plural=; [[ "${num}" != "1" ]] && plural=s
+    echo -e " ${INFO} Wildcard blocked domain${plural}: ${num}"
+  else
+    echo -e " ${INFO} No wildcards used!"
+  fi
 }
 
 # Parse list of domains into hosts format
 gravity_ParseDomainsIntoHosts() {
   if [[ -n "${IPV4_ADDRESS}" ]] || [[ -n "${IPV6_ADDRESS}" ]]; then
-    awk -v ipv4addr="$IPV4_ADDRESS" -v ipv6addr="$IPV6_ADDRESS" \
-      '{sub(/\r$/,""); if(ipv4addr) { print ipv4addr" "$0; }; if(ipv6addr) { print ipv6addr" "$0; }}' >> "${2}" < "${1}"
+    awk -v ipv4addr="$IPV4_ADDRESS" -v ipv6addr="$IPV6_ADDRESS" '{
+      sub(/\r$/, "")
+      if(ipv4addr) { print ipv4addr" "$0; }
+      if(ipv6addr) { print ipv6addr" "$0; }
+    }' >> "${2}" < "${1}"
   else
     echo -e "${OVER} ${CROSS} ${str}"
     echo -e " ${COL_LIGHT_RED}No IP addresses found! Please run 'pihole -r' to reconfigure${COL_NC}\\n"
@@ -406,6 +443,8 @@
 
 # Create "localhost" entries
 gravity_ParseLocalDomains() {
+  local hostname
+
   if [[ -f "/etc/hostname" ]]; then
     hostname=$(< "/etc/hostname")
   elif command -v hostname &> /dev/null; then
@@ -415,23 +454,19 @@
   fi
 
   echo -e "${hostname}\\npi.hole" > "${localList}.tmp"
-
-  # Copy the file over as /etc/pihole/local.list so dnsmasq can use it
-  rm "${localList}" 2> /dev/null || \
-    echo -e " ${CROSS} Unable to remove ${localList}"
 
   gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}"
-  rm "${localList}.tmp" 2> /dev/null || \
-    echo -e " ${CROSS} Unable to remove ${localList}.tmp"
 }
 
 # Create primary blacklist entries
 gravity_ParseBlacklistDomains() {
-  # Create $accretionDisc
-f "${piholeDir}/${accretionDisc}" ]] && echo "" > "${piholeDir}/${accretionDisc}" + local output status - gravity_ParseDomainsIntoHosts "${piholeDir}/${eventHorizon}" "${piholeDir}/${accretionDisc}" + # Empty $accretionDisc if it already exists, otherwise, create it + : > "${piholeDir}/${accretionDisc}" - # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it + gravity_ParseDomainsIntoHosts "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}" + + # Move the file over as /etc/pihole/gravity.list so dnsmasq can use it output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 ) status="$?" @@ -445,7 +480,6 @@ gravity_ParseBlacklistDomains() { # Create user-added blacklist entries gravity_ParseUserDomains() { if [[ -f "${blacklistFile}" ]]; then - numBlacklisted=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")") gravity_ParseDomainsIntoHosts "${blacklistFile}" "${blackList}.tmp" # Copy the file over as /etc/pihole/black.list so dnsmasq can use it mv "${blackList}.tmp" "${blackList}" 2> /dev/null || \ @@ -455,21 +489,6 @@ gravity_ParseUserDomains() { fi } -# Parse consolidated blocklist into domains-only format -gravity_Advanced() { - local str="Extracting domains from blocklists" - echo -ne " ${INFO} ${str}..." - - # Parse files as Hosts - gravity_ParseFileAsDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${supernova}" - - numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${supernova}")") - echo -e "${OVER} ${TICK} ${str} - ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} domains being pulled in by gravity" - - gravity_Unique -} - # Trap Ctrl-C gravity_Trap() { trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT @@ -479,7 +498,7 @@ gravity_Trap() { gravity_Cleanup() { local error="${1:-}" - str="Cleaning up debris" + str="Cleaning up stray matter" echo -ne " ${INFO} ${str}..." rm ${piholeDir}/pihole.*.txt 2> /dev/null @@ -487,10 +506,8 @@ gravity_Cleanup() { # Remove any unused .domains files for file in ${piholeDir}/*.${domainsExtension}; do - # If list is in active array then leave it (noop) else rm the list - if [[ "${activeDomains[*]}" =~ ${file} ]]; then - : - else + # If list is not in active array, then remove it + if [[ ! 
"${activeDomains[*]}" =~ ${file} ]]; then rm -f "${file}" 2> /dev/null || \ echo -e " ${CROSS} Failed to remove ${file##*/}" fi @@ -505,27 +522,38 @@ gravity_Cleanup() { "${PIHOLE_COMMAND}" restartdns fi + # Print Pi-hole status if an error occured if [[ -n "$error" ]]; then "${PIHOLE_COMMAND}" status exit 1 fi } +helpFunc() { + echo "Usage: pihole -g +Update domains from blocklists specified in adlists.list + +Options: + -f, --force Force the download of all specified blocklists + -h, --help Show this help dialog" + exit 0 +} + for var in "$@"; do case "${var}" in "-f" | "--force" ) forceDelete=true;; "-h" | "--help" ) helpFunc;; "-sd" | "--skip-download" ) skipDownload=true;; - "-b" | "--blacklist-only" ) blackListOnly=true;; - "-w" | "--wildcard" ) dnsRestart="restart";; + "-b" | "--blacklist-only" ) listType="blacklist";; + "-w" | "--whitelist-only" ) listType="whitelist";; + "-wild" | "--wildcard-only" ) listType="wildcard";; esac done -# Main Gravity Execution gravity_Trap -# Use "force-reload" when restarting dnsmasq for Blacklists and Whitelists -[[ -z "${dnsRestart}" ]] && dnsRestart="force-reload" +# Ensure dnsmasq is restarted when modifying wildcards +[[ "${listType}" == "wildcard" ]] && dnsRestart="restart" if [[ "${forceDelete}" == true ]]; then str="Deleting exising list cache" @@ -535,41 +563,51 @@ if [[ "${forceDelete}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" else echo -e "${OVER} ${CROSS} ${str}" + exit 1 fi fi -# If $blackListOnly is true, only run essential functions -if [[ ! "${blackListOnly}" == true ]]; then +# Determine which functions to run +if [[ "${skipDownload}" == false ]]; then + # Gravity needs to download blocklists + gravity_DNSLookup gravity_Collapse - gravity_Pull - - if [[ "${skipDownload}" == false ]]; then - gravity_Schwarzschild - gravity_Advanced - else - echo -e " ${INFO} Using cached Event Horizon list..." - numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") - echo -e " ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon" - fi + gravity_Supernova + gravity_Schwarzschild + gravity_Filter + gravity_WhitelistBLD +else + # Gravity needs to modify Blacklist/Whitelist/Wildcards + echo -e " ${INFO} Using cached Event Horizon list..." + numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") + echo -e " ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon" +fi +# Perform when downloading blocklists, or modifying the whitelist +if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "whitelist" ]]; then gravity_Whitelist fi -gravity_Blacklist -gravity_Wildcard +gravity_ShowBlockCount -str="Parsing domains into hosts format" -echo -ne " ${INFO} ${str}..." +# Perform when downloading blocklists, or modifying the blacklist +if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "blacklist" ]]; then + str="Parsing domains into hosts format" + echo -ne " ${INFO} ${str}..." -if [[ ! "${blackListOnly}" == true ]]; then - gravity_ParseLocalDomains - gravity_ParseBlacklistDomains -fi + gravity_ParseUserDomains -gravity_ParseUserDomains -echo -e "${OVER} ${TICK} ${str}" + # Perform when downloading blocklists + if [[ ! "${listType}" == "blacklist" ]]; then + gravity_ParseLocalDomains + gravity_ParseBlacklistDomains + fi + + echo -e "${OVER} ${TICK} ${str}" +fi -if [[ ! "${blackListOnly}" == true ]]; then +# Perform when downloading blocklists +if [[ "${skipDownload}" == false ]]; then gravity_Cleanup fi