#!/usr/bin/env bash
# shellcheck disable=SC1090
# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Usage: "pihole -g"
# Compiles a list of ad-serving domains by downloading them from multiple sources
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# Byte-wise, locale-independent text processing for sort/grep/sed throughout
export LC_ALL=C

# Colour/format helpers (provides TICK, CROSS, INFO, OVER, COL_* variables)
coltable="/opt/pihole/COL_TABLE"
source "${coltable}"

# Converts legacy wildcard entries into regex filters
regexconverter="/opt/pihole/wildcard_regex_converter.sh"
source "${regexconverter}"

# shellcheck disable=SC1091
# Gravity database migration helpers (provides upgrade_gravityDB)
source "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"
basename="pihole"
PIHOLE_COMMAND="/usr/local/bin/${basename}"

piholeDir="/etc/${basename}"

# Legacy (pre v5.0) list file locations
whitelistFile="${piholeDir}/whitelist.txt"
blacklistFile="${piholeDir}/blacklist.txt"
regexFile="${piholeDir}/regex.list"
adListFile="${piholeDir}/adlists.list"

localList="${piholeDir}/local.list"
VPNList="/etc/openvpn/ipp.txt"

piholeGitDir="/etc/.pihole"
gravityDBfile="${piholeDir}/gravity.db"
gravityTEMPfile="${piholeDir}/gravity_temp.db"
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"

domainsExtension="domains"

# Source setupVars from install script
setupVars="${piholeDir}/setupVars.conf"
if [[ -f "${setupVars}" ]]; then
    source "${setupVars}"

    # Remove CIDR mask from IPv4/6 addresses
    IPV4_ADDRESS="${IPV4_ADDRESS%/*}"
    IPV6_ADDRESS="${IPV6_ADDRESS%/*}"

    # Determine if IPv4/6 addresses exist
    if [[ -z "${IPV4_ADDRESS}" ]] && [[ -z "${IPV6_ADDRESS}" ]]; then
        echo -e "  ${COL_LIGHT_RED}No IP addresses found! Please run 'pihole -r' to reconfigure${COL_NC}"
        exit 1
    fi
else
    echo -e "  ${COL_LIGHT_RED}Installation Failure: ${setupVars} does not exist! ${COL_NC}
    Please run 'pihole -r', and choose the 'reconfigure' option to fix."
    exit 1
fi

# Source pihole-FTL from install script
pihole_FTL="${piholeDir}/pihole-FTL.conf"
if [[ -f "${pihole_FTL}" ]]; then
    source "${pihole_FTL}"
fi

# Default blocking mode when pihole-FTL.conf does not define one
if [[ -z "${BLOCKINGMODE}" ]]; then
    BLOCKINGMODE="NULL"
fi

# Determine if superseded pihole.conf exists
if [[ -r "${piholeDir}/pihole.conf" ]]; then
    echo -e "  ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
fi
2019-02-03 12:04:31 +00:00
# Generate new sqlite3 file from schema template
generate_gravity_database( ) {
2020-01-24 17:39:13 +00:00
sqlite3 " ${ 1 } " < " ${ gravityDBschema } "
2019-02-03 12:04:31 +00:00
}
# Copy data from the old database into the freshly built one, then swap the
# files so the new database goes live
gravity_swap_databases() {
    local str
    str="Building tree"
    echo -ne "  ${INFO} ${str}..."

    # The index is intentionally not UNIQUE as poor quality adlists may contain
    # the same domain more than once
    output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"

    str="Swapping databases"
    echo -ne "  ${INFO} ${str}..."

    # Carry configuration tables over from the live database into the new one
    output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"; } 2>&1 )
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"

    # Swap databases and remove old database
    rm "${gravityDBfile}"
    mv "${gravityTEMPfile}" "${gravityDBfile}"
}
# Record the time the gravity table was last updated successfully in the
# database's "info" table ('updated' property, unix epoch seconds)
update_gravity_timestamp() {
    output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 )
    status="$?"

    if [[ "${status}" -eq 0 ]]; then
        return 0
    fi

    echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n  ${output}"
    return 1
}
# Import domains from a legacy list file and store them in the specified
# database table (via an intermediate CSV fed to sqlite3 .import).
# Arguments: $1 - target table name (whitelist/blacklist/regex map to domainlist)
#            $2 - path of the legacy list file to import
# Side effects: moves $2 into ${piholeDir}/migration_backup on success
database_table_from_file() {
    # Define locals
    local table source backup_path backup_file tmpFile type
    table="${1}"
    source="${2}"
    backup_path="${piholeDir}/migration_backup"
    backup_file="${backup_path}/$(basename "${2}")"
    tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"

    local timestamp
    timestamp="$(date --utc +'%s')"

    local rowid
    declare -i rowid
    rowid=1

    # Special handling for domains to be imported into the common domainlist
    # table: the type column distinguishes white/black/regex entries
    case "${table}" in
        "whitelist") type="0"; table="domainlist";;
        "blacklist") type="1"; table="domainlist";;
        "regex")     type="3"; table="domainlist";;
    esac

    # Get MAX(id) from domainlist when INSERTing into this table so new rows
    # continue the existing id sequence
    if [[ "${table}" == "domainlist" ]]; then
        rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
        if [[ -z "$rowid" ]]; then
            rowid=0
        fi
        rowid+=1
    fi

    # Loop over all domains in ${source} file, skipping comment lines
    # (the pipeline subshell keeps rowid state across iterations)
    grep -v '^ *#' < "${source}" | while IFS= read -r domain
    do
        # Only add non-empty lines
        if [[ -n "${domain}" ]]; then
            if [[ "${table}" == "domain_audit" ]]; then
                # domain_audit table format (no enable or modified fields)
                echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
            elif [[ "${table}" == "adlist" ]]; then
                # Adlist table format
                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
            else
                # White-, black-, and regexlist table format
                echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
            fi
            rowid+=1
        fi
    done

    # Store domains in database table specified by ${table}
    # Use printf as .mode and .import need to be on separate lines
    # see https://unix.stackexchange.com/a/445615/83260
    output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi

    # Move source file to backup directory, create directory if not existing
    mkdir -p "${backup_path}"
    mv "${source}" "${backup_file}" 2> /dev/null || \
        echo -e "  ${CROSS} Unable to backup ${source} to ${backup_path}"

    # Delete tmpFile
    rm "${tmpFile}" > /dev/null 2>&1 || \
        echo -e "  ${CROSS} Unable to remove ${tmpFile}"
}
# Update timestamp of last update of this list. We store this in the "old"
# database as all values in the new database will later be overwritten.
# Arguments: $1 - adlist id whose date_updated column should be refreshed
database_adlist_updated() {
    output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
    status="$?"

    # Any sqlite3 failure here is fatal for the whole gravity run
    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update timestamp of adlist with ID ${1} in database ${gravityDBfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Migrate pre-v5.0 list files to database-based Pi-hole versions
migrate_to_database() {
    # Create database file only if not present
    if [[ ! -e "${gravityDBfile}" ]]; then
        # Create new database file - note that this will be created in version 1
        echo -e "  ${INFO} Creating new gravity database"
        generate_gravity_database "${gravityDBfile}"

        # Check if gravity database needs to be updated
        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"

        # Migrate list files to new database
        if [[ -e "${adListFile}" ]]; then
            # Store adlist domains in database
            echo -e "  ${INFO} Migrating content of ${adListFile} into new database"
            database_table_from_file "adlist" "${adListFile}"
        fi
        if [[ -e "${blacklistFile}" ]]; then
            # Store blacklisted domains in database
            echo -e "  ${INFO} Migrating content of ${blacklistFile} into new database"
            database_table_from_file "blacklist" "${blacklistFile}"
        fi
        if [[ -e "${whitelistFile}" ]]; then
            # Store whitelisted domains in database
            echo -e "  ${INFO} Migrating content of ${whitelistFile} into new database"
            database_table_from_file "whitelist" "${whitelistFile}"
        fi
        if [[ -e "${regexFile}" ]]; then
            # Store regex domains in database
            # Important note: We need to add the domains to the "regex" table
            # as it will only later be renamed to "regex_blacklist"!
            echo -e "  ${INFO} Migrating content of ${regexFile} into new database"
            database_table_from_file "regex" "${regexFile}"
        fi
    fi

    # Check if gravity database needs to be updated
    upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
}
# Determine if DNS resolution is available before proceeding.
# Retries itself (recursively) after a 120s holddown when resolution fails;
# the non-local variable "secs" doubles as the "we already failed once" flag.
gravity_CheckDNSResolutionAvailable() {
    local lookupDomain="pi.hole"

    # Determine if $localList does not exist, and ensure it is not empty
    if [[ ! -e "${localList}" ]] || [[ -s "${localList}" ]]; then
        lookupDomain="raw.githubusercontent.com"
    fi

    # Determine if $lookupDomain is resolvable
    if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then
        # Print confirmation of resolvability if it had previously failed
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
    # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
    # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
    if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # Determine error output message
    if pgrep pihole-FTL &> /dev/null; then
        echo -e "  ${CROSS} DNS resolution is currently unavailable"
    else
        echo -e "  ${CROSS} DNS service is not running"
        "${PIHOLE_COMMAND}" restartdns
    fi

    # Ensure DNS server is given time to be resolvable
    secs="120"
    echo -ne "  ${INFO} Time until retry: ${secs}"
    until timeout 1 getent hosts "${lookupDomain}" &> /dev/null; do
        [[ "${secs:-}" -eq 0 ]] && break
        echo -ne "${OVER}  ${INFO} Time until retry: ${secs}"
        : $((secs--))
        sleep 1
    done

    # Try again
    gravity_CheckDNSResolutionAvailable
}
# Retrieve blocklist URLs from the gravity database, download each one, and
# import the consolidated result into a fresh temporary gravity database
gravity_DownloadBlocklists() {
    echo -e "  ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."

    # Retrieve source URLs from gravity database
    # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true)
    mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
    mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"

    # Parse source domains from $sources
    mapfile -t sourceDomains <<< "$(
        # Logic: Split by folder/port
        awk -F '[/:]' '{
            # Remove URL protocol & optional username:password@
            gsub(/(.*:\/\/|.*:.*@)/, "", $0)
            if (length($1) > 0) { print $1 }
            else { print "local" }
        }' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
    )"

    local str="Pulling blocklist source list into range"
    if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
        echo -e "${OVER}  ${TICK} ${str}"
    else
        echo -e "${OVER}  ${CROSS} ${str}"
        echo -e "  ${INFO} No source list found, or it is empty"
        echo ""
        return 1
    fi

    local url domain agent cmd_ext str target compression
    echo ""

    # Prepare new gravity database
    str="Preparing new gravity database"
    echo -ne "  ${INFO} ${str}..."
    rm "${gravityTEMPfile}" > /dev/null 2>&1
    output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to create new database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    else
        echo -e "${OVER}  ${TICK} ${str}"
    fi

    # Consolidated CSV all downloaded lists get appended to (via parseList)
    target="$(mktemp -p "/tmp" --suffix=".gravity")"

    # Use compression to reduce the amount of data that is transferred
    # between the Pi-hole and the ad list provider. Use this feature
    # only if it is supported by the locally available version of curl
    if curl -V | grep -q "Features:.* libz"; then
        compression="--compressed"
        echo -e "  ${INFO} Using libz compression\n"
    else
        compression=""
        echo -e "  ${INFO} Libz compression not available\n"
    fi

    # Loop through $sources and download each one
    for ((i = 0; i < "${#sources[@]}"; i++)); do
        url="${sources[$i]}"
        domain="${sourceDomains[$i]}"
        id="${sourceIDs[$i]}"

        # Save the file as list.#.domain
        saveLocation="${piholeDir}/list.${id}.${domain}.${domainsExtension}"
        activeDomains[$i]="${saveLocation}"

        # Default user-agent (for Cloudflare's Browser Integrity Check: https://support.cloudflare.com/hc/en-us/articles/200170086-What-does-the-Browser-Integrity-Check-do-)
        agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36"

        # Provide special commands for blocklists which may need them
        case "${domain}" in
            "pgl.yoyo.org") cmd_ext="-d mimetype=plaintext -d hostformat=hosts";;
            *) cmd_ext="";;
        esac

        echo -e "  ${INFO} Target: ${url}"
        local regex
        # Check for characters NOT allowed in URLs
        regex="[^a-zA-Z0-9:/?&%=~._()-;]"
        if [[ "${url}" =~ ${regex} ]]; then
            echo -e "  ${CROSS} Invalid Target"
        else
            gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}"
        fi
        echo ""
    done

    str="Storing downloaded domains in new gravity database"
    echo -ne "  ${INFO} ${str}..."
    output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 )
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    else
        echo -e "${OVER}  ${TICK} ${str}"
    fi

    # .import succeeded but produced messages: surface them as SQL warnings,
    # quoting the offending line of the source CSV where identifiable
    if [[ "${status}" -eq 0 && -n "${output}" ]]; then
        echo -e "  Encountered non-critical SQL warnings. Please check the suitability of the lists you're using!\\n\\n  SQL warnings:"
        local warning file line lineno
        while IFS= read -r line; do
            echo "  - ${line}"
            warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")"
            file="${warning%:*}"
            lineno="${warning#*:}"
            if [[ -n "${file}" && -n "${lineno}" ]]; then
                echo -n "    Line contains: "
                awk "NR==${lineno}" < "${file}"
            fi
        done <<< "${output}"
        echo ""
    fi

    rm "${target}" > /dev/null 2>&1 || \
        echo -e "  ${CROSS} Unable to remove ${target}"

    gravity_Blackbody=true
}
# Running count of lines in the consolidated target file; lets parseList work
# out how many lines each individual list contributed
total_num=0

# Append one downloaded list to the consolidated gravity CSV, tagging every
# domain with its adlist ID, and report how many entries were invalid.
# Arguments: $1 - adlist ID, $2 - source list file, $3 - consolidated target
parseList() {
    local adlistID="${1}" src="${2}" target="${3}" incorrect_lines
    # This sed does the following things:
    # 1. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_)
    # 2. Append ,adlistID to every line
    # 3. Ensures there is a newline on the last line
    sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/.$/a\\" "${src}" >> "${target}"
    # Find (up to) five domains containing invalid characters (see above)
    incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)"

    local num_lines num_target_lines num_correct_lines num_invalid
    # Get number of lines in source file
    num_lines="$(grep -c "^" "${src}")"
    # Get number of lines in destination file
    num_target_lines="$(grep -c "^" "${target}")"
    # Lines this list contributed = growth of the target file
    num_correct_lines="$((num_target_lines-total_num))"
    total_num="$num_target_lines"
    num_invalid="$((num_lines-num_correct_lines))"

    if [[ "${num_invalid}" -eq 0 ]]; then
        echo "  ${INFO} Received ${num_lines} domains"
    else
        echo "  ${INFO} Received ${num_lines} domains, ${num_invalid} domains invalid!"
    fi

    # Display sample of invalid lines if we found some
    if [[ -n "${incorrect_lines}" ]]; then
        echo "      Sample of invalid domains:"
        while IFS= read -r line; do
            echo "      - ${line}"
        done <<< "${incorrect_lines}"
    fi
}
# Download specified URL and perform checks on HTTP status and file content.
# Arguments: $1 url, $2 extra curl options, $3 user-agent, $4 adlist ID,
#            $5 cached list path, $6 consolidated target file, $7 curl
#            compression flag ("--compressed" or empty)
# NOTE(review): also reads ${domain}, ${PIHOLE_DNS_1}, ${IPV4_ADDRESS} and
# ${BLOCKINGMODE} from the caller's scope — confirm before reusing elsewhere.
gravity_DownloadBlocklistFromUrl() {
    local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" compression="${7}"
    local heisenbergCompensator="" patternBuffer str httpCode success=""

    # Create temp file to store content on disk instead of RAM
    patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")

    # Determine if $saveLocation has read permission
    if [[ -r "${saveLocation}" && $url != "file"* ]]; then
        # Have curl determine if a remote file has been modified since last retrieval
        # Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls)
        # Note: Don't do this for local files, always download them
        heisenbergCompensator="-z ${saveLocation}"
    fi

    str="Status:"
    echo -ne "  ${INFO} ${str} Pending..."

    # Determine whether this Pi-hole itself blocks the list's host domain,
    # checking with the method matching the configured blocking mode
    blocked=false
    case $BLOCKINGMODE in
        "IP-NODATA-AAAA"|"IP")
            if [[ $(dig "${domain}" +short | grep "${IPV4_ADDRESS}" -c) -ge 1 ]]; then
                blocked=true
            fi;;
        "NXDOMAIN")
            if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
                blocked=true
            fi;;
        "NULL"|*)
            if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
                blocked=true
            fi;;
    esac

    if [[ "${blocked}" == true ]]; then
        # Resolve the host via the upstream DNS server (PIHOLE_DNS_1, with
        # optional "#port" suffix) so curl can bypass our own blocking
        printf -v ip_addr "%s" "${PIHOLE_DNS_1%#*}"
        if [[ ${PIHOLE_DNS_1} != *"#"* ]]; then
            port=53
        else
            printf -v port "%s" "${PIHOLE_DNS_1#*#}"
        fi
        ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
        if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
            port=443;
        else port=80
        fi
        bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
        echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
        echo -ne "  ${INFO} ${str} Pending..."
        cmd_ext="--resolve $domain:$port:$ip $cmd_ext"
    fi

    # shellcheck disable=SC2086
    httpCode=$(curl -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${patternBuffer}" 2> /dev/null)

    case $url in
        # Did we "download" a local file?
        "file"*)
            if [[ -s "${patternBuffer}" ]]; then
                echo -e "${OVER}  ${TICK} ${str} Retrieval successful"; success=true
            else
                echo -e "${OVER}  ${CROSS} ${str} Not found / empty list"
            fi;;
        # Did we "download" a remote file?
        *)
            # Determine "Status:" output based on HTTP response
            case "${httpCode}" in
                "200") echo -e "${OVER}  ${TICK} ${str} Retrieval successful"; success=true;;
                "304") echo -e "${OVER}  ${TICK} ${str} No changes detected"; success=true;;
                "000") echo -e "${OVER}  ${CROSS} ${str} Connection Refused";;
                "403") echo -e "${OVER}  ${CROSS} ${str} Forbidden";;
                "404") echo -e "${OVER}  ${CROSS} ${str} Not found";;
                "408") echo -e "${OVER}  ${CROSS} ${str} Time-out";;
                "451") echo -e "${OVER}  ${CROSS} ${str} Unavailable For Legal Reasons";;
                "500") echo -e "${OVER}  ${CROSS} ${str} Internal Server Error";;
                "504") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Gateway)";;
                "521") echo -e "${OVER}  ${CROSS} ${str} Web Server Is Down (Cloudflare)";;
                "522") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Cloudflare)";;
                *    ) echo -e "${OVER}  ${CROSS} ${str} ${url} (${httpCode})";;
            esac;;
    esac

    # Determine if the blocklist was downloaded and saved correctly
    if [[ "${success}" == true ]]; then
        if [[ "${httpCode}" == "304" ]]; then
            # Add domains to database table file
            parseList "${adlistID}" "${saveLocation}" "${target}"
        # Check if $patternbuffer is a non-zero length file
        elif [[ -s "${patternBuffer}" ]]; then
            # Determine if blocklist is non-standard and parse as appropriate
            gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}"
            # Add domains to database table file
            parseList "${adlistID}" "${saveLocation}" "${target}"
            # Update date_updated field in gravity database table
            database_adlist_updated "${adlistID}"
        else
            # Fall back to previously cached list if $patternBuffer is empty
            echo -e "  ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
        fi
    else
        # Determine if cached list has read permission
        if [[ -r "${saveLocation}" ]]; then
            echo -e "  ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
            # Add domains to database table file
            parseList "${adlistID}" "${saveLocation}" "${target}"
        else
            echo -e "  ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
        fi
    fi
}
2017-09-15 12:39:17 +00:00
# Parse a downloaded source file ($1) into a plain list of domains ($2).
# NOTE(review): the guard below ("#if ...") is commented out, so the
# consolidated-list branch always runs and the unconditional "return 0"
# makes everything after it unreachable dead code (apparently kept on
# purpose for reference / future re-enablement).
gravity_ParseFileIntoDomains() {
  local source="${1}" destination="${2}" firstLine

  # Determine if we are parsing a consolidated list
  #if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
    # Remove comments and print only the domain name
    # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contiguous
    # This helps with that and makes it easier to read
    # It also helps with debugging so each stage of the script can be researched more in depth
    # 1) Remove carriage returns
    # 2) Convert all characters to lowercase
    # 3) Remove comments (text starting with "#", include possible spaces before the hash sign)
    # 4) Remove lines containing "/"
    # 5) Remove leading tabs, spaces, etc.
    # 6) Delete lines not matching domain names
    < "${source}" tr -d '\r' | \
    tr '[:upper:]' '[:lower:]' | \
    sed 's/\s*#.*//g' | \
    sed -r '/(\/).*$/d' | \
    sed -r 's/^.*\s+//g' | \
    sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}"
    chmod 644 "${destination}"
    return 0
  #fi

  # ---- DEAD CODE: unreachable while the #if/#fi above remain commented ----
  # Individual file parsing: Keep comments, while parsing domains from each line
  # We keep comments to respect the list maintainer's licensing
  read -r firstLine < "${source}"

  # Determine how to parse individual source file formats
  if [[ "${firstLine,,}" =~ (adblock|ublock|^!) ]]; then
    # Compare $firstLine against lower case words found in Adblock lists
    echo -e "  ${CROSS} Format: Adblock (list type not supported)"
  elif grep -q "^address=/" "${source}" &> /dev/null; then
    # Parse Dnsmasq format lists
    echo -e "  ${CROSS} Format: Dnsmasq (list type not supported)"
  elif grep -q -E "^https?://" "${source}" &> /dev/null; then
    # Parse URL list if source file contains "http://" or "https://"
    # Scanning for "^IPv4$" is too slow with large (1M) lists on low-end hardware
    echo -ne "  ${INFO} Format: URL"

    awk '
      # Remove URL scheme, optional "username:password@", and ":?/;"
      # The scheme must be matched carefully to avoid blocking the wrong URL
      # in cases like:
      #   http://www.evil.com?http://www.good.com
      # See RFC 3986 section 3.1 for details.
      /[:?\/;]/ { gsub(/(^[a-zA-Z][a-zA-Z0-9+.-]*:\/\/(.*:.*@)?|[:?\/;].*)/, "", $0) }
      # Skip lines which are only IPv4 addresses
      /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/ { next }
      # Print if nonempty
      length { print }
    ' "${source}" 2> /dev/null > "${destination}"
    chmod 644 "${destination}"
    echo -e "${OVER}  ${TICK} Format: URL"
  else
    # Default: Keep hosts/domains file in same format as it was downloaded
    output=$( { mv "${source}" "${destination}"; } 2>&1 )
    chmod 644 "${destination}"

    if [[ ! -e "${destination}" ]]; then
      echo -e "\\n  ${CROSS} Unable to move tmp file to ${piholeDir}
  ${output}"
      gravity_Cleanup "error"
    fi
  fi
}
2015-11-23 07:49:38 +00:00
2019-04-25 09:18:54 +00:00
# Report the number of entries in a gravity database table/view.
#   $1 - table or view name to count
#   $2 - human-readable description used in the status line
# For "vw_gravity" the unique-domain count is also shown and persisted
# into the database's info table under the 'gravity_count' property.
gravity_Table_Count() {
  local tbl="${1}" label="${2}"
  local total
  total="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${tbl};")"

  # Plain tables: print the raw count and we are done
  if [[ "${tbl}" != "vw_gravity" ]]; then
    echo -e "  ${INFO} Number of ${label}: ${total}"
    return
  fi

  # Gravity view: also compute and record the number of distinct domains
  local distinct
  distinct="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${tbl};")"
  echo -e "  ${INFO} Number of ${label}: ${total} (${COL_BOLD}${distinct} unique domains${COL_NC})"
  sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count', ${distinct});"
}
2017-06-21 11:49:05 +00:00
2019-04-25 09:18:54 +00:00
# Output count of blacklisted domains and regex filters.
# Prints one summary line per gravity database view, in a fixed order.
# Fixed: the vw_gravity call previously passed a stray third argument ("");
# gravity_Table_Count only takes a view name and a description.
gravity_ShowCount() {
  local spec
  for spec in \
    "vw_gravity=gravity domains" \
    "vw_blacklist=exact blacklisted domains" \
    "vw_regex_blacklist=regex blacklist filters" \
    "vw_whitelist=exact whitelisted domains" \
    "vw_regex_whitelist=regex whitelist filters"; do
    # "${spec%%=*}" is the view name, "${spec#*=}" the description
    gravity_Table_Count "${spec%%=*}" "${spec#*=}"
  done
}
2015-11-26 23:48:52 +00:00
2017-07-24 11:24:34 +00:00
# Prefix every domain read from file $1 with the host's IP address(es)
# and append the resulting "ipaddr domain" lines to file $2.
# Reads the global IPV4_ADDRESS / IPV6_ADDRESS variables; when both are
# set, one line per protocol is emitted for each domain.
gravity_ParseDomainsIntoHosts() {
  < "${1}" awk -v ipv4="$IPV4_ADDRESS" -v ipv6="$IPV6_ADDRESS" '
    {
      # Strip Windows carriage returns
      sub(/\r$/, "")
      if (ipv6 && ipv4) {
        # Dual stack: emit one entry per protocol
        print ipv4" "$0
        print ipv6" "$0
      } else if (!ipv6) {
        print ipv4" "$0
      } else {
        print ipv6" "$0
      }
    }' >> "${2}"
}
2017-09-14 06:39:30 +00:00
# Create "localhost" entries into hosts format.
# Builds ${localList} from the host's FQDN plus "pi.hole", each prefixed
# with this machine's IP address(es), and appends any LAN hosts handed
# out by OpenVPN. Returns 0 (with a warning) if no FQDN can be found.
gravity_generateLocalList() {
  local fqdn

  # Prefer /etc/hostname; fall back to the hostname(1) utility
  if [[ -s "/etc/hostname" ]]; then
    fqdn=$(< "/etc/hostname")
  elif command -v hostname &> /dev/null; then
    fqdn=$(hostname -f)
  else
    echo -e "  ${CROSS} Unable to determine fully qualified domain name of host"
    return 0
  fi

  echo -e "${fqdn}\\npi.hole" > "${localList}.tmp"

  # Truncate ${localList} if it already exists, otherwise create it
  : > "${localList}"
  chmod 644 "${localList}"

  gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}"

  # Add additional LAN hosts provided by OpenVPN (if available)
  if [[ -f "${VPNList}" ]]; then
    awk -F ',' '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
  fi
}
2016-04-02 00:19:47 +00:00
2017-07-24 11:24:34 +00:00
# Trap Ctrl-C
gravity_Trap( ) {
trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
2015-12-26 18:37:51 +00:00
}
2015-12-05 03:41:37 +00:00
2017-09-15 12:39:17 +00:00
# Clean up after Gravity upon exit or cancellation.
#   $1 (optional) - non-empty string marks an error path: after cleanup,
#                   Pi-hole status is printed and the script exits 1.
# Fixed: "str" and the loop variable "file" are now declared local
# (previously leaked into global scope), and ${piholeDir} is quoted in
# the rm glob arguments (SC2086) while keeping the glob suffix unquoted.
gravity_Cleanup() {
  local error="${1:-}"
  local str file

  str="Cleaning up stray matter"
  echo -ne "  ${INFO} ${str}..."

  # Delete tmp content generated by Gravity (globs may legitimately not
  # match, hence the suppressed errors)
  rm "${piholeDir}"/pihole.*.txt 2> /dev/null
  rm "${piholeDir}"/*.tmp 2> /dev/null
  rm /tmp/*.phgpb 2> /dev/null

  # Ensure this function only runs when gravity_SetDownloadOptions() has completed
  if [[ "${gravity_Blackbody:-}" == true ]]; then
    # Remove any unused .domains files
    for file in "${piholeDir}"/*."${domainsExtension}"; do
      # If list is not in active array, then remove it
      if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
        rm -f "${file}" 2> /dev/null || \
          echo -e "  ${CROSS} Failed to remove ${file##*/}"
      fi
    done
  fi

  echo -e "${OVER}  ${TICK} ${str}"

  # Only restart DNS service if offline
  if ! pgrep pihole-FTL &> /dev/null; then
    "${PIHOLE_COMMAND}" restartdns
    # Read later by the top-level script; intentionally global
    dnsWasOffline=true
  fi

  # Print Pi-hole status if an error occurred
  if [[ -n "${error}" ]]; then
    "${PIHOLE_COMMAND}" status
    exit 1
  fi
}
2015-08-23 04:44:41 +00:00
2017-07-27 02:34:35 +00:00
# Display usage information for "pihole -g", then exit successfully.
helpFunc() {
  cat << 'EOF'
Usage: pihole -g
Update domains from blocklists specified in adlists.list
Options:
  -f, --force          Force the download of all specified blocklists
  -h, --help           Show this help dialog
EOF
  exit 0
}
2016-10-22 06:02:45 +00:00
# ---- Top-level gravity run ----
# Fixed: "rm -f" for the gravity DB so --recreate does not print an error
# when the database is already absent, and the force-delete path now uses
# "${piholeDir}" instead of a hard-coded /etc/pihole (consistency with the
# rest of the script).

# Parse command-line flags
for var in "$@"; do
  case "${var}" in
    "-f" | "--force" ) forceDelete=true;;
    "-r" | "--recreate" ) recreate_database=true;;
    "-h" | "--help" ) helpFunc;;
  esac
done

# Trap Ctrl-C
gravity_Trap

# Rebuild the gravity database from the migration backup if requested
if [[ "${recreate_database:-}" == true ]]; then
  str="Restoring from migration backup"
  echo -ne "  ${INFO} ${str}..."
  rm -f "${gravityDBfile}"
  pushd "${piholeDir}" > /dev/null || exit
  cp migration_backup/* .
  popd > /dev/null || exit
  echo -e "${OVER}  ${TICK} ${str}"
fi

# Move possibly existing legacy files to the gravity database
migrate_to_database

if [[ "${forceDelete:-}" == true ]]; then
  str="Deleting existing list cache"
  echo -ne "${INFO} ${str}..."
  rm "${piholeDir}"/list.* 2> /dev/null || true
  echo -e "${OVER}  ${TICK} ${str}"
fi

# Gravity downloads blocklists next
gravity_CheckDNSResolutionAvailable
gravity_DownloadBlocklists

# Create local.list
gravity_generateLocalList

# Migrate rest of the data from old to new database
gravity_swap_databases

# Update gravity timestamp
update_gravity_timestamp

# Ensure proper permissions are set for the database
chown pihole:pihole "${gravityDBfile}"
chmod g+w "${piholeDir}" "${gravityDBfile}"

# Compute numbers to be displayed
gravity_ShowCount

# Determine if DNS has been restarted by this instance of gravity
# (gravity_Cleanup sets dnsWasOffline when it had to restart FTL itself)
if [[ -z "${dnsWasOffline:-}" ]]; then
  "${PIHOLE_COMMAND}" restartdns reload
fi

gravity_Cleanup
echo ""

"${PIHOLE_COMMAND}" status