#!/usr/bin/env bash
# shellcheck disable=SC1090

# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Usage: "pihole -g"
# Compiles a list of ad-serving domains by downloading them from multiple sources
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# Force byte-wise, locale-independent sorting/matching for reproducible output
export LC_ALL=C

PI_HOLE_SCRIPT_DIR="/opt/pihole"
# Source utils.sh for GetFTLConfigValue
utilsfile="${PI_HOLE_SCRIPT_DIR}/utils.sh"
# shellcheck disable=SC1090
. "${utilsfile}"

# Source color/table definitions (TICK, CROSS, INFO, OVER, COL_* ...)
coltable="${PI_HOLE_SCRIPT_DIR}/COL_TABLE"
# shellcheck disable=SC1090
. "${coltable}"

# shellcheck disable=SC1091
. "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"
basename="pihole"
PIHOLE_COMMAND="/usr/local/bin/${basename}"

piholeDir="/etc/${basename}"

# Legacy (pre v5.0) list file locations
whitelistFile="${piholeDir}/whitelist.txt"
blacklistFile="${piholeDir}/blacklist.txt"
regexFile="${piholeDir}/regex.list"
adListFile="${piholeDir}/adlists.list"

localList="${piholeDir}/local.list"
VPNList="/etc/openvpn/ipp.txt"

piholeGitDir="/etc/.pihole"
# Gravity database location is configurable via FTL's config
GRAVITYDB=$(getFTLConfigValue files.gravity)
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"

domainsExtension="domains"
curl_connect_timeout=10

# Set up tmp dir variable in case it's not configured
: "${GRAVITY_TMPDIR:=/tmp}"

if [ ! -d "${GRAVITY_TMPDIR}" ] || [ ! -w "${GRAVITY_TMPDIR}" ]; then
    echo -e "  ${COL_LIGHT_RED}Gravity temporary directory does not exist or is not a writeable directory, falling back to /tmp. ${COL_NC}"
    GRAVITY_TMPDIR="/tmp"
fi

# Set this only after sourcing pihole-FTL.conf as the gravity database path may
# have changed
gravityDBfile="${GRAVITYDB}"
gravityTEMPfile="${GRAVITYDB}_temp"
gravityDIR="$(dirname -- "${gravityDBfile}")"
gravityOLDfile="${gravityDIR}/gravity_old.db"
# Generate new SQLite3 file from schema template.
# Globals: gravityDBfile (created), gravityDBschema (read), piholeDir
# Returns: 0 on success, 1 if the database could not be created
generate_gravity_database() {
    if ! pihole-FTL sqlite3 "${gravityDBfile}" <"${gravityDBschema}"; then
        echo -e "   ${CROSS} Unable to create ${gravityDBfile}"
        return 1
    fi
    # Database must be writable by the pihole group (e.g. for the web interface)
    chown pihole:pihole "${gravityDBfile}"
    chmod g+w "${piholeDir}" "${gravityDBfile}"
}
# Build gravity tree (index) on the temporary gravity database to speed up lookups.
# Globals: gravityTEMPfile (modified)
# Returns: 0 on success, 1 if index creation failed
gravity_build_tree() {
    local str
    str="Building tree"
    echo -ne "  ${INFO} ${str}..."

    # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
    output=$({ pihole-FTL sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"
}
# Copy data from old to new database file and swap them.
# Keeps the old database as gravity_old.db only when enough disk space exists.
# Globals: gravityDBfile, gravityTEMPfile, gravityOLDfile, gravityDIR
gravity_swap_databases() {
    str="Swapping databases"
    echo -ne "  ${INFO} ${str}..."

    # Swap databases and remove or conditionally rename old database
    # Number of available blocks on disk
    availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
    # Number of blocks, used by gravity.db
    gravityBlocks=$(stat --format "%b" "${gravityDBfile}")

    # Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
    # Better be safe than sorry...
    oldAvail=false
    if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${gravityDBfile}" ]; then
        oldAvail=true
        mv "${gravityDBfile}" "${gravityOLDfile}"
    else
        rm "${gravityDBfile}"
    fi
    mv "${gravityTEMPfile}" "${gravityDBfile}"
    echo -e "${OVER}  ${TICK} ${str}"

    if $oldAvail; then
        echo -e "  ${TICK} The old database remains available"
    fi
}
# Update timestamp when the gravity table was last updated successfully.
# Writes the 'updated' property into the info table of the temporary database.
# Returns: 0 on success, 1 on SQL failure
update_gravity_timestamp() {
    output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    return 0
}
# Import domains from file and store them in the specified database table.
# Arguments: $1 - target table name (whitelist/blacklist/regex/adlist/domain_audit)
#            $2 - source list file to import (moved to a backup dir afterwards)
# Globals: piholeDir, GRAVITY_TMPDIR, gravityDBfile
database_table_from_file() {
    # Define locals
    local table src backup_path backup_file tmpFile list_type
    table="${1}"
    src="${2}"
    backup_path="${piholeDir}/migration_backup"
    backup_file="${backup_path}/$(basename "${2}")"

    # Create a temporary file. We don't use '--suffix' here because not all
    # implementations of mktemp support it, e.g. on Alpine
    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}")"
    mv "${tmpFile}" "${tmpFile%.*}.gravity"
    tmpFile="${tmpFile%.*}.gravity"

    local timestamp
    timestamp="$(date --utc +'%s')"

    local rowid
    declare -i rowid
    rowid=1

    # Special handling for domains to be imported into the common domainlist table
    if [[ "${table}" == "whitelist" ]]; then
        list_type="0"
        table="domainlist"
    elif [[ "${table}" == "blacklist" ]]; then
        list_type="1"
        table="domainlist"
    elif [[ "${table}" == "regex" ]]; then
        list_type="3"
        table="domainlist"
    fi

    # Get MAX(id) from domainlist when INSERTing into this table
    if [[ "${table}" == "domainlist" ]]; then
        rowid="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
        if [[ -z "$rowid" ]]; then
            rowid=0
        fi
        rowid+=1
    fi

    # Loop over all domains in ${src} file
    # Read file line by line, skipping comment lines
    grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
        # Only add non-empty lines
        if [[ -n "${domain}" ]]; then
            if [[ "${table}" == "domain_audit" ]]; then
                # domain_audit table format (no enable or modified fields)
                echo "${rowid},\"${domain}\",${timestamp}" >>"${tmpFile}"
            elif [[ "${table}" == "adlist" ]]; then
                # Adlist table format
                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
            else
                # White-, black-, and regexlist table format
                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
            fi
            rowid+=1
        fi
    done

    # Store domains in database table specified by ${table}
    # Use printf as .mode and .import need to be on separate lines
    # see https://unix.stackexchange.com/a/445615/83260
    output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to fill table ${table}${list_type} in database ${gravityDBfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi

    # Move source file to backup directory, create directory if not existing
    mkdir -p "${backup_path}"
    mv "${src}" "${backup_file}" 2>/dev/null ||
        echo -e "  ${CROSS} Unable to backup ${src} to ${backup_path}"

    # Delete tmpFile
    rm "${tmpFile}" >/dev/null 2>&1 ||
        echo -e "  ${CROSS} Unable to remove ${tmpFile}"
}
# Check if a column with name ${2} exists in gravity table with name ${1}.
# Returns: 0 if the column exists, 1 otherwise
gravity_column_exists() {
    output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1)
    if [[ "${output}" == "1" ]]; then
        return 0 # Bash 0 is success
    fi
    return 1 # Bash non-0 is failure
}
# Update number of domain on this list. We store this in the "old" database as all values in the new database will later be overwritten
# Arguments: $1 - adlist ID, $2 - number of domains, $3 - number of invalid domains
database_adlist_number() {
    # Only try to set number of domains when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "number"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update number of domains in adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Update status of this list. We store this in the "old" database as all values in the new database will later be overwritten
# Arguments: $1 - adlist ID, $2 - status code to store
database_adlist_status() {
    # Only try to set the status when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "status"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update status of adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Migrate pre-v5.0 list files to database-based Pi-hole versions.
# Creates the gravity database if missing, imports legacy list files,
# and upgrades the database schema to the current version.
migrate_to_database() {
    # Create database file only if not present
    if [ ! -e "${gravityDBfile}" ]; then
        # Create new database file - note that this will be created in version 1
        echo -e "  ${INFO} Creating new gravity database"
        if ! generate_gravity_database; then
            echo -e "   ${CROSS} Error creating new gravity database. Please contact support."
            return 1
        fi

        # Check if gravity database needs to be updated
        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"

        # Migrate list files to new database
        if [ -e "${adListFile}" ]; then
            # Store adlist domains in database
            echo -e "  ${INFO} Migrating content of ${adListFile} into new database"
            database_table_from_file "adlist" "${adListFile}"
        fi
        if [ -e "${blacklistFile}" ]; then
            # Store blacklisted domains in database
            echo -e "  ${INFO} Migrating content of ${blacklistFile} into new database"
            database_table_from_file "blacklist" "${blacklistFile}"
        fi
        if [ -e "${whitelistFile}" ]; then
            # Store whitelisted domains in database
            echo -e "  ${INFO} Migrating content of ${whitelistFile} into new database"
            database_table_from_file "whitelist" "${whitelistFile}"
        fi
        if [ -e "${regexFile}" ]; then
            # Store regex domains in database
            # Important note: We need to add the domains to the "regex" table
            # as it will only later be renamed to "regex_blacklist"!
            echo -e "  ${INFO} Migrating content of ${regexFile} into new database"
            database_table_from_file "regex" "${regexFile}"
        fi
    fi

    # Check if gravity database needs to be updated
    upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
}
# Determine if DNS resolution is available before proceeding.
# Retries for up to 120 seconds (restarting FTL if it is not running),
# then calls itself recursively to re-check.
# Returns: 0 when resolution works; exits 1 when it stays unavailable after retry
gravity_CheckDNSResolutionAvailable() {
    local lookupDomain="pi.hole"

    # Determine if $localList does not exist, and ensure it is not empty
    if [[ ! -e "${localList}" ]] || [[ -s "${localList}" ]]; then
        lookupDomain="raw.githubusercontent.com"
    fi

    # Determine if $lookupDomain is resolvable
    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
        # Print confirmation of resolvability if it had previously failed
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
    # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
    # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
    if timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # Determine error output message
    if pgrep pihole-FTL &>/dev/null; then
        echo -e "  ${CROSS} DNS resolution is currently unavailable"
    else
        echo -e "  ${CROSS} DNS service is not running"
        "${PIHOLE_COMMAND}" restartdns
    fi

    # Ensure DNS server is given time to be resolvable
    secs="120"
    echo -ne "  ${INFO} Time until retry: ${secs}"
    until timeout 1 getent hosts "${lookupDomain}" &>/dev/null; do
        [[ "${secs:-}" -eq 0 ]] && break
        echo -ne "${OVER}  ${INFO} Time until retry: ${secs}"
        : $((secs--))
        sleep 1
    done

    # Try again
    gravity_CheckDNSResolutionAvailable
}
# Retrieve blocklist URLs and parse domains from adlist.list.
# Reads enabled adlists from the gravity database, prepares the temporary
# gravity database, then downloads every configured list.
gravity_DownloadBlocklists() {
    echo -e "  ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."

    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        echo -e "  ${INFO} Storing gravity database in ${COL_BOLD}${gravityDBfile}${COL_NC}"
    fi

    # Retrieve source URLs from gravity database
    # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
    mapfile -t sources <<<"$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"

    # Parse source domains from $sources
    mapfile -t sourceDomains <<<"$(
        # Logic: Split by folder/port
        awk -F '[/:]' '{
            # Remove URL protocol & optional username:password@
            gsub(/(.*:\/\/|.*:.*@)/, "", $0)
            if (length($1) > 0) { print $1 }
            else { print "local" }
        }' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
    )"

    local str="Pulling blocklist source list into range"
    echo -e "${OVER}  ${TICK} ${str}"

    if [[ -z "${sources[*]}" ]] || [[ -z "${sourceDomains[*]}" ]]; then
        echo -e "  ${INFO} No source list found, or it is empty"
        echo ""
        unset sources
    fi

    local url domain str target compression adlist_type
    echo ""

    # Prepare new gravity database
    str="Preparing new gravity database"
    echo -ne "  ${INFO} ${str}..."
    rm "${gravityTEMPfile}" >/dev/null 2>&1
    output=$({ pihole-FTL sqlite3 "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to create new database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    else
        echo -e "${OVER}  ${TICK} ${str}"
    fi

    str="Creating new gravity databases"
    echo -ne "  ${INFO} ${str}..."

    # Gravity copying SQL script
    copyGravity="$(cat "${gravityDBcopy}")"
    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        # Replace default gravity script location by custom location
        copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
    fi

    output=$({ pihole-FTL sqlite3 "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"

    # Use compression to reduce the amount of data that is transferred
    # between the Pi-hole and the ad list provider. Use this feature
    # only if it is supported by the locally available version of curl
    if curl -V | grep -q "Features:.* libz"; then
        compression="--compressed"
        echo -e "  ${INFO} Using libz compression\n"
    else
        compression=""
        echo -e "  ${INFO} Libz compression not available\n"
    fi

    # Loop through $sources and download each one
    for ((i = 0; i < "${#sources[@]}"; i++)); do
        url="${sources[$i]}"
        domain="${sourceDomains[$i]}"
        id="${sourceIDs[$i]}"
        if [[ "${sourceTypes[$i]}" -eq "0" ]]; then
            # Gravity list
            str="blocklist"
            adlist_type="gravity"
        else
            # AntiGravity list
            str="allowlist"
            adlist_type="antigravity"
        fi

        # Save the file as list.#.domain
        saveLocation="${piholeDir}/list.${id}.${domain}.${domainsExtension}"
        activeDomains[$i]="${saveLocation}"

        echo -e "  ${INFO} Target: ${url}"
        local regex check_url
        # Check for characters NOT allowed in URLs
        regex="[^a-zA-Z0-9:/?&%=~._()-;]"

        # this will remove first @ that is after schema and before domain
        # \1 is optional schema, \2 is userinfo
        check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"

        if [[ "${check_url}" =~ ${regex} ]]; then
            echo -e "  ${CROSS} Invalid Target"
        else
            gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}" "${adlist_type}" "${domain}"
        fi
        echo ""
    done

    gravity_Blackbody=true
}
# Compare a downloaded list against its stored checksum and update the
# adlist status accordingly (1 = updated, 2 = unchanged).
# Arguments: $1 - adlist ID, $2 - path of the downloaded list file
compareLists() {
    local adlistID="${1}" target="${2}"

    # Verify checksum when an older checksum exists
    if [[ -s "${target}.sha1" ]]; then
        if ! sha1sum --check --status --strict "${target}.sha1"; then
            # The list changed upstream, we need to update the checksum
            sha1sum "${target}" >"${target}.sha1"
            echo "  ${INFO} List has been updated"
            database_adlist_status "${adlistID}" "1"
        else
            echo "  ${INFO} List stayed unchanged"
            database_adlist_status "${adlistID}" "2"
        fi
    else
        # No checksum available, create one for comparing on the next run
        sha1sum "${target}" >"${target}.sha1"
        # We assume here it was changed upstream
        database_adlist_status "${adlistID}" "1"
    fi
}
# Download specified URL and perform checks on HTTP status and file content.
# Arguments: $1 url, $2 adlistID, $3 saveLocation, $4 target, $5 compression,
#            $6 gravity_type (gravity/antigravity), $7 domain
# Falls back to a previously cached list when the download fails.
gravity_DownloadBlocklistFromUrl() {
    local url="${1}" adlistID="${2}" saveLocation="${3}" target="${4}" compression="${5}" gravity_type="${6}" domain="${7}"
    local heisenbergCompensator="" listCurlBuffer str httpCode success="" ip cmd_ext

    # Create temp file to store content on disk instead of RAM
    # We don't use '--suffix' here because not all implementations of mktemp support it, e.g. on Alpine
    listCurlBuffer="$(mktemp -p "${GRAVITY_TMPDIR}")"
    mv "${listCurlBuffer}" "${listCurlBuffer%.*}.phgpb"
    listCurlBuffer="${listCurlBuffer%.*}.phgpb"

    # Determine if $saveLocation has read permission
    if [[ -r "${saveLocation}" && $url != "file"* ]]; then
        # Have curl determine if a remote file has been modified since last retrieval
        # Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls)
        # Note: Don't do this for local files, always download them
        heisenbergCompensator="-z ${saveLocation}"
    fi

    str="Status:"
    echo -ne "  ${INFO} ${str} Pending..."
    blocked=false
    case $(getFTLConfigValue dns.blocking.mode) in
    "IP-NODATA-AAAA" | "IP")
        # Get IP address of this domain
        ip="$(dig "${domain}" +short)"
        # Check if this IP matches any IP of the system
        if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<<"$(ip a)") -gt 0 ]]; then
            blocked=true
        fi
        ;;
    "NXDOMAIN")
        if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
            blocked=true
        fi
        ;;
    "NODATA")
        if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
            blocked=true
        fi
        ;;
    "NULL" | *)
        if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
            blocked=true
        fi
        ;;
    esac

    if [[ "${blocked}" == true ]]; then
        # Get first defined upstream server
        local upstream
        upstream="$(getFTLConfigValue dns.upstreams)"

        # Isolate first upstream server from a string like
        # [ 1.2.3.4#1234, 5.6.7.8#5678, ... ]
        upstream="${upstream%%,*}"
        upstream="${upstream##*[}"
        upstream="${upstream%%]*}"

        # Get IP address and port of this upstream server
        local ip_addr port
        printf -v ip_addr "%s" "${upstream%#*}"
        if [[ ${upstream} != *"#"* ]]; then
            # No explicit port given, use default DNS port
            port=53
        else
            printf -v port "%s" "${upstream#*#}"
        fi
        ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
        if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
            port=443
        else
            port=80
        fi
        bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
        echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${upstream} to download ${url}"
        echo -ne "  ${INFO} ${str} Pending..."
        cmd_ext="--resolve $domain:$port:$ip"
    fi

    # shellcheck disable=SC2086
    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2>/dev/null)

    case $url in
    # Did we "download" a local file?
    "file"*)
        if [[ -s "${listCurlBuffer}" ]]; then
            echo -e "${OVER}  ${TICK} ${str} Retrieval successful"
            success=true
        else
            echo -e "${OVER}  ${CROSS} ${str} Not found / empty list"
        fi
        ;;
    # Did we "download" a remote file?
    *)
        # Determine "Status:" output based on HTTP response
        case "${httpCode}" in
        "200") echo -e "${OVER}  ${TICK} ${str} Retrieval successful"; success=true ;;
        "304") echo -e "${OVER}  ${TICK} ${str} No changes detected"; success=true ;;
        "000") echo -e "${OVER}  ${CROSS} ${str} Connection Refused" ;;
        "403") echo -e "${OVER}  ${CROSS} ${str} Forbidden" ;;
        "404") echo -e "${OVER}  ${CROSS} ${str} Not found" ;;
        "408") echo -e "${OVER}  ${CROSS} ${str} Time-out" ;;
        "451") echo -e "${OVER}  ${CROSS} ${str} Unavailable For Legal Reasons" ;;
        "500") echo -e "${OVER}  ${CROSS} ${str} Internal Server Error" ;;
        "504") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Gateway)" ;;
        "521") echo -e "${OVER}  ${CROSS} ${str} Web Server Is Down (Cloudflare)" ;;
        "522") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Cloudflare)" ;;
        *) echo -e "${OVER}  ${CROSS} ${str} ${url} (${httpCode})" ;;
        esac
        ;;
    esac

    local done="false"
    # Determine if the blocklist was downloaded and saved correctly
    if [[ "${success}" == true ]]; then
        if [[ "${httpCode}" == "304" ]]; then
            # Add domains to database table file
            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
            database_adlist_status "${adlistID}" "2"
            done="true"
        # Check if $listCurlBuffer is a non-zero length file
        elif [[ -s "${listCurlBuffer}" ]]; then
            # Determine if blocklist is non-standard and parse as appropriate
            gravity_ParseFileIntoDomains "${listCurlBuffer}" "${saveLocation}"
            # Remove curl buffer file after its use
            rm "${listCurlBuffer}"
            # Add domains to database table file
            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
            # Compare lists, are they identical?
            compareLists "${adlistID}" "${saveLocation}"
            done="true"
        else
            # Fall back to previously cached list if $listCurlBuffer is empty
            echo -e "  ${INFO} Received empty file"
        fi
    fi

    # Do we need to fall back to a cached list (if available)?
    if [[ "${done}" != "true" ]]; then
        # Determine if cached list has read permission
        if [[ -r "${saveLocation}" ]]; then
            echo -e "  ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
            # Add domains to database table file
            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
            database_adlist_status "${adlistID}" "3"
        else
            echo -e "  ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
            # Manually reset these two numbers because we do not call parseList here
            database_adlist_number "${adlistID}" 0 0
            database_adlist_status "${adlistID}" "4"
        fi
    fi
}
2017-09-15 12:39:17 +00:00
# Parse source files into domains format
2017-07-27 02:34:35 +00:00
# Parse source files into domains format
gravity_ParseFileIntoDomains() {
    # Normalize a downloaded blocklist into one bare domain per line.
    # $1: raw downloaded file (read only)
    # $2: destination file (overwritten)
    local rawList="${1}"
    local parsedList="${2}"

    # Lowercase everything first so the later pattern matching never has to
    # worry about case; this also copies the source to the destination.
    tr '[:upper:]' '[:lower:]' < "${rawList}" > "${parsedList}"

    # Single in-place pass (extended regex), in order:
    #   1) strip carriage returns
    #   2) drop ABP comments (text from "!" onward)
    #   3) drop ABP headers (text from "[" onward)
    #   4) delete lines with ABP extended CSS selectors
    #      ("##", "#$#", "#?#", "#@#") preceded by a letter
    #   5) strip "#" comments, including spaces before the hash sign
    #   6) strip everything up to the last whitespace run
    #      (this removes leading IP addresses from hosts-format lines)
    #   7) delete lines that are now empty
    sed -i -r \
        -e 's/\r$//' \
        -e 's/\s*!.*//g' \
        -e 's/\s*\[.*//g' \
        -e '/[a-z]\#[$?@]{0,1}\#/d' \
        -e 's/\s*#.*//g' \
        -e 's/^.*\s+//g' \
        -e '/^$/d' "${parsedList}"

    # World-readable so other Pi-hole components can consume the list
    chmod 644 "${parsedList}"
}
2015-11-23 07:49:38 +00:00
2019-04-25 09:18:54 +00:00
# Report number of entries in a table
gravity_Table_Count() {
    # Print a status line with the row count of a gravity database table.
    # $1: table (or view) name to count
    # $2: human-readable description used in the output
    local table="${1}"
    local str="${2}"
    local num
    num="$(pihole-FTL sqlite3 "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"

    # Plain tables/views: print the count and we are done
    if [[ "${table}" != "gravity" ]]; then
        echo -e "  ${INFO} Number of ${str}: ${num}"
        return
    fi

    # The gravity table can contain the same domain from several adlists,
    # so additionally report the distinct-domain count and persist it in
    # the info table for FTL to pick up.
    local unique
    unique="$(pihole-FTL sqlite3 "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
    echo -e "  ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
    pihole-FTL sqlite3 "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
}
2017-06-21 11:49:05 +00:00
2019-04-25 09:18:54 +00:00
# Output count of blacklisted domains and regex filters
gravity_ShowCount() {
    # Here we use the table "gravity" instead of the view "vw_gravity" for speed.
    # It's safe to replace it here, because right after a gravity run both will
    # show the exactly same number of domains.
    gravity_Table_Count "gravity" "gravity domains" ""

    # Remaining views, as "table:description" pairs
    local entry
    for entry in \
        "vw_blacklist:exact denied domains" \
        "vw_regex_blacklist:regex denied filters" \
        "vw_whitelist:exact allowed domains" \
        "vw_regex_whitelist:regex allowed filters"; do
        gravity_Table_Count "${entry%%:*}" "${entry#*:}"
    done
}
2015-11-26 23:48:52 +00:00
2017-09-14 06:39:30 +00:00
# Create "localhost" entries into hosts format
gravity_generateLocalList() {
    # Recreate ${localList} from scratch on every run (truncates if present)
    printf '%s\n' "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
    chmod 644 "${localList}"

    # Append LAN hostnames handed out by OpenVPN, when an ipp.txt exists.
    # ipp.txt is "name,ip" per line; emit hosts-format "ip<TAB>name.vpn".
    if [[ -f "${VPNList}" ]]; then
        awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
    fi
}
2016-04-02 00:19:47 +00:00
2017-07-24 11:24:34 +00:00
# Trap Ctrl-C
gravity_Trap() {
    # On SIGINT: announce the user abort, then run gravity_Cleanup in
    # "error" mode, which removes temporary files and exits non-zero.
    trap '{ echo -e "\\n\\n  ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
}
2015-12-05 03:41:37 +00:00
2017-09-15 12:39:17 +00:00
# Clean up after Gravity upon exit or cancellation
gravity_Cleanup() {
    # Remove temporary files produced by a gravity run.
    # $1 (optional): non-empty marks an error exit — print Pi-hole status
    #                and terminate with status 1 after cleaning up.
    local error="${1:-}"

    local str="Cleaning up stray matter"
    echo -ne "  ${INFO} ${str}..."

    # Delete tmp content generated by Gravity.
    # Quote the directory portion (not the glob) so paths containing
    # spaces survive word splitting; the glob itself must stay unquoted
    # so it can expand (SC2086 fix).
    rm "${piholeDir}"/pihole.*.txt 2> /dev/null
    rm "${piholeDir}"/*.tmp 2> /dev/null
    # listCurlBuffer location
    rm "${GRAVITY_TMPDIR}"/*.phgpb 2> /dev/null
    # invalid_domains location
    rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2> /dev/null

    # Ensure this function only runs when gravity_SetDownloadOptions() has completed
    if [[ "${gravity_Blackbody:-}" == true ]]; then
        # Remove any unused .domains files
        for file in "${piholeDir}"/*."${domainsExtension}"; do
            # If list is not in active array, then remove it
            if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
                rm -f "${file}" 2> /dev/null ||
                    echo -e "  ${CROSS} Failed to remove ${file##*/}"
            fi
        done
    fi

    echo -e "${OVER}  ${TICK} ${str}"

    # # Only restart DNS service if offline
    # if ! pgrep pihole-FTL &> /dev/null; then
    #     "${PIHOLE_COMMAND}" restartdns
    #     dnsWasOffline=true
    # fi

    # Print Pi-hole status if an error occurred
    if [[ -n "${error}" ]]; then
        "${PIHOLE_COMMAND}" status
        exit 1
    fi
}
2015-08-23 04:44:41 +00:00
2021-08-20 18:48:57 +00:00
# Check the gravity database for corruption and, if needed (or forced),
# rebuild it via sqlite3's ".recover" command.
# $1: "force" runs the recovery even when no damage is detected.
database_recovery() {
    local check
    local msg="Checking integrity of existing gravity database (this can take a while)"
    local mode="${1}"
    echo -ne "  ${INFO} ${msg}..."
    check="$(pihole-FTL sqlite3 "${gravityDBfile}" "PRAGMA integrity_check" 2>&1)"
    if [[ "${check}" = "ok" ]]; then
        echo -e "${OVER}  ${TICK} ${msg} - no errors found"

        msg="Checking foreign keys of existing gravity database (this can take a while)"
        echo -ne "  ${INFO} ${msg}..."
        unset check
        check="$(pihole-FTL sqlite3 "${gravityDBfile}" "PRAGMA foreign_key_check" 2>&1)"
        if [[ -z "${check}" ]]; then
            echo -e "${OVER}  ${TICK} ${msg} - no errors found"
            # Database is healthy: skip recovery unless explicitly forced
            if [[ "${mode}" != "force" ]]; then
                return
            fi
        else
            echo -e "${OVER}  ${CROSS} ${msg} - errors found:"
            while IFS= read -r line; do echo "  - $line"; done <<< "$check"
        fi
    else
        echo -e "${OVER}  ${CROSS} ${msg} - errors found:"
        while IFS= read -r line; do echo "  - $line"; done <<< "$check"
    fi

    msg="Trying to recover existing gravity database"
    echo -ne "  ${INFO} ${msg}..."
    # We have to remove any possibly existing recovery database or this will fail
    rm -f "${gravityDBfile}.recovered" > /dev/null 2>&1
    # Dump whatever can be salvaged from the damaged file into a fresh database
    if check="$(pihole-FTL sqlite3 "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 "${gravityDBfile}.recovered" 2>&1)"; then
        echo -e "${OVER}  ${TICK} ${msg} - success"
        mv "${gravityDBfile}" "${gravityDBfile}.old"
        mv "${gravityDBfile}.recovered" "${gravityDBfile}"
        echo -ne "  ${INFO} ${gravityDBfile} has been recovered"
        echo -ne "  ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
    else
        echo -e "${OVER}  ${CROSS} ${msg} - the following errors happened:"
        while IFS= read -r line; do echo "  - $line"; done <<< "$check"
        echo -e "  ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
        exit 1
    fi
    echo ""
}
2017-07-27 02:34:35 +00:00
# Print usage for "pihole -g" and exit successfully.
helpFunc() {
    local usage="Usage: pihole -g
Update domains from blocklists specified in adlists.list

Options:
  -f, --force          Force the download of all specified blocklists
  -h, --help           Show this help dialog"
    echo "${usage}"
    exit 0
}
2021-08-20 18:48:57 +00:00
# Select the gravity database repair strategy requested on the command line.
# Sets recover_database or recreate_database; prints usage and exits on
# anything else.
repairSelector() {
    local choice="$1"
    if [[ "${choice}" == "recover" ]]; then
        recover_database=true
    elif [[ "${choice}" == "recreate" ]]; then
        recreate_database=true
    else
        echo "Usage: pihole -g -r {recover,recreate}
Attempt to repair gravity database

Available options:
  pihole -g -r recover        Try to recover a damaged gravity database file.
                              Pi-hole tries to restore as much as possible
                              from a corrupted gravity database.

  pihole -g -r recover force  Pi-hole will run the recovery process even when
                              no damage is detected. This option is meant to be
                              a last resort. Recovery is a fragile task
                              consuming a lot of resources and shouldn't be
                              performed unnecessarily.

  pihole -g -r recreate       Create a new gravity database file from scratch.
                              This will remove your existing gravity database
                              and create a new file from scratch. If you still
                              have the migration backup created when migrating
                              to Pi-hole v5.0, Pi-hole will import these files."
        exit 0
    fi
}
2016-10-22 06:02:45 +00:00
for var in " $@ " ; do
2017-06-21 11:49:05 +00:00
case " ${ var } " in
2017-07-24 11:24:34 +00:00
"-f" | "--force" ) forceDelete = true; ;
2021-08-20 18:48:57 +00:00
"-r" | "--repair" ) repairSelector " $3 " ; ;
2017-07-24 11:24:34 +00:00
"-h" | "--help" ) helpFunc; ;
2017-06-21 11:49:05 +00:00
esac
2016-08-17 18:08:55 +00:00
done
2021-06-14 18:27:10 +00:00
# Remove OLD (backup) gravity file, if it exists
if [ [ -f " ${ gravityOLDfile } " ] ] ; then
rm " ${ gravityOLDfile } "
fi
2017-09-15 12:39:17 +00:00
# Trap Ctrl-C
2017-07-24 11:24:34 +00:00
gravity_Trap
2019-11-26 09:58:39 +00:00
if [ [ " ${ recreate_database :- } " = = true ] ] ; then
2021-08-20 18:48:57 +00:00
str = "Recreating gravity database from migration backup"
2019-11-26 09:58:39 +00:00
echo -ne " ${ INFO } ${ str } ... "
rm " ${ gravityDBfile } "
2019-12-09 16:52:03 +00:00
pushd " ${ piholeDir } " > /dev/null || exit
2019-11-26 09:58:39 +00:00
cp migration_backup/* .
2019-12-09 16:52:03 +00:00
popd > /dev/null || exit
2019-11-26 09:58:39 +00:00
echo -e " ${ OVER } ${ TICK } ${ str } "
fi
2021-08-20 18:48:57 +00:00
if [ [ " ${ recover_database :- } " = = true ] ] ; then
2021-09-11 19:56:44 +00:00
database_recovery " $4 "
2021-08-20 18:48:57 +00:00
fi
2019-04-24 17:55:05 +00:00
# Move possibly existing legacy files to the gravity database
2021-12-20 19:36:55 +00:00
if ! migrate_to_database; then
echo -e " ${ CROSS } Unable to migrate to database. Please contact support. "
exit 1
fi
2019-04-24 17:55:05 +00:00
2017-09-15 12:39:17 +00:00
if [ [ " ${ forceDelete :- } " = = true ] ] ; then
2017-09-18 07:36:03 +00:00
str = "Deleting existing list cache"
2017-06-21 11:49:05 +00:00
echo -ne " ${ INFO } ${ str } ... "
2017-09-18 07:36:03 +00:00
rm /etc/pihole/list.* 2> /dev/null || true
echo -e " ${ OVER } ${ TICK } ${ str } "
2016-08-17 18:08:55 +00:00
fi
2019-04-25 09:18:54 +00:00
# Gravity downloads blocklists next
2021-12-20 19:36:55 +00:00
if ! gravity_CheckDNSResolutionAvailable; then
2021-12-20 19:09:11 +00:00
echo -e " ${ CROSS } Can not complete gravity update, no DNS is available. Please contact support. "
exit 1
fi
2023-03-24 23:15:49 +00:00
if ! gravity_DownloadBlocklists; then
echo -e " ${ CROSS } Unable to create gravity database. Please try again later. If the problem persists, please contact support. "
exit 1
fi
2017-07-24 11:24:34 +00:00
2019-04-25 09:18:54 +00:00
# Create local.list
gravity_generateLocalList
2017-06-17 11:57:27 +00:00
2020-02-12 18:26:25 +00:00
# Update gravity timestamp
update_gravity_timestamp
2020-01-24 17:39:13 +00:00
# Ensure proper permissions are set for the database
2023-10-22 06:14:11 +00:00
chown pihole:pihole " ${ gravityTEMPfile } "
chmod g+w " ${ piholeDir } " " ${ gravityTEMPfile } "
# Build the tree
gravity_build_tree
2017-09-14 10:23:49 +00:00
2023-10-22 06:14:11 +00:00
# Compute numbers to be displayed (do this after building the tree to get the
# numbers quickly from the tree instead of having to scan the whole database)
2020-02-17 20:07:48 +00:00
gravity_ShowCount
2023-10-22 06:14:11 +00:00
# Migrate rest of the data from old to new database
# IMPORTANT: Swapping the databases must be the last step before the cleanup
if ! gravity_swap_databases; then
echo -e " ${ CROSS } Unable to create database. Please contact support. "
exit 1
fi
2020-01-24 17:39:13 +00:00
gravity_Cleanup
echo ""
2023-10-16 20:19:44 +00:00
echo " ${ TICK } Done. "
2023-02-12 17:39:37 +00:00
# "${PIHOLE_COMMAND}" status