Store domains without sorting and deduplicating them first. This preserves the relationship between the individual domains and the lists they came from.
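For illustration, a simplified sketch of the new flow this commit introduces (not the literal code; ${cmd_ext} and ${agent} are chosen per URL in the real download loop): the gravity table is flushed once, then every enabled adlist is downloaded and its domains are imported together with that list's ID.

# Simplified sketch of the per-list flow (assumes sources/sourceIDs are
# already populated from vw_adlist, as in the diff below)
database_truncate_table "gravity"
for ((i = 0; i < "${#sources[@]}"; i++)); do
  gravity_DownloadBlocklistFromUrl "${sources[$i]}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}"
done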

Signed-off-by: DL6ER <dl6er@dl6er.de>
pull/3036/head
DL6ER 5 years ago
parent ab90ff565a
commit a8af2e1837

@@ -39,4 +39,9 @@ upgrade_gravityDB(){
sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql"
version=3
fi
if [[ "$version" == "3" ]]; then
# This migration script upgrades ...
sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/3_to_4.sql"
version=4
fi
}
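The 3_to_4.sql migration invoked above is not included in this diff. As a hypothetical sketch only (the real migration file may differ), the essential change it has to provide is a gravity table that pairs every domain with the ID of the adlist it was downloaded from:

# Hypothetical sketch, not the contents of the real 3_to_4.sql
sqlite3 "${database}" "CREATE TABLE gravity (domain TEXT NOT NULL, adlist_id INTEGER NOT NULL);"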

@@ -97,25 +97,39 @@ update_gravity_timestamp() {
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}"
return 1
fi
return 0
}
# Import domains from file and store them in the specified database table
database_table_from_file() {
# Define locals
local table source backup_path backup_file
database_truncate_table() {
local table
table="${1}"
source="${2}"
backup_path="${piholeDir}/migration_backup"
backup_file="${backup_path}/$(basename "${2}")"
# Truncate table
output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}"
gravity_Cleanup "error"
return 1
fi
return 0
}
# Import domains from file and store them in the specified database table
database_table_from_file() {
# Define locals
local table source backup_path backup_file arg
table="${1}"
source="${2}"
arg="${3}"
backup_path="${piholeDir}/migration_backup"
backup_file="${backup_path}/$(basename "${2}")"
# Truncate table only if not gravity (we add multiple times to this table)
if [[ "${table}" != "gravity" ]]; then
database_truncate_table "${table}"
fi
local tmpFile
@@ -123,31 +137,30 @@ database_table_from_file() {
local timestamp
timestamp="$(date --utc +'%s')"
local inputfile
if [[ "${table}" == "gravity" ]]; then
# No need to modify the input data for the gravity table
inputfile="${source}"
else
# Apply format for white-, blacklist, regex, and adlist tables
# Read file line by line
local rowid
declare -i rowid
rowid=1
grep -v '^ *#' < "${source}" | while IFS= read -r domain
do
# Only add non-empty lines
if [[ -n "${domain}" ]]; then
if [[ "${table}" == "domain_audit" ]]; then
# domain_audit table format (no enable or modified fields)
echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
else
# White-, black-, and regexlist format
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
fi
rowid+=1
# Apply format for white-, blacklist, regex, and adlist tables
# Read file line by line
local rowid
declare -i rowid
rowid=1
grep -v '^ *#' < "${source}" | while IFS= read -r domain
do
# Only add non-empty lines
if [[ -n "${domain}" ]]; then
if [[ "${table}" == "domain_audit" ]]; then
# domain_audit table format (no enable or modified fields)
echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
elif [[ "${table}" == "gravity" ]]; then
# gravity table format
echo "\"${domain}\",${arg}" >> "${tmpFile}"
else
# White-, black-, and regexlist format
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
fi
done
inputfile="${tmpFile}"
fi
rowid+=1
fi
done
inputfile="${tmpFile}"
# Store domains in database table specified by ${table}
# Use printf as .mode and .import need to be on separate lines
# see https://unix.stackexchange.com/a/445615/83260
@@ -263,12 +276,13 @@ gravity_CheckDNSResolutionAvailable() {
}
# Retrieve blocklist URLs and parse domains from adlist.list
gravity_GetBlocklistUrls() {
gravity_DownloadBlocklists() {
echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."
# Retrieve source URLs from gravity database
# We source only enabled adlists; sqlite3 stores boolean values as 0 (false) or 1 (true)
mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
# Parse source domains from $sources
mapfile -t sourceDomains <<< "$(
@@ -285,21 +299,23 @@ gravity_GetBlocklistUrls() {
if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
echo -e "${OVER} ${TICK} ${str}"
return 0
else
echo -e "${OVER} ${CROSS} ${str}"
echo -e " ${INFO} No source list found, or it is empty"
echo ""
return 1
fi
}
# Define options for when retrieving blocklists
gravity_SetDownloadOptions() {
local url domain agent cmd_ext str
echo ""
# Flush gravity table once before looping over sources
str="Flushing gravity table"
echo -ne " ${INFO} ${str}..."
if database_truncate_table "gravity"; then
echo -e "${OVER} ${TICK} ${str}"
fi
# Loop through $sources and download each one
for ((i = 0; i < "${#sources[@]}"; i++)); do
url="${sources[$i]}"
@@ -319,7 +335,7 @@ gravity_SetDownloadOptions() {
esac
echo -e " ${INFO} Target: ${domain} (${url##*/})"
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}"
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}"
echo ""
done
gravity_Blackbody=true
@@ -327,7 +343,7 @@ gravity_SetDownloadOptions() {
# Download specified URL and perform checks on HTTP status and file content
gravity_DownloadBlocklistFromUrl() {
local url="${1}" cmd_ext="${2}" agent="${3}" heisenbergCompensator="" patternBuffer str httpCode success=""
local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" heisenbergCompensator="" patternBuffer str httpCode success=""
# Create temp file to store content on disk instead of RAM
patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")
@@ -408,11 +424,20 @@ gravity_DownloadBlocklistFromUrl() {
# Determine if the blocklist was downloaded and saved correctly
if [[ "${success}" == true ]]; then
if [[ "${httpCode}" == "304" ]]; then
: # Do not attempt to re-parse file
# Add domains to database table
str="Adding to database table"
echo -ne " ${INFO} ${str}..."
database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
echo -e "${OVER} ${TICK} ${str}"
# Check if $patternBuffer is a non-zero length file
elif [[ -s "${patternBuffer}" ]]; then
# Determine if blocklist is non-standard and parse as appropriate
gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}"
# Add domains to database table
str="Adding to database table"
echo -ne " ${INFO} ${str}..."
database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
echo -e "${OVER} ${TICK} ${str}"
else
# Fall back to previously cached list if $patternBuffer is empty
echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
@@ -421,6 +446,11 @@ gravity_DownloadBlocklistFromUrl() {
# Determine if cached list has read permission
if [[ -r "${saveLocation}" ]]; then
echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
# Add domains to database table
str="Adding to database table"
echo -ne " ${INFO} ${str}..."
database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
echo -e "${OVER} ${TICK} ${str}"
else
echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
fi
@@ -432,7 +462,7 @@ gravity_ParseFileIntoDomains() {
local source="${1}" destination="${2}" firstLine abpFilter
# Determine if we are parsing a consolidated list
if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
#if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
# Remove comments and print only the domain name
# Most of the lists downloaded are already in hosts file format, but the spacing/formatting is not consistent
# This helps with that and makes it easier to read
@@ -449,7 +479,7 @@ gravity_ParseFileIntoDomains() {
sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}"
chmod 644 "${destination}"
return 0
fi
#fi
# Individual file parsing: Keep comments, while parsing domains from each line
# We keep comments to respect the list maintainer's licensing
@@ -536,80 +566,23 @@ gravity_ParseFileIntoDomains() {
fi
}
# Create (unfiltered) "Matter and Light" consolidated list
gravity_ConsolidateDownloadedBlocklists() {
local str lastLine
str="Consolidating blocklists"
echo -ne " ${INFO} ${str}..."
# Empty $matterAndLight if it already exists, otherwise, create it
: > "${piholeDir}/${matterAndLight}"
chmod 644 "${piholeDir}/${matterAndLight}"
# Loop through each *.domains file
for i in "${activeDomains[@]}"; do
# Determine if file has read permissions, as download might have failed
if [[ -r "${i}" ]]; then
# Remove windows CRs from file, convert list to lower case, and append into $matterAndLight
tr -d '\r' < "${i}" | tr '[:upper:]' '[:lower:]' >> "${piholeDir}/${matterAndLight}"
# Ensure that the first line of a new list is on a new line
lastLine=$(tail -1 "${piholeDir}/${matterAndLight}")
if [[ "${#lastLine}" -gt 0 ]]; then
echo "" >> "${piholeDir}/${matterAndLight}"
fi
fi
done
echo -e "${OVER} ${TICK} ${str}"
}
# Parse consolidated list into (filtered, unique) domains-only format
gravity_SortAndFilterConsolidatedList() {
local str num
str="Extracting domains from blocklists"
echo -ne " ${INFO} ${str}..."
# Parse into file
gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}"
# Format $parsedMatter line total as currency
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")")
echo -e "${OVER} ${TICK} ${str}"
echo -e " ${INFO} Gravity pulled in ${COL_BLUE}${num}${COL_NC} domains"
str="Removing duplicate domains"
echo -ne " ${INFO} ${str}..."
sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}"
chmod 644 "${piholeDir}/${preEventHorizon}"
echo -e "${OVER} ${TICK} ${str}"
# Format $preEventHorizon line total as currency
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
str="Storing ${COL_BLUE}${num}${COL_NC} unique blocking domains in database"
echo -ne " ${INFO} ${str}..."
database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}"
echo -e "${OVER} ${TICK} ${str}"
}
# Report number of entries in a table
gravity_Table_Count() {
local table="${1}"
local str="${2}"
local extra="${3}"
local num
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} WHERE enabled = 1;")"
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} ${extra};")"
echo -e " ${INFO} Number of ${str}: ${num}"
}
# Output count of blacklisted domains and regex filters
gravity_ShowCount() {
gravity_Table_Count "blacklist" "exact blacklisted domains"
gravity_Table_Count "regex_blacklist" "regex blacklist filters"
gravity_Table_Count "whitelist" "exact whitelisted domains"
gravity_Table_Count "regex_whitelist" "regex whitelist filters"
gravity_Table_Count "gravity" "gravity domains" ""
gravity_Table_Count "blacklist" "exact blacklisted domains" "WHERE enabled = 1"
gravity_Table_Count "regex_blacklist" "regex blacklist filters" "WHERE enabled = 1"
gravity_Table_Count "whitelist" "exact whitelisted domains" "WHERE enabled = 1"
gravity_Table_Count "regex_whitelist" "regex whitelist filters" "WHERE enabled = 1"
}
# Parse list of domains into hosts format
@@ -748,12 +721,7 @@ fi
# Gravity downloads blocklists next
gravity_CheckDNSResolutionAvailable
if gravity_GetBlocklistUrls; then
gravity_SetDownloadOptions
# Build preEventHorizon
gravity_ConsolidateDownloadedBlocklists
gravity_SortAndFilterConsolidatedList
fi
gravity_DownloadBlocklists
# Create local.list
gravity_generateLocalList
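Because each gravity entry now carries the ID of its source list, per-list statistics become possible. A minimal usage sketch (the adlist_id column name is inferred from the import format above and may differ):

# Count how many domains each adlist contributed to the gravity table
sqlite3 "${gravityDBfile}" "SELECT adlist_id, COUNT(*) FROM gravity GROUP BY adlist_id;"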
