Mirror of https://github.com/pi-hole/pi-hole
Pi-hole v5.17 (#5288)
commit bea63f9d2d
.github/workflows/codeql-analysis.yml (2 changes)

@@ -25,7 +25,7 @@ jobs:
     steps:
     -
       name: Checkout repository
-      uses: actions/checkout@v3.4.0
+      uses: actions/checkout@v3.5.2
     # Initializes the CodeQL tools for scanning.
     -
       name: Initialize CodeQL
.github/workflows/stale.yml (27 changes)

@@ -4,23 +4,42 @@ on:
   schedule:
     - cron: '0 8 * * *'
   workflow_dispatch:
+  issue_comment:
+
+env:
+  stale_label: stale
+
 jobs:
-  stale:
+  stale_action:
+    if: github.event_name != 'issue_comment'
     runs-on: ubuntu-latest
     permissions:
       issues: write
 
     steps:
-      - uses: actions/stale@v7.0.0
+      - uses: actions/stale@v8.0.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 30
           days-before-close: 5
           stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Please comment or update this issue or it will be closed in 5 days.'
-          stale-issue-label: 'stale'
+          stale-issue-label: $stale_label
           exempt-issue-labels: 'Internal, Fixed in next release, Bug: Confirmed, Documentation Needed'
           exempt-all-issue-assignees: true
           operations-per-run: 300
           close-issue-reason: 'not_planned'
+
+  remove_stale: # trigger "stale" removal immediately when stale issues are commented on
+    if: github.event_name == 'issue_comment'
+    permissions:
+      contents: read # for actions/checkout
+      issues: write # to edit issues label
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3.5.2
+      - name: Remove 'stale' label
+        run: gh issue edit ${{ github.event.issue.number }} --remove-label $stale_label
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
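For reference, a minimal sketch of the label-removal step the new remove_stale job performs; the issue number is hypothetical, and gh must be authenticated (in the workflow this happens via GITHUB_TOKEN):

    # Remove the 'stale' label from issue 123 (illustrative number);
    # requires a token with 'issues: write' permission
    export GITHUB_TOKEN="<token>"
    gh issue edit 123 --remove-label stale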
.github/workflows/stale_pr.yml (2 changes)

@@ -17,7 +17,7 @@ jobs:
       pull-requests: write
 
     steps:
-      - uses: actions/stale@v7.0.0
+      - uses: actions/stale@v8.0.0
         with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          # Do not automatically mark PR/issue as stale
.github/workflows/sync-back-to-dev.yml (2 changes)

@@ -33,7 +33,7 @@ jobs:
     name: Syncing branches
     steps:
       - name: Checkout
-        uses: actions/checkout@v3.4.0
+        uses: actions/checkout@v3.5.2
       - name: Opening pull request
         run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal'
         env:
.github/workflows/test.yml (8 changes)

@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3.4.0
+        uses: actions/checkout@v3.5.2
 
       - name: Check scripts in repository are executable
         run: |
@@ -53,19 +53,21 @@ jobs:
           debian_11,
           ubuntu_20,
           ubuntu_22,
+          ubuntu_23,
           centos_8,
           centos_9,
           fedora_36,
           fedora_37,
+          fedora_38,
         ]
     env:
       DISTRO: ${{matrix.distro}}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3.4.0
+        uses: actions/checkout@v3.5.2
 
       - name: Set up Python 3.10
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v4.6.1
        with:
          python-version: "3.10"
 
advanced/Scripts/piholeDebug.sh (file header lost in this capture; inferred from the functions below)

@@ -230,10 +230,8 @@ initialize_debug() {
 
 # This is a function for visually displaying the current test that is being run.
 # Accepts one variable: the name of what is being diagnosed
-# Colors do not show in the dashboard, but the icons do: [i], [✓], and [✗]
 echo_current_diagnostic() {
     # Colors are used for visually distinguishing each test in the output
-    # These colors do not show in the GUI, but the formatting will
     log_write "\\n${COL_PURPLE}*** [ DIAGNOSING ]:${COL_NC} ${1}"
 }
 
advanced/Scripts/query.sh (file header lost in this capture; inferred from the functions below)

@@ -69,31 +69,16 @@ if [[ -n "${str:-}" ]]; then
     exit 1
 fi
 
-# Scan an array of files for matching strings
-scanList(){
-    # Escape full stops
-    local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" list_type="${3:-}"
-
-    # Prevent grep from printing file path
-    cd "$piholeDir" || exit 1
-
-    # Prevent grep -i matching slowly: https://bit.ly/2xFXtUX
-    export LC_CTYPE=C
-
-    # /dev/null forces filename to be printed when only one list has been generated
-    case "${list_type}" in
-        "exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" "${lists}" /dev/null 2>/dev/null;;
-        # Iterate through each regexp and check whether it matches the domainQuery
-        # If it does, print the matching regexp and continue looping
-        # Input 1 - regexps | Input 2 - domainQuery
-        "regex" )
-            for list in ${lists}; do
-                if [[ "${domain}" =~ ${list} ]]; then
-                    printf "%b\n" "${list}";
-                fi
-            done;;
-        * ) grep -i "${esc_domain}" "${lists}" /dev/null 2>/dev/null;;
-    esac
+# Scan a domain against a list of regex
+scanRegExList(){
+    local domain="${1}" list="${2}"
+
+    for entry in ${list}; do
+        if [[ "${domain}" =~ ${entry} ]]; then
+            printf "%b\n" "${entry}";
+        fi
+    done
 }
 
 scanDatabaseTable() {
@@ -188,7 +173,7 @@ scanRegexDatabaseTable() {
     # Split regexps over a new line
     str_regexList=$(printf '%s\n' "${regexList[@]}")
     # Check domain against regexps
-    mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex")
+    mapfile -t regexMatches < <(scanRegExList "${domain}" "${str_regexList}")
     # If there were regex matches
     if [[ "${#regexMatches[@]}" -ne 0 ]]; then
         # Split matching regexps over a new line
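A quick sketch of how the renamed scanRegExList behaves, with a hypothetical newline-separated regex list (bash):

    # Two illustrative regex filters, one per line, as Pi-hole stores them
    regexList=$'(^|\\.)doubleclick\\.net$\n(^|\\.)example\\.com$'

    scanRegExList() {
        local domain="${1}" list="${2}"
        for entry in ${list}; do        # word-splits on newlines (and spaces)
            [[ "${domain}" =~ ${entry} ]] && printf '%b\n' "${entry}"
        done
    }

    scanRegExList "ads.doubleclick.net" "${regexList}"
    # prints: (^|\.)doubleclick\.net$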
advanced/Scripts/webpage.sh (file header lost in this capture; inferred from the functions below)

@@ -22,12 +22,14 @@ readonly dnscustomcnamefile="/etc/dnsmasq.d/05-pihole-custom-cname.conf"
 
 readonly gravityDBfile="/etc/pihole/gravity.db"
 
-# Source install script for ${setupVars}, ${PI_HOLE_BIN_DIR} and valid_ip()
-readonly PI_HOLE_FILES_DIR="/etc/.pihole"
-# shellcheck disable=SC2034 # used in basic-install to source the script without running it
-SKIP_INSTALL="true"
-source "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh"
-
+readonly setupVars="/etc/pihole/setupVars.conf"
+readonly PI_HOLE_BIN_DIR="/usr/local/bin"
+
+# Root of the web server
+readonly webroot="/var/www/html"
+
+# Source utils script
 utilsfile="/opt/pihole/utils.sh"
 source "${utilsfile}"
@@ -98,6 +100,47 @@ HashPassword() {
     echo "${return}"
 }
 
+# Check an IP address to see if it is a valid one
+valid_ip() {
+    # Local, named variables
+    local ip=${1}
+    local stat=1
+
+    # Regex matching one IPv4 component, i.e. an integer from 0 to 255.
+    # See https://tools.ietf.org/html/rfc1340
+    local ipv4elem="(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]?|0)";
+    # Regex matching an optional port (starting with '#') range of 1-65536
+    local portelem="(#(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|6[0-4][0-9]{3}|[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))?";
+    # Build a full IPv4 regex from the above subexpressions
+    local regex="^${ipv4elem}\\.${ipv4elem}\\.${ipv4elem}\\.${ipv4elem}${portelem}$"
+
+    # Evaluate the regex, and return the result
+    [[ $ip =~ ${regex} ]]
+
+    stat=$?
+    return "${stat}"
+}
+
+valid_ip6() {
+    local ip=${1}
+    local stat=1
+
+    # Regex matching one IPv6 element, i.e. a hex value from 0000 to FFFF
+    local ipv6elem="[0-9a-fA-F]{1,4}"
+    # Regex matching an IPv6 CIDR, i.e. 1 to 128
+    local v6cidr="(\\/([1-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8])){0,1}"
+    # Regex matching an optional port (starting with '#') range of 1-65536
+    local portelem="(#(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|6[0-4][0-9]{3}|[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))?";
+    # Build a full IPv6 regex from the above subexpressions
+    local regex="^(((${ipv6elem}))*((:${ipv6elem}))*::((${ipv6elem}))*((:${ipv6elem}))*|((${ipv6elem}))((:${ipv6elem})){7})${v6cidr}${portelem}$"
+
+    # Evaluate the regex, and return the result
+    [[ ${ip} =~ ${regex} ]]
+
+    stat=$?
+    return "${stat}"
+}
+
 SetWebPassword() {
     if [ "${SUDO_USER}" == "www-data" ]; then
         echo "Security measure: user www-data is not allowed to change webUI password!"
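A small usage sketch for the validators now inlined above, with the functions in scope (addresses are illustrative):

    valid_ip "192.168.1.1"       && echo "valid IPv4"          # matches
    valid_ip "192.168.1.1#5353"  && echo "valid IPv4 + port"   # optional '#port' suffix
    valid_ip "256.1.1.1"         || echo "rejected"            # 256 exceeds one octet
    valid_ip6 "fe80::1"          && echo "valid IPv6"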
@@ -613,7 +656,6 @@ Teleporter() {
         host="${host//./_}"
         filename="pi-hole-${host:-noname}-teleporter_${datetimestamp}.tar.gz"
     fi
-    # webroot is sourced from basic-install above
     php "${webroot}/admin/scripts/pi-hole/php/teleporter.php" > "${filename}"
 }
@@ -622,7 +664,7 @@ checkDomain()
     local domain validDomain
     # Convert to lowercase
     domain="${1,,}"
-    validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check
+    validDomain=$(grep -P "^((-|_)*[a-z0-9]((-|_)*[a-z0-9])*(-|_)*)(\\.(-|_)*([a-z0-9]((-|_)*[a-z0-9])*))*$" <<< "${domain}") # Valid chars check
     validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label
     echo "${validDomain}"
 }
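The character-class swap (\\d to 0-9) is behavior-preserving under grep -P; a quick illustrative check:

    echo "sub.example-domain.com" | grep -P "^((-|_)*[a-z0-9]((-|_)*[a-z0-9])*(-|_)*)(\\.(-|_)*([a-z0-9]((-|_)*[a-z0-9])*))*$"
    # prints the domain (all chars valid); "bad..domain" would print nothing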
advanced/Templates/gravity_copy.sql (file header lost in this capture; inferred from the statements below)

@@ -19,8 +19,6 @@ INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist;
 DELETE FROM OLD.adlist_by_group WHERE adlist_id NOT IN (SELECT id FROM OLD.adlist);
 INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group;
 
-INSERT OR REPLACE INTO info SELECT * FROM OLD.info;
-
 INSERT OR REPLACE INTO client SELECT * FROM OLD.client;
 DELETE FROM OLD.client_by_group WHERE client_id NOT IN (SELECT id FROM OLD.client);
 INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group;
automated install/basic-install.sh (file header lost in this capture; inferred from the functions below)

@@ -357,7 +357,7 @@ package_manager_detect() {
     # These variable names match the ones for apt-get. See above for an explanation of what they are for.
     PKG_INSTALL=("${PKG_MANAGER}" install -y)
     # CentOS package manager returns 100 when there are packages to update so we need to || true to prevent the script from exiting.
-    PKG_COUNT="${PKG_MANAGER} check-update | grep -E '(.i686|.x86|.noarch|.arm|.src)' | wc -l || true"
+    PKG_COUNT="${PKG_MANAGER} check-update | grep -E '(.i686|.x86|.noarch|.arm|.src|.riscv64)' | wc -l || true"
     OS_CHECK_DEPS=(grep bind-utils)
     INSTALLER_DEPS=(git dialog iproute newt procps-ng chkconfig ca-certificates)
     PIHOLE_DEPS=(cronie curl findutils sudo unzip libidn2 psmisc libcap nmap-ncat jq)
@@ -2366,6 +2366,9 @@ get_binary_name() {
             # set the binary to be used
             l_binary="pihole-FTL-linux-x86_64"
         fi
+    elif [[ "${machine}" == "riscv64" ]]; then
+        printf "%b  %b Detected riscv64 processor\\n" "${OVER}" "${TICK}"
+        l_binary="pihole-FTL-riscv64-linux-gnu"
     else
         # Something else - we try to use 32bit executable and warn the user
         if [[ ! "${machine}" == "i686" ]]; then
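The machine string tested above comes from uname -m earlier in get_binary_name(); a minimal sketch of the new branch in isolation:

    machine="$(uname -m)"                          # e.g. "riscv64" on RISC-V hardware
    if [[ "${machine}" == "riscv64" ]]; then
        l_binary="pihole-FTL-riscv64-linux-gnu"    # binary name as in the diff
    fi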
gravity.sh (231 changes)

@@ -129,7 +129,7 @@ gravity_swap_databases() {
     echo -e "${OVER}  ${TICK} ${str}"
 
     if $oldAvail; then
-        echo -e "  ${TICK} The old database remains available."
+        echo -e "  ${TICK} The old database remains available"
     fi
 }
 
@@ -145,18 +145,6 @@ update_gravity_timestamp() {
     return 0
 }
 
-# Update timestamp when the gravity table was last updated successfully
-set_abp_info() {
-    pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('abp_domains',${abp_domains});"
-    status="$?"
-
-    if [[ "${status}" -ne 0 ]]; then
-        echo -e "\\n  ${CROSS} Unable to update ABP domain status in database ${gravityDBfile}\\n  ${output}"
-        return 1
-    fi
-    return 0
-}
-
 # Import domains from file and store them in the specified database table
 database_table_from_file() {
     # Define locals
@@ -165,7 +153,10 @@ database_table_from_file() {
     src="${2}"
     backup_path="${piholeDir}/migration_backup"
     backup_file="${backup_path}/$(basename "${2}")"
-    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".gravity")"
+    # Create a temporary file. We don't use '--suffix' here because not all
+    # implementations of mktemp support it, e.g. on Alpine
+    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}")"
+    mv "${tmpFile}" "${tmpFile%.*}.gravity"
 
     local timestamp
     timestamp="$(date --utc +'%s')"
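The portability point is easy to verify; a simplified sketch (directory illustrative) that avoids --suffix, which BusyBox mktemp on Alpine lacks:

    GRAVITY_TMPDIR="/tmp"
    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}")"   # portable: no --suffix flag
    mv "${tmpFile}" "${tmpFile}.gravity"         # append the suffix manually instead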
@@ -236,17 +227,6 @@ database_table_from_file() {
         echo -e "  ${CROSS} Unable to remove ${tmpFile}"
 }
 
-# Update timestamp of last update of this list. We store this in the "old" database as all values in the new database will later be overwritten
-database_adlist_updated() {
-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
-    status="$?"
-
-    if [[ "${status}" -ne 0 ]]; then
-        echo -e "\\n  ${CROSS} Unable to update timestamp of adlist with ID ${1} in database ${gravityDBfile}\\n  ${output}"
-        gravity_Cleanup "error"
-    fi
-}
-
 # Check if a column with name ${2} exists in gravity table with name ${1}
 gravity_column_exists() {
     output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
@@ -264,7 +244,7 @@ database_adlist_number() {
         return;
     fi
 
-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_domains}" "${num_non_domains}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
+    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
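database_adlist_number now receives the counts as positional arguments (${2} valid, ${3} invalid) instead of reading the old num_domains/num_non_domains globals; an illustrative call:

    # adlist ID 4, 1500 valid domains, 7 invalid entries (numbers made up)
    database_adlist_number 4 1500 7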
@@ -421,7 +401,7 @@ gravity_DownloadBlocklists() {
         unset sources
     fi
 
-    local url domain agent cmd_ext str target compression
+    local url domain agent str target compression
     echo ""
 
     # Prepare new gravity database
@@ -438,7 +418,24 @@ gravity_DownloadBlocklists() {
         echo -e "${OVER}  ${TICK} ${str}"
     fi
 
-    target="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".gravity")"
+    str="Creating new gravity databases"
+    echo -ne "  ${INFO} ${str}..."
+
+    # Gravity copying SQL script
+    copyGravity="$(cat "${gravityDBcopy}")"
+    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
+        # Replace default gravity script location by custom location
+        copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
+    fi
+
+    output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
+    status="$?"
+
+    if [[ "${status}" -ne 0 ]]; then
+        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
+        return 1
+    fi
+    echo -e "${OVER}  ${TICK} ${str}"
+
     # Use compression to reduce the amount of data that is transferred
     # between the Pi-hole and the ad list provider. Use this feature
@@ -463,12 +460,6 @@ gravity_DownloadBlocklists() {
     # Default user-agent (for Cloudflare's Browser Integrity Check: https://support.cloudflare.com/hc/en-us/articles/200170086-What-does-the-Browser-Integrity-Check-do-)
     agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36"
 
-    # Provide special commands for blocklists which may need them
-    case "${domain}" in
-        "pgl.yoyo.org") cmd_ext="-d mimetype=plaintext -d hostformat=hosts";;
-        *) cmd_ext="";;
-    esac
-
     echo -e "  ${INFO} Target: ${url}"
     local regex check_url
     # Check for characters NOT allowed in URLs
@@ -481,114 +472,14 @@ gravity_DownloadBlocklists() {
         if [[ "${check_url}" =~ ${regex} ]]; then
             echo -e "  ${CROSS} Invalid Target"
         else
-            gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}"
+            gravity_DownloadBlocklistFromUrl "${url}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}"
         fi
         echo ""
     done
 
-    str="Creating new gravity databases"
-    echo -ne "  ${INFO} ${str}..."
-
-    # Gravity copying SQL script
-    copyGravity="$(cat "${gravityDBcopy}")"
-    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
-        # Replace default gravity script location by custom location
-        copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
-    fi
-
-    output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
-    status="$?"
-
-    if [[ "${status}" -ne 0 ]]; then
-        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
-        return 1
-    fi
-    echo -e "${OVER}  ${TICK} ${str}"
-
-    str="Storing downloaded domains in new gravity database"
-    echo -ne "  ${INFO} ${str}..."
-    output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 )
-    status="$?"
-
-    if [[ "${status}" -ne 0 ]]; then
-        echo -e "\\n  ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n  ${output}"
-        gravity_Cleanup "error"
-    else
-        echo -e "${OVER}  ${TICK} ${str}"
-    fi
-
-    if [[ "${status}" -eq 0 && -n "${output}" ]]; then
-        echo -e "  Encountered non-critical SQL warnings. Please check the suitability of the lists you're using!\\n\\n  SQL warnings:"
-        local warning file line lineno
-        while IFS= read -r line; do
-            echo "  - ${line}"
-            warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")"
-            file="${warning%:*}"
-            lineno="${warning#*:}"
-            if [[ -n "${file}" && -n "${lineno}" ]]; then
-                echo -n "    Line contains: "
-                awk "NR==${lineno}" < "${file}"
-            fi
-        done <<< "${output}"
-        echo ""
-    fi
-
-    rm "${target}" > /dev/null 2>&1 || \
-        echo -e "  ${CROSS} Unable to remove ${target}"
-
     gravity_Blackbody=true
 }
 
-# global variable to indicate if we found ABP style domains during the gravity run
-# is saved in gravtiy's info table to signal FTL if such domains are available
-abp_domains=0
-parseList() {
-    local adlistID="${1}" src="${2}" target="${3}" temp_file temp_file_base non_domains sample_non_domains valid_domain_pattern abp_domain_pattern
-
-    # define valid domain patterns
-    # no need to include uppercase letters, as we convert to lowercase in gravity_ParseFileIntoDomains() already
-    # adapted from https://stackoverflow.com/a/30007882
-    # supported ABP style: ||subdomain.domain.tlp^
-
-    valid_domain_pattern="([a-z0-9]([a-z0-9_-]{0,61}[a-z0-9]){0,1}\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]"
-    abp_domain_pattern="\|\|${valid_domain_pattern}\^"
-
-    # A list of items of common local hostnames not to report as unusable
-    # Some lists (i.e StevenBlack's) contain these as they are supposed to be used as HOST files
-    # but flagging them as unusable causes more confusion than it's worth - so we suppress them from the output
-    false_positives="localhost|localhost.localdomain|local|broadcasthost|localhost|ip6-localhost|ip6-loopback|lo0 localhost|ip6-localnet|ip6-mcastprefix|ip6-allnodes|ip6-allrouters|ip6-allhosts"
-
-    # Extract valid domains from source file and append ,${adlistID} to each line and save count to variable for display.
-    num_domains=$(grep -E "^(${valid_domain_pattern}|${abp_domain_pattern})$" "${src}" | tee >(sed "s/$/,${adlistID}/" >> "${target}") | wc -l)
-
-    # Check if the source file contained AdBlock Plus style domains, if so we set the global variable and inform the user
-    if grep -E "^${abp_domain_pattern}$" -m 1 -q "${src}"; then
-        echo "  ${INFO} List contained AdBlock Plus style domains"
-        abp_domains=1
-    fi
-
-    # For completeness, we will get a count of non_domains (this is the number of entries left after stripping the source of comments/duplicates/false positives/domains)
-    invalid_domains="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".ph-non-domains")"
-
-    num_non_domains=$(grep -Ev "^(${valid_domain_pattern}|${abp_domain_pattern}|${false_positives})$" "${src}" | tee "${invalid_domains}" | wc -l)
-
-    # If there are unusable lines, we display some information about them. This is not error or major cause for concern.
-    if [[ "${num_non_domains}" -ne 0 ]]; then
-        type="domains"
-        if [[ "${abp_domains}" -ne 0 ]]; then
-            type="patterns"
-        fi
-        echo "  ${INFO} Imported ${num_domains} ${type}, ignoring ${num_non_domains} non-domain entries"
-        echo "      Sample of non-domain entries:"
-        invalid_lines=$(head -n 5 "${invalid_domains}")
-        echo "${invalid_lines}" | awk '{print "        - " $0}'
-    else
-        echo "  ${INFO} Imported ${num_domains} domains"
-    fi
-    rm "${invalid_domains}"
-}
-
 compareLists() {
     local adlistID="${1}" target="${2}"
 
|
|||||||
sha1sum "${target}" > "${target}.sha1"
|
sha1sum "${target}" > "${target}.sha1"
|
||||||
echo " ${INFO} List has been updated"
|
echo " ${INFO} List has been updated"
|
||||||
database_adlist_status "${adlistID}" "1"
|
database_adlist_status "${adlistID}" "1"
|
||||||
database_adlist_updated "${adlistID}"
|
|
||||||
else
|
else
|
||||||
echo " ${INFO} List stayed unchanged"
|
echo " ${INFO} List stayed unchanged"
|
||||||
database_adlist_status "${adlistID}" "2"
|
database_adlist_status "${adlistID}" "2"
|
||||||
@@ -609,17 +499,18 @@ compareLists() {
         sha1sum "${target}" > "${target}.sha1"
         # We assume here it was changed upstream
         database_adlist_status "${adlistID}" "1"
-        database_adlist_updated "${adlistID}"
     fi
 }
 
 # Download specified URL and perform checks on HTTP status and file content
 gravity_DownloadBlocklistFromUrl() {
-    local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" compression="${7}"
-    local heisenbergCompensator="" listCurlBuffer str httpCode success="" ip
+    local url="${1}" agent="${2}" adlistID="${3}" saveLocation="${4}" target="${5}" compression="${6}"
+    local heisenbergCompensator="" listCurlBuffer str httpCode success="" ip cmd_ext
 
     # Create temp file to store content on disk instead of RAM
-    listCurlBuffer=$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".phgpb")
+    # We don't use '--suffix' here because not all implementations of mktemp support it, e.g. on Alpine
+    listCurlBuffer="$(mktemp -p "${GRAVITY_TMPDIR}")"
+    mv "${listCurlBuffer}" "${listCurlBuffer%.*}.phgpb"
 
     # Determine if $saveLocation has read permission
     if [[ -r "${saveLocation}" && $url != "file"* ]]; then
|
|||||||
bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
|
bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
|
||||||
echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
|
echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
|
||||||
echo -ne " ${INFO} ${str} Pending..."
|
echo -ne " ${INFO} ${str} Pending..."
|
||||||
cmd_ext="--resolve $domain:$port:$ip $cmd_ext"
|
cmd_ext="--resolve $domain:$port:$ip"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# shellcheck disable=SC2086
|
# shellcheck disable=SC2086
|
||||||
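curl's --resolve pins a hostname to a given IP for this one request, bypassing DNS (useful here because the blocklist host itself may be blocked by Pi-hole); host, IP, and URL are illustrative:

    curl --resolve example.com:443:203.0.113.10 "https://example.com/list.txt" -o list.txt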
@@ -707,9 +598,8 @@ gravity_DownloadBlocklistFromUrl() {
     if [[ "${success}" == true ]]; then
         if [[ "${httpCode}" == "304" ]]; then
             # Add domains to database table file
-            parseList "${adlistID}" "${saveLocation}" "${target}"
+            pihole-FTL gravity parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
             database_adlist_status "${adlistID}" "2"
-            database_adlist_number "${adlistID}"
             done="true"
         # Check if $listCurlBuffer is a non-zero length file
         elif [[ -s "${listCurlBuffer}" ]]; then
@@ -718,12 +608,9 @@ gravity_DownloadBlocklistFromUrl() {
             # Remove curl buffer file after its use
             rm "${listCurlBuffer}"
             # Add domains to database table file
-            parseList "${adlistID}" "${saveLocation}" "${target}"
+            pihole-FTL gravity parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
             # Compare lists, are they identical?
             compareLists "${adlistID}" "${saveLocation}"
-            # Update gravity database table (status and updated timestamp are set in
-            # compareLists)
-            database_adlist_number "${adlistID}"
             done="true"
         else
             # Fall back to previously cached list if $listCurlBuffer is empty
|
|||||||
if [[ -r "${saveLocation}" ]]; then
|
if [[ -r "${saveLocation}" ]]; then
|
||||||
echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
|
echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
|
||||||
# Add domains to database table file
|
# Add domains to database table file
|
||||||
parseList "${adlistID}" "${saveLocation}" "${target}"
|
pihole-FTL gravity parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
|
||||||
database_adlist_number "${adlistID}"
|
|
||||||
database_adlist_status "${adlistID}" "3"
|
database_adlist_status "${adlistID}" "3"
|
||||||
else
|
else
|
||||||
echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
|
echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
|
||||||
# Manually reset these two numbers because we do not call parseList here
|
# Manually reset these two numbers because we do not call parseList here
|
||||||
num_domains=0
|
database_adlist_number "${adlistID}" 0 0
|
||||||
num_non_domains=0
|
|
||||||
database_adlist_number "${adlistID}"
|
|
||||||
database_adlist_status "${adlistID}" "4"
|
database_adlist_status "${adlistID}" "4"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
@@ -763,25 +647,21 @@ gravity_ParseFileIntoDomains() {
     tr '[:upper:]' '[:lower:]' < "${src}" > "${destination}"
 
     # 2) Remove carriage returns
-    sed -i 's/\r$//' "${destination}"
-
-    # 3a) Remove comments (text starting with "#", include possible spaces before the hash sign)
-    sed -i 's/\s*#.*//g' "${destination}"
-
-    # 3b) Remove lines starting with ! (ABP Comments)
-    sed -i 's/\s*!.*//g' "${destination}"
-
-    # 3c) Remove lines starting with [ (ABP Header)
-    sed -i 's/\s*\[.*//g' "${destination}"
-
-    # 4) Remove lines containing "/"
-    sed -i -r '/(\/).*$/d' "${destination}"
-
-    # 5) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
-    sed -i -r 's/^.*\s+//g' "${destination}"
-
-    # 6) Remove empty lines
-    sed -i '/^$/d' "${destination}"
+    # 3) Remove lines starting with ! (ABP Comments)
+    # 4) Remove lines starting with [ (ABP Header)
+    # 5) Remove lines containing ABP extended CSS selectors ("##", "#!#", "#@#", "#?#") preceded by a letter
+    # 6) Remove comments (text starting with "#", include possible spaces before the hash sign)
+    # 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
+    # 8) Remove empty lines
+
+    sed -i -r \
+        -e 's/\r$//' \
+        -e 's/\s*!.*//g' \
+        -e 's/\s*\[.*//g' \
+        -e '/[a-z]\#[$?@]{0,1}\#/d' \
+        -e 's/\s*#.*//g' \
+        -e 's/^.*\s+//g' \
+        -e '/^$/d' "${destination}"
 
     chmod 644 "${destination}"
 }
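Chaining -e expressions makes sed apply every edit in a single pass per line instead of rewriting the file once per rule; a minimal sketch (file path illustrative):

    printf 'A\r\n! abp comment\nkeep.me\n' > /tmp/demo.txt
    sed -i -r -e 's/\r$//' -e 's/\s*!.*//g' -e '/^$/d' /tmp/demo.txt
    cat /tmp/demo.txt    # -> "A" and "keep.me"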
@@ -792,9 +672,9 @@ gravity_Table_Count() {
     local str="${2}"
     local num
     num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")"
-    if [[ "${table}" == "vw_gravity" ]]; then
+    if [[ "${table}" == "gravity" ]]; then
         local unique
-        unique="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")"
+        unique="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
         echo -e "  ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
         pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
     else
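Both COUNT forms return the number of distinct domains; the rewritten subquery form can be checked directly against an existing gravity database (path as used in the script):

    pihole-FTL sqlite3 /etc/pihole/gravity.db \
        "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM gravity);"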
@@ -804,7 +684,9 @@ gravity_Table_Count() {
 
 # Output count of blacklisted domains and regex filters
 gravity_ShowCount() {
-    gravity_Table_Count "vw_gravity" "gravity domains" ""
+    # Here we use the table "gravity" instead of the view "vw_gravity" for speed.
+    # It's safe to replace it here, because right after a gravity run both will show exactly the same number of domains.
+    gravity_Table_Count "gravity" "gravity domains" ""
     gravity_Table_Count "vw_blacklist" "exact blacklisted domains"
     gravity_Table_Count "vw_regex_blacklist" "regex blacklist filters"
     gravity_Table_Count "vw_whitelist" "exact whitelisted domains"
@@ -1021,9 +903,6 @@ fi
 # Update gravity timestamp
 update_gravity_timestamp
 
-# Set abp_domain info field
-set_abp_info
-
 # Ensure proper permissions are set for the database
 chown pihole:pihole "${gravityDBfile}"
 chmod g+w "${piholeDir}" "${gravityDBfile}"
test/_fedora_38.Dockerfile (new file, 18 lines)

@@ -0,0 +1,18 @@
+FROM fedora:38
+RUN dnf install -y git initscripts
+
+ENV GITDIR /etc/.pihole
+ENV SCRIPTDIR /opt/pihole
+
+RUN mkdir -p $GITDIR $SCRIPTDIR /etc/pihole
+ADD . $GITDIR
+RUN cp $GITDIR/advanced/Scripts/*.sh $GITDIR/gravity.sh $GITDIR/pihole $GITDIR/automated\ install/*.sh $SCRIPTDIR/
+ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$SCRIPTDIR
+
+RUN true && \
+    chmod +x $SCRIPTDIR/*
+
+ENV SKIP_INSTALL true
+ENV OS_CHECK_DOMAIN_NAME dev-supportedos.pi-hole.net
+
+#sed '/# Start the installer/Q' /opt/pihole/basic-install.sh > /opt/pihole/stub_basic-install.sh && \
test/_ubuntu_23.Dockerfile (new file, 18 lines)

@@ -0,0 +1,18 @@
+FROM buildpack-deps:lunar-scm
+
+ENV GITDIR /etc/.pihole
+ENV SCRIPTDIR /opt/pihole
+
+RUN mkdir -p $GITDIR $SCRIPTDIR /etc/pihole
+ADD . $GITDIR
+RUN cp $GITDIR/advanced/Scripts/*.sh $GITDIR/gravity.sh $GITDIR/pihole $GITDIR/automated\ install/*.sh $SCRIPTDIR/
+ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$SCRIPTDIR
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN true && \
+    chmod +x $SCRIPTDIR/*
+
+ENV SKIP_INSTALL true
+ENV OS_CHECK_DOMAIN_NAME dev-supportedos.pi-hole.net
+
+#sed '/# Start the installer/Q' /opt/pihole/basic-install.sh > /opt/pihole/stub_basic-install.sh && \
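Either new container can be built by hand the same way the tox configs below drive it; a sketch, assuming it is run from the repository root (the tag mirrors the tox command):

    docker buildx build --load --progress plain -f test/_ubuntu_23.Dockerfile -t pytest_pihole:test_container .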
test/requirements.txt (file header lost in this capture; inferred from contents)

@@ -1,6 +1,6 @@
 docker-compose == 1.29.2
-pytest == 7.2.2
-pytest-xdist == 3.2.1
-pytest-testinfra == 7.0.0
-tox == 4.4.7
+pytest == 7.3.1
+pytest-xdist == 3.3.1
+pytest-testinfra == 8.1.0
+tox == 4.5.1
test/tox.fedora_38.ini (new file, 8 lines)

@@ -0,0 +1,8 @@
+[tox]
+envlist = py3
+
+[testenv]
+allowlist_externals = docker
+deps = -rrequirements.txt
+commands = docker buildx build --load --progress plain -f _fedora_38.Dockerfile -t pytest_pihole:test_container ../
+           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py
test/tox.ubuntu_23.ini (new file, 8 lines)

@@ -0,0 +1,8 @@
+[tox]
+envlist = py3
+
+[testenv:py3]
+allowlist_externals = docker
+deps = -rrequirements.txt
+commands = docker buildx build --load --progress plain -f _ubuntu_23.Dockerfile -t pytest_pihole:test_container ../
+           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py