From c2080324b75487cb1130dec5f3450f57c3d62ad7 Mon Sep 17 00:00:00 2001 From: MichaIng Date: Fri, 7 Jan 2022 18:55:15 +0100 Subject: [PATCH 01/24] Install netcat-openbsd as dependency explicitly Since Debian Stretch and Ubuntu Bionic, the "netcat" package is a transitional dummy package which pulls in "netcat-traditional" on Debian Stretch+Buster and Ubuntu Bionic, and "netcat-openbsd" on Debian Bullseye, Ubuntu Focal and up. On Debian Bookworm (testing), however, the "netcat" package has been removed during the last 3 days at the time of writing, so that it fails to be installed. While "netcat-traditional" and "netcat-openbsd" both "Provides: netcat", since these are two alternatives, APT does not automatically pick one but aborts, and the only solution is to install one explicitly. While this is likely a temporary state of the Debian testing suite, having a closer look at the two alternatives shows that "netcat-openbsd" is a much more actively maintained newer version with additional support for IPv6, proxies, and UNIX sockets, which is likely the reason for the gradual transition via meta package from "netcat-traditional" to "netcat-openbsd". This commit hence follows this aim by skipping the transitional dummy package and installing "netcat-openbsd" explicitly as a dependency, to avoid any possible errors like the one which currently occurs on Bookworm. Both packages can be installed concurrently and do not conflict, but are managed via dpkg's "update-alternatives". For reference: - https://packages.debian.org/netcat - https://packages.ubuntu.com/netcat Signed-off-by: MichaIng --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 3780f7b0..6591634e 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -287,7 +287,7 @@ package_manager_detect() { # Packages required to run this install script (stored as an array) INSTALLER_DEPS=(git iproute2 whiptail ca-certificates) # Packages required to run Pi-hole (stored as an array) - PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 sqlite3 libcap2-bin dns-root-data libcap2 netcat) + PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 sqlite3 libcap2-bin dns-root-data libcap2 netcat-openbsd) # Packages required for the Web admin interface (stored as an array) # It's useful to separate this from Pi-hole, since the two repos are also setup separately PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-sqlite3" "${phpVer}-xml" "${phpVer}-intl") From 3097c8fbdc431aed44e83f5e8116f2bc9242ecfd Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 8 Jan 2022 12:57:49 +0000 Subject: [PATCH 02/24] Skip the required ports check if installed in docker container. Unpriv'ed containers do not have access to the information required to resolve the service name listening - and the container should not start if there was a port conflict anyway (#4536) Signed-off-by: Adam Warner --- advanced/Scripts/piholeDebug.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index dc4a2729..77e348c9 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -787,7 +787,9 @@ check_networking() { detect_ip_addresses "6" ping_gateway "4" ping_gateway "6" - check_required_ports + # Skip the following check if installed in docker container.
Unpriv'ed containers do not have access to the information required + # to resolve the service name listening - and the container should not start if there was a port conflict anyway + [ -z "${PIHOLE_DOCKER_TAG}" ] && check_required_ports } check_x_headers() { From ce86157067b4de06462f6d220aba03f7b93adaa6 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 8 Jan 2022 14:15:26 +0100 Subject: [PATCH 03/24] Fix gravity in case there are no adlists at all or all are disabled (#4535) Signed-off-by: DL6ER --- gravity.sh | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index a6ab3c86..2f24fbdb 100755 --- a/gravity.sh +++ b/gravity.sh @@ -402,14 +402,12 @@ gravity_DownloadBlocklists() { )" local str="Pulling blocklist source list into range" + echo -e "${OVER} ${TICK} ${str}" - if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then - echo -e "${OVER} ${TICK} ${str}" - else - echo -e "${OVER} ${CROSS} ${str}" + if [[ -z "${sources[*]}" ]] || [[ -z "${sourceDomains[*]}" ]]; then echo -e " ${INFO} No source list found, or it is empty" echo "" - return 1 + unset sources fi local url domain agent cmd_ext str target compression From a65a841c56ecce666499b20ad557c330e1c5b89b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Sun, 9 Jan 2022 07:13:51 +0100 Subject: [PATCH 04/24] Remove oneline from ss call MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 77e348c9..ac4d45e2 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -753,7 +753,7 @@ check_required_ports() { # Sort the addresses and remove duplicates while IFS= read -r line; do ports_in_use+=( "$line" ) - done < <( ss --listening --numeric --tcp --udp --processes --oneline --no-header ) + done < <( ss --listening --numeric --tcp --udp --processes --no-header ) # Now that we have the values stored, for i in "${!ports_in_use[@]}"; do From b20b38d44fcabf685a8cd78dfee181c97e1eb6ce Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 9 Jan 2022 11:31:47 +0100 Subject: [PATCH 05/24] Include ip addr show and ip route show for us to help with local-service issues (where hops-away is measured) Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 77e348c9..51220833 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -779,6 +779,21 @@ check_required_ports() { done } +ip_command() { + # Obtain and log information from "ip XYZ show" commands + echo_current_diagnostic "${2}" + local entries=() + mapfile -t entries < <(ip "${1}" show) + for line in "${entries[@]}"; do + log_write " ${line}" + done +} + +check_ip_command() { + ip_command "addr" "Network interfaces and addresses" + ip_command "route" "Network routing table" +} + check_networking() { # Runs through several of the functions made earlier; we just clump them # together since they are all related to the networking aspect of things @@ -1454,6 +1469,7 @@ check_selinux check_firewalld processor_check disk_usage +check_ip_command check_networking check_name_resolution check_dhcp_servers From ed6b85241bb820644c1c7c922a587fd3a0b3f89d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Lukas=20Schl=C3=B6tterer?= <80917404+lschloetterer@users.noreply.github.com> Date: Wed, 12 Jan 2022 09:23:13 +0100 Subject: [PATCH 06/24] use sed substitute instead of delete and append (#4555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * use sed substitute instead of delete and append doesn't move the line to the end of the file, instead keeps the order of the lines in setupVars.conf intact Signed-off-by: Lukas Schlötterer <80917404+lschloetterer@users.noreply.github.com> * Match start of line as suggested in the review Signed-off-by: Lukas Schlötterer <80917404+lschloetterer@users.noreply.github.com> Co-authored-by: yubiuser Co-authored-by: yubiuser --- pihole | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pihole b/pihole index 8af47dc8..ddb8c707 100755 --- a/pihole +++ b/pihole @@ -223,8 +223,7 @@ Time: fi local str="Pi-hole Disabled" - sed -i "/BLOCKING_ENABLED=/d" "${setupVars}" - echo "BLOCKING_ENABLED=false" >> "${setupVars}" + sed -i "s/^BLOCKING_ENABLED=true/BLOCKING_ENABLED=false/" "${setupVars}" fi else # Enable Pi-hole @@ -236,8 +235,7 @@ Time: echo -e " ${INFO} Enabling blocking" local str="Pi-hole Enabled" - sed -i "/BLOCKING_ENABLED=/d" "${setupVars}" - echo "BLOCKING_ENABLED=true" >> "${setupVars}" + sed -i "s/^BLOCKING_ENABLED=false/BLOCKING_ENABLED=true/" "${setupVars}" fi restartDNS reload-lists From 6ead24b3157ae379e018d7a5e893ebc757b458a8 Mon Sep 17 00:00:00 2001 From: RD WebDesign Date: Fri, 14 Jan 2022 13:00:34 -0300 Subject: [PATCH 07/24] Move space into variable (#4562) Signed-off-by: rdwebdesign --- advanced/Scripts/query.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 0fd9871a..9ddfdc62 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -233,7 +233,7 @@ for result in "${results[@]}"; do adlistAddress="${extra/|*/}" extra="${extra#*|}" if [[ "${extra}" == "0" ]]; then - extra="(disabled)" + extra=" (disabled)" else extra="" fi @@ -241,7 +241,7 @@ for result in "${results[@]}"; do if [[ -n "${blockpage}" ]]; then echo "0 ${adlistAddress}" elif [[ -n "${exact}" ]]; then - echo " - ${adlistAddress} ${extra}" + echo " - ${adlistAddress}${extra}" else if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then count="" @@ -256,7 +256,7 @@ for result in "${results[@]}"; do [[ "${count}" -gt "${max_count}" ]] && continue echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}" else - echo " ${match} ${extra}" + echo " ${match}${extra}" fi fi done From 7aa28e4a3aa27a3cfdb5e495969fb4c9f923af5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Sat, 22 Jan 2022 22:09:15 +0100 Subject: [PATCH 08/24] Do a full fetch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- advanced/Scripts/update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index d18d2e78..9da85c89 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -41,7 +41,7 @@ GitCheckUpdateAvail() { cd "${directory}" || return # Fetch latest changes in this repo - git fetch --tags --quiet origin + git fetch --quiet origin # Check current branch. If it is master, then check for the latest available tag instead of latest commit. 
curBranch=$(git rev-parse --abbrev-ref HEAD) From bad6d8a59e6617ab0fa1fab4606579324fd64475 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Schl=C3=B6tterer?= <80917404+lschloetterer@users.noreply.github.com> Date: Fri, 28 Jan 2022 16:26:57 +0100 Subject: [PATCH 09/24] add parameter to set filename for teleporter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Make it possible to write pihole -a -t myname.tar.gz to configure the filename however you want Signed-off-by: Lukas Schlötterer <80917404+lschloetterer@users.noreply.github.com> --- advanced/Scripts/webpage.sh | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 4f44eca8..d823a7c1 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -640,12 +640,17 @@ Interfaces: } Teleporter() { - local datetimestamp - local host - datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") - host=$(hostname) - host="${host//./_}" - php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-${host:-noname}-teleporter_${datetimestamp}.tar.gz" + local filename + filename="${args[2]}" + if [[ -z "${filename}" ]]; then + local datetimestamp + local host + datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") + host=$(hostname) + host="${host//./_}" + filename="pi-hole-${host:-noname}-teleporter_${datetimestamp}.tar.gz" + fi + php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "${filename}" } checkDomain() From f0f5cc52d9bd7bb542a9735bdcb489152da05826 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 29 Jan 2022 22:39:45 +0100 Subject: [PATCH 10/24] Use internal SQLite3 engine in more places in gravity.sh Signed-off-by: DL6ER --- gravity.sh | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/gravity.sh b/gravity.sh index 2f24fbdb..ad0ba9a0 100755 --- a/gravity.sh +++ b/gravity.sh @@ -73,9 +73,9 @@ if [[ -r "${piholeDir}/pihole.conf" ]]; then echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}" fi -# Generate new sqlite3 file from schema template +# Generate new SQLite3 file from schema template generate_gravity_database() { - if ! sqlite3 "${gravityDBfile}" < "${gravityDBschema}"; then + if ! pihole-FTL sqlite3 "${gravityDBfile}" < "${gravityDBschema}"; then echo -e " ${CROSS} Unable to create ${gravityDBfile}" return 1 fi @@ -90,7 +90,7 @@ gravity_swap_databases() { echo -ne " ${INFO} ${str}..." # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once - output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) + output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -136,7 +136,7 @@ gravity_swap_databases() { # Update timestamp when the gravity table was last updated successfully update_gravity_timestamp() { - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then @@ -177,7 +177,7 @@ database_table_from_file() { # Get MAX(id) from domainlist when INSERTing into this table if [[ "${table}" == "domainlist" ]]; then - rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" + rowid="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" if [[ -z "$rowid" ]]; then rowid=0 fi @@ -207,7 +207,7 @@ database_table_from_file() { # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -227,7 +227,7 @@ database_table_from_file() { # Update timestamp of last update of this list. We store this in the "old" database as all values in the new database will later be overwritten database_adlist_updated() { - output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nUPDATE adlist SET date_updated = (cast(strftime('%%s', 'now') as int)) WHERE id = %i;\\n" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -238,7 +238,7 @@ database_adlist_updated() { # Check if a column with name ${2} exists in gravity table with name ${1} gravity_column_exists() { - output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) if [[ "${output}" == "1" ]]; then return 0 # Bash 0 is success fi @@ -253,7 +253,7 @@ database_adlist_number() { return; fi - output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_source_lines}" "${num_invalid}" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_source_lines}" "${num_invalid}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -269,7 +269,7 @@ database_adlist_status() { return; fi - output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then @@ -386,9 +386,9 @@ gravity_DownloadBlocklists() { fi # Retrieve source URLs from gravity database - # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true) - mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" - mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)" + # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true) + mapfile -t sources <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" + mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( @@ -417,7 +417,7 @@ gravity_DownloadBlocklists() { str="Preparing new gravity database" echo -ne " ${INFO} ${str}..." rm "${gravityTEMPfile}" > /dev/null 2>&1 - output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) + output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -782,12 +782,12 @@ gravity_Table_Count() { local table="${1}" local str="${2}" local num - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")" + num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")" if [[ "${table}" == "vw_gravity" ]]; then local unique - unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")" + unique="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")" echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})" - sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" + pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" else echo -e " ${INFO} Number of ${str}: ${num}" fi From 534f9a63bf0db0493d929d3737f48712ef99a495 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jan 2022 10:36:20 +0100 Subject: [PATCH 11/24] Copy database tables earlier into the new gravity database to avoid foreign key contraint violations when adding gravity entries refering to an empty adlist table Signed-off-by: DL6ER --- gravity.sh | 47 ++++++++++++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git a/gravity.sh b/gravity.sh index ad0ba9a0..ac2fca1b 100755 --- a/gravity.sh +++ b/gravity.sh @@ -85,7 +85,7 @@ generate_gravity_database() { # Copy data from old to new database file and swap them gravity_swap_databases() { - local str copyGravity + local str copyGravity oldAvail str="Building tree" echo -ne " ${INFO} ${str}..." @@ -102,22 +102,6 @@ gravity_swap_databases() { str="Swapping databases" echo -ne " ${INFO} ${str}..." - # Gravity copying SQL script - copyGravity="$(cat "${gravityDBcopy}")" - if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then - # Replace default gravity script location by custom location - copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}" - fi - - output=$( { sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 ) - status="$?" 
- - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}" - return 1 - fi - echo -e "${OVER} ${TICK} ${str}" - # Swap databases and remove or conditionally rename old database # Number of available blocks on disk availableBlocks=$(stat -f --format "%a" "${gravityDIR}") @@ -125,13 +109,19 @@ gravity_swap_databases() { gravityBlocks=$(stat --format "%b" ${gravityDBfile}) # Only keep the old database if available disk space is at least twice the size of the existing gravity.db. # Better be safe than sorry... + oldAvail=false if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${gravityDBfile}" ]; then - echo -e " ${TICK} The old database remains available." + oldAvail=true mv "${gravityDBfile}" "${gravityOLDfile}" else rm "${gravityDBfile}" fi mv "${gravityTEMPfile}" "${gravityDBfile}" + echo -e "${OVER} ${TICK} ${str}" + + if [ oldAvail ]; then + echo -e " ${TICK} The old database remains available." + fi } # Update timestamp when the gravity table was last updated successfully @@ -475,9 +465,28 @@ gravity_DownloadBlocklists() { echo "" done + str="Creating new gravity databases" + echo -ne " ${INFO} ${str}..." + + # Gravity copying SQL script + copyGravity="$(cat "${gravityDBcopy}")" + if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then + # Replace default gravity script location by custom location + copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}" + fi + + output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}" + return 1 + fi + echo -e "${OVER} ${TICK} ${str}" + str="Storing downloaded domains in new gravity database" echo -ne " ${INFO} ${str}..." - output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then From 5bb79de70b73662e03772564f5f242b216f85bf5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jan 2022 10:38:24 +0100 Subject: [PATCH 12/24] Clean possible leftovers in domainlist_by_group, adlist_by_group, and client_by_group before copying from database base to avoid foreign key violations. 
Signed-off-by: DL6ER --- advanced/Templates/gravity_copy.sql | 3 +++ 1 file changed, 3 insertions(+) diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql index 4a2a9b22..a927dd8d 100644 --- a/advanced/Templates/gravity_copy.sql +++ b/advanced/Templates/gravity_copy.sql @@ -12,14 +12,17 @@ INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist; +DELETE FROM domainlist_by_group WHERE domainlist_id NOT IN (SELECT id FROM domainlist); INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group; INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist; +DELETE FROM adlist_by_group WHERE adlist_id NOT IN (SELECT id FROM adlist); INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group; INSERT OR REPLACE INTO info SELECT * FROM OLD.info; INSERT OR REPLACE INTO client SELECT * FROM OLD.client; +DELETE FROM client_by_group WHERE client_id NOT IN (SELECT id FROM client); INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; From 8cbffa179d589cd3b6d5501733d2a634ff83cad1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jan 2022 10:42:13 +0100 Subject: [PATCH 13/24] Replace remaining sqlite3 calls by calls to our embedded pihole-FTL sqlite3 engine and remove sqlite3 as dependency in the installer. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 30 +++++++++---------- advanced/Scripts/list.sh | 20 ++++++------- advanced/Scripts/piholeARPTable.sh | 4 +-- advanced/Scripts/piholeDebug.sh | 10 +++---- advanced/Scripts/piholeLogFlush.sh | 2 +- advanced/Scripts/query.sh | 4 +-- advanced/Scripts/webpage.sh | 12 ++++---- automated install/basic-install.sh | 2 +- gravity.sh | 2 +- 9 files changed, 43 insertions(+), 43 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 09dc1727..a7ba60a9 100755 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -19,13 +19,13 @@ upgrade_gravityDB(){ auditFile="${piholeDir}/auditlog.list" # Get database version - version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" + version="$(pihole-FTL sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then # This migration script upgrades the gravity.db file by # adding the domain_audit table echo -e " ${INFO} Upgrading gravity database from version 1 to 2" - sqlite3 "${database}" < "${scriptPath}/1_to_2.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/1_to_2.sql" version=2 # Store audit domains in database table @@ -40,28 +40,28 @@ upgrade_gravityDB(){ # renaming the regex table to regex_blacklist, and # creating a new regex_whitelist table + corresponding linking table and views echo -e " ${INFO} Upgrading gravity database from version 2 to 3" - sqlite3 "${database}" < "${scriptPath}/2_to_3.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/2_to_3.sql" version=3 fi if [[ "$version" == "3" ]]; then # This migration script unifies the formally separated domain # lists into a single table with a UNIQUE domain constraint echo -e " ${INFO} Upgrading gravity database from version 3 to 4" - sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" version=4 fi if [[ "$version" == 
"4" ]]; then # This migration script upgrades the gravity and list views # implementing necessary changes for per-client blocking echo -e " ${INFO} Upgrading gravity database from version 4 to 5" - sqlite3 "${database}" < "${scriptPath}/4_to_5.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/4_to_5.sql" version=5 fi if [[ "$version" == "5" ]]; then # This migration script upgrades the adlist view # to return an ID used in gravity.sh echo -e " ${INFO} Upgrading gravity database from version 5 to 6" - sqlite3 "${database}" < "${scriptPath}/5_to_6.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/5_to_6.sql" version=6 fi if [[ "$version" == "6" ]]; then @@ -69,7 +69,7 @@ upgrade_gravityDB(){ # which is automatically associated to all clients not # having their own group assignments echo -e " ${INFO} Upgrading gravity database from version 6 to 7" - sqlite3 "${database}" < "${scriptPath}/6_to_7.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/6_to_7.sql" version=7 fi if [[ "$version" == "7" ]]; then @@ -77,21 +77,21 @@ upgrade_gravityDB(){ # to ensure uniqueness on the group name # We also add date_added and date_modified columns echo -e " ${INFO} Upgrading gravity database from version 7 to 8" - sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" version=8 fi if [[ "$version" == "8" ]]; then # This migration fixes some issues that were introduced # in the previous migration script. echo -e " ${INFO} Upgrading gravity database from version 8 to 9" - sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" version=9 fi if [[ "$version" == "9" ]]; then # This migration drops unused tables and creates triggers to remove # obsolete groups assignments when the linked items are deleted echo -e " ${INFO} Upgrading gravity database from version 9 to 10" - sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" version=10 fi if [[ "$version" == "10" ]]; then @@ -101,31 +101,31 @@ upgrade_gravityDB(){ # to keep the copying process generic (needs the same columns in both the # source and the destination databases). 
echo -e " ${INFO} Upgrading gravity database from version 10 to 11" - sqlite3 "${database}" < "${scriptPath}/10_to_11.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/10_to_11.sql" version=11 fi if [[ "$version" == "11" ]]; then # Rename group 0 from "Unassociated" to "Default" echo -e " ${INFO} Upgrading gravity database from version 11 to 12" - sqlite3 "${database}" < "${scriptPath}/11_to_12.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/11_to_12.sql" version=12 fi if [[ "$version" == "12" ]]; then # Add column date_updated to adlist table echo -e " ${INFO} Upgrading gravity database from version 12 to 13" - sqlite3 "${database}" < "${scriptPath}/12_to_13.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/12_to_13.sql" version=13 fi if [[ "$version" == "13" ]]; then # Add columns number and status to adlist table echo -e " ${INFO} Upgrading gravity database from version 13 to 14" - sqlite3 "${database}" < "${scriptPath}/13_to_14.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/13_to_14.sql" version=14 fi if [[ "$version" == "14" ]]; then # Changes the vw_adlist created in 5_to_6 echo -e " ${INFO} Upgrading gravity database from version 14 to 15" - sqlite3 "${database}" < "${scriptPath}/14_to_15.sql" + pihole-FTL sqlite3 "${database}" < "${scriptPath}/14_to_15.sql" version=15 fi } diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 8945047e..f3f97da2 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -142,18 +142,18 @@ AddDomain() { domain="$1" # Is the domain in the list we want to add it to? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" + num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" requestedListname="$(GetListnameFromTypeId "${typeId}")" if [[ "${num}" -ne 0 ]]; then - existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" + existingTypeId="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" if [[ "${existingTypeId}" == "${typeId}" ]]; then if [[ "${verbose}" == true ]]; then echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!" fi else existingListname="$(GetListnameFromTypeId "${existingTypeId}")" - sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" + pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" if [[ "${verbose}" == true ]]; then echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!" fi @@ -169,10 +169,10 @@ AddDomain() { # Insert only the domain here. 
The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) if [[ -z "${comment}" ]]; then - sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" + pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" else # also add comment when variable has been set through the "--comment" option - sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');" + pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');" fi } @@ -181,7 +181,7 @@ RemoveDomain() { domain="$1" # Is the domain in the list we want to remove it from? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" + num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" requestedListname="$(GetListnameFromTypeId "${typeId}")" @@ -198,14 +198,14 @@ RemoveDomain() { fi reload=true # Remove it from the current list - sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};" + pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};" } Displaylist() { local count num_pipes domain enabled status nicedate requestedListname requestedListname="$(GetListnameFromTypeId "${typeId}")" - data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" + data="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" if [[ -z $data ]]; then echo -e "Not showing empty list" @@ -243,10 +243,10 @@ Displaylist() { } NukeList() { - count=$(sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};") + count=$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};") listname="$(GetListnameFromTypeId "${typeId}")" if [ "$count" -gt 0 ];then - sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" + pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" echo " ${TICK} Removed ${count} domain(s) from the ${listname}" else echo " ${INFO} ${listname} already empty. Nothing to do!" diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index 66d05bf9..5daa025d 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -39,7 +39,7 @@ flushARP(){ # Truncate network_addresses table in pihole-FTL.db # This needs to be done before we can truncate the network table due to # foreign key constraints - if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then + if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table" echo " Database location: ${DBFILE}" echo " Output: ${output}" @@ -47,7 +47,7 @@ flushARP(){ fi # Truncate network table in pihole-FTL.db - if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then + if ! 
output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network table" echo " Database location: ${DBFILE}" echo " Output: ${output}" diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 7be4029b..7d3e7acf 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -888,7 +888,7 @@ dig_at() { # This helps emulate queries to different domains that a user might query # It will also give extra assurance that Pi-hole is correctly resolving and blocking domains local random_url - random_url=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1") + random_url=$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1") # Next we need to check if Pi-hole can resolve a domain when the query is sent to it's IP address # This better emulates how clients will interact with Pi-hole as opposed to above where Pi-hole is @@ -1202,7 +1202,7 @@ show_db_entries() { IFS=$'\r\n' local entries=() mapfile -t entries < <(\ - sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \ + pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \ -cmd ".headers on" \ -cmd ".mode column" \ -cmd ".width ${widths}" \ @@ -1227,7 +1227,7 @@ show_FTL_db_entries() { IFS=$'\r\n' local entries=() mapfile -t entries < <(\ - sqlite3 "${PIHOLE_FTL_DB_FILE}" \ + pihole-FTL sqlite3 "${PIHOLE_FTL_DB_FILE}" \ -cmd ".headers on" \ -cmd ".mode column" \ -cmd ".width ${widths}" \ @@ -1284,7 +1284,7 @@ analyze_gravity_list() { log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" show_db_entries "Info table" "SELECT property,value FROM info" "20 40" - gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated_raw="$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" gravity_updated="$(date -d @"${gravity_updated_raw}")" log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" @@ -1292,7 +1292,7 @@ analyze_gravity_list() { OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() - mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") + mapfile -t gravity_sample < <(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh index 5c6a2c68..7547a5fd 100755 --- a/advanced/Scripts/piholeLogFlush.sh +++ b/advanced/Scripts/piholeLogFlush.sh @@ -63,7 +63,7 @@ else fi fi # Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history) - deleted=$(sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1") + deleted=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1") # Restart pihole-FTL to force reloading history sudo pihole restartdns diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 9ddfdc62..20c891bf 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -121,7 +121,7 @@ scanDatabaseTable() { fi # Send prepared query to gravity database - result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null 
+ result="$(pihole-FTL sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null if [[ -z "${result}" ]]; then # Return early when there are no matches in this table return @@ -164,7 +164,7 @@ scanRegexDatabaseTable() { type="${3:-}" # Query all regex from the corresponding database tables - mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null) + mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 4f44eca8..0b9fb62b 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -523,13 +523,13 @@ CustomizeAdLists() { if CheckUrl "${address}"; then if [[ "${args[2]}" == "enable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" + pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" elif [[ "${args[2]}" == "disable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" + pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" elif [[ "${args[2]}" == "add" ]]; then - sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" + pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" elif [[ "${args[2]}" == "del" ]]; then - sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" else echo "Not permitted" return 1 @@ -681,12 +681,12 @@ addAudit() done # Insert only the domain here. 
The date_added field will be # filled with its default value (date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" + pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" } clearAudit() { - sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;" + pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;" } SetPrivacyLevel() { diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 6591634e..a5c20692 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -287,7 +287,7 @@ package_manager_detect() { # Packages required to run this install script (stored as an array) INSTALLER_DEPS=(git iproute2 whiptail ca-certificates) # Packages required to run Pi-hole (stored as an array) - PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 sqlite3 libcap2-bin dns-root-data libcap2 netcat-openbsd) + PIHOLE_DEPS=(cron curl iputils-ping psmisc sudo unzip idn2 libcap2-bin dns-root-data libcap2 netcat-openbsd) # Packages required for the Web admin interface (stored as an array) # It's useful to separate this from Pi-hole, since the two repos are also setup separately PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-sqlite3" "${phpVer}-xml" "${phpVer}-intl") diff --git a/gravity.sh b/gravity.sh index ac2fca1b..9c11fa98 100755 --- a/gravity.sh +++ b/gravity.sh @@ -119,7 +119,7 @@ gravity_swap_databases() { mv "${gravityTEMPfile}" "${gravityDBfile}" echo -e "${OVER} ${TICK} ${str}" - if [ oldAvail ]; then + if $oldAvail; then echo -e " ${TICK} The old database remains available." fi } From 1dd9d55d82501a7b05fc86eb621a2fc9a610c8b8 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 30 Jan 2022 15:53:03 +0000 Subject: [PATCH 14/24] Replace value for BLOCKING_ENABLED (and QUERY_LOGGING, for consistency) and if value that we are trying to replace does not exist, add it to the end of the file. 
Co-authored-by: MichaIng Signed-off-by: Adam Warner --- pihole | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/pihole b/pihole index ddb8c707..610c9f31 100755 --- a/pihole +++ b/pihole @@ -170,6 +170,16 @@ restartDNS() { fi } +addOrEditKeyValPair() { + local key="${1}" + local value="${2}" + if grep -q "^${key}=" "${setupVars}"; then + sed -i "/^${key}=/c\\${key}=${value}" "${setupVars}" + else + echo "${key}=${value}" >> "${setupVars}" + fi +} + piholeEnable() { if [[ "${2}" == "-h" ]] || [[ "${2}" == "--help" ]]; then echo "Usage: pihole disable [time] @@ -223,7 +233,7 @@ Time: fi local str="Pi-hole Disabled" - sed -i "s/^BLOCKING_ENABLED=true/BLOCKING_ENABLED=false/" "${setupVars}" + addOrEditKeyValPair "BLOCKING_ENABLED" "false" fi else # Enable Pi-hole @@ -235,7 +245,7 @@ Time: echo -e " ${INFO} Enabling blocking" local str="Pi-hole Enabled" - sed -i "s/^BLOCKING_ENABLED=false/BLOCKING_ENABLED=true/" "${setupVars}" + addOrEditKeyValPair "BLOCKING_ENABLED" "true" fi restartDNS reload-lists @@ -258,7 +268,7 @@ Options: elif [[ "${1}" == "off" ]]; then # Disable logging sed -i 's/^log-queries/#log-queries/' /etc/dnsmasq.d/01-pihole.conf - sed -i 's/^QUERY_LOGGING=true/QUERY_LOGGING=false/' /etc/pihole/setupVars.conf + addOrEditKeyValPair "QUERY_LOGGING" "false" if [[ "${2}" != "noflush" ]]; then # Flush logs "${PI_HOLE_BIN_DIR}"/pihole -f @@ -268,7 +278,7 @@ Options: elif [[ "${1}" == "on" ]]; then # Enable logging sed -i 's/^#log-queries/log-queries/' /etc/dnsmasq.d/01-pihole.conf - sed -i 's/^QUERY_LOGGING=false/QUERY_LOGGING=true/' /etc/pihole/setupVars.conf + addOrEditKeyValPair "QUERY_LOGGING" "true" echo -e " ${INFO} Enabling logging..." local str="Logging has been enabled!" else From 74d7d10554dbd96454ffbc5ce960d608b1a4a034 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jan 2022 21:09:24 +0100 Subject: [PATCH 15/24] Orphans need to be deleted in the old database Signed-off-by: DL6ER --- advanced/Templates/gravity_copy.sql | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql index a927dd8d..3bea731d 100644 --- a/advanced/Templates/gravity_copy.sql +++ b/advanced/Templates/gravity_copy.sql @@ -12,17 +12,17 @@ INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist; -DELETE FROM domainlist_by_group WHERE domainlist_id NOT IN (SELECT id FROM domainlist); +DELETE FROM OLD.domainlist_by_group WHERE domainlist_id NOT IN (SELECT id FROM OLD.domainlist); INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group; INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist; -DELETE FROM adlist_by_group WHERE adlist_id NOT IN (SELECT id FROM adlist); +DELETE FROM OLD.adlist_by_group WHERE adlist_id NOT IN (SELECT id FROM OLD.adlist); INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group; INSERT OR REPLACE INTO info SELECT * FROM OLD.info; INSERT OR REPLACE INTO client SELECT * FROM OLD.client; -DELETE FROM client_by_group WHERE client_id NOT IN (SELECT id FROM client); +DELETE FROM OLD.client_by_group WHERE client_id NOT IN (SELECT id FROM OLD.client); INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; From 77e5121d438f7895ae6c512222802a0307c63ebb Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 30 Jan 2022 23:05:28 +0000 Subject: [PATCH 16/24] Split new function 
out into a separte utility script and add a test for it. Can be used in future to organise re/commonly-used code Signed-off-by: Adam Warner --- advanced/Scripts/utils.sh | 35 +++++++++++++++++++ pihole | 21 ++++------- ...stall.py => test_any_automated_install.py} | 0 test/test_any_utils.py | 16 +++++++++ test/tox.centos_7.ini | 2 +- test/tox.centos_8.ini | 2 +- test/tox.debian_10.ini | 2 +- test/tox.debian_11.ini | 2 +- test/tox.debian_9.ini | 2 +- test/tox.fedora_33.ini | 2 +- test/tox.fedora_34.ini | 2 +- test/tox.ubuntu_16.ini | 2 +- test/tox.ubuntu_18.ini | 2 +- test/tox.ubuntu_20.ini | 2 +- test/tox.ubuntu_21.ini | 2 +- 15 files changed, 69 insertions(+), 25 deletions(-) create mode 100755 advanced/Scripts/utils.sh rename test/{test_automated_install.py => test_any_automated_install.py} (100%) create mode 100644 test/test_any_utils.py diff --git a/advanced/Scripts/utils.sh b/advanced/Scripts/utils.sh new file mode 100755 index 00000000..887816cc --- /dev/null +++ b/advanced/Scripts/utils.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# Pi-hole: A black hole for Internet advertisements +# (c) 2017 Pi-hole, LLC (https://pi-hole.net) +# Network-wide ad blocking via your own hardware. +# +# Script to hold utility functions for use in other scripts +# +# This file is copyright under the latest version of the EUPL. +# Please see LICENSE file for your rights under this license. + +# Basic Housekeeping rules +# - Functions must be self contained +# - Functions must be added in alphabetical order +# - Functions must be documented +# - New functions must have a test added for them in test/test_any_utils.py + +####################### +# Takes three arguments key, value, and file. +# Checks the target file for the existence of the key +# - If it exists, it changes the value +# - If it does not exist, it adds the value +# +# Example usage: +# addOrEditKeyValuePair "BLOCKING_ENABLED" "true" "/etc/pihole/setupVars.conf" +####################### +addOrEditKeyValPair() { + local key="${1}" + local value="${2}" + local file="${3}" + if grep -q "^${key}=" "${file}"; then + sed -i "/^${key}=/c\\${key}=${value}" "${file}" + else + echo "${key}=${value}" >> "${file}" + fi +} diff --git a/pihole b/pihole index 610c9f31..56d47eca 100755 --- a/pihole +++ b/pihole @@ -21,6 +21,9 @@ readonly FTL_PID_FILE="/run/pihole-FTL.pid" readonly colfile="${PI_HOLE_SCRIPT_DIR}/COL_TABLE" source "${colfile}" +readonly utilsfile="${PI_HOLE_SCRIPT_DIR}/utils.sh" +source "${utilsfile}" + webpageFunc() { source "${PI_HOLE_SCRIPT_DIR}/webpage.sh" main "$@" @@ -170,16 +173,6 @@ restartDNS() { fi } -addOrEditKeyValPair() { - local key="${1}" - local value="${2}" - if grep -q "^${key}=" "${setupVars}"; then - sed -i "/^${key}=/c\\${key}=${value}" "${setupVars}" - else - echo "${key}=${value}" >> "${setupVars}" - fi -} - piholeEnable() { if [[ "${2}" == "-h" ]] || [[ "${2}" == "--help" ]]; then echo "Usage: pihole disable [time] @@ -233,7 +226,7 @@ Time: fi local str="Pi-hole Disabled" - addOrEditKeyValPair "BLOCKING_ENABLED" "false" + addOrEditKeyValPair "BLOCKING_ENABLED" "false" "${setupVars}" fi else # Enable Pi-hole @@ -245,7 +238,7 @@ Time: echo -e " ${INFO} Enabling blocking" local str="Pi-hole Enabled" - addOrEditKeyValPair "BLOCKING_ENABLED" "true" + addOrEditKeyValPair "BLOCKING_ENABLED" "true" "${setupVars}" fi restartDNS reload-lists @@ -268,7 +261,7 @@ Options: elif [[ "${1}" == "off" ]]; then # Disable logging sed -i 's/^log-queries/#log-queries/' /etc/dnsmasq.d/01-pihole.conf - addOrEditKeyValPair "QUERY_LOGGING" "false" 
+ addOrEditKeyValPair "QUERY_LOGGING" "false" "${setupVars}" if [[ "${2}" != "noflush" ]]; then # Flush logs "${PI_HOLE_BIN_DIR}"/pihole -f @@ -278,7 +271,7 @@ Options: elif [[ "${1}" == "on" ]]; then # Enable logging sed -i 's/^#log-queries/log-queries/' /etc/dnsmasq.d/01-pihole.conf - addOrEditKeyValPair "QUERY_LOGGING" "true" + addOrEditKeyValPair "QUERY_LOGGING" "true" "${setupVars}" echo -e " ${INFO} Enabling logging..." local str="Logging has been enabled!" else diff --git a/test/test_automated_install.py b/test/test_any_automated_install.py similarity index 100% rename from test/test_automated_install.py rename to test/test_any_automated_install.py diff --git a/test/test_any_utils.py b/test/test_any_utils.py new file mode 100644 index 00000000..ba9b2d23 --- /dev/null +++ b/test/test_any_utils.py @@ -0,0 +1,16 @@ +def test_key_val_replacement_works(host): + ''' Confirms addOrEditKeyValPair provides the expected output ''' + host.run(''' + setupvars=./testoutput + source /opt/pihole/utils.sh + addOrEditKeyValPair "KEY_ONE" "value1" "./testoutput" + addOrEditKeyValPair "KEY_TWO" "value2" "./testoutput" + addOrEditKeyValPair "KEY_ONE" "value3" "./testoutput" + addOrEditKeyValPair "KEY_FOUR" "value4" "./testoutput" + cat ./testoutput + ''') + output = host.run(''' + cat ./testoutput + ''') + expected_stdout = 'KEY_ONE=value3\nKEY_TWO=value2\nKEY_FOUR=value4\n' + assert expected_stdout == output.stdout diff --git a/test/tox.centos_7.ini b/test/tox.centos_7.ini index 88940fdd..319465dd 100644 --- a/test/tox.centos_7.ini +++ b/test/tox.centos_7.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _centos_7.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_7_support.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_7_support.py diff --git a/test/tox.centos_8.ini b/test/tox.centos_8.ini index 5088da16..c7926289 100644 --- a/test/tox.centos_8.ini +++ b/test/tox.centos_8.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _centos_8.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_8_support.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py ./test_centos_8_support.py diff --git a/test/tox.debian_10.ini b/test/tox.debian_10.ini index 9c2a05d1..3b182cdc 100644 --- a/test/tox.debian_10.ini +++ b/test/tox.debian_10.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _debian_10.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.debian_11.ini b/test/tox.debian_11.ini index f3cdbe84..c7e41a91 100644 --- a/test/tox.debian_11.ini +++ b/test/tox.debian_11.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _debian_11.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} 
./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.debian_9.ini b/test/tox.debian_9.ini index b46e0a49..56b9d37f 100644 --- a/test/tox.debian_9.ini +++ b/test/tox.debian_9.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _debian_9.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.fedora_33.ini b/test/tox.fedora_33.ini index d33fbf53..b17bd563 100644 --- a/test/tox.fedora_33.ini +++ b/test/tox.fedora_33.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _fedora_33.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_fedora_support.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py diff --git a/test/tox.fedora_34.ini b/test/tox.fedora_34.ini index 819291fa..26856984 100644 --- a/test/tox.fedora_34.ini +++ b/test/tox.fedora_34.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _fedora_34.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py ./test_centos_fedora_common_support.py ./test_fedora_support.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py diff --git a/test/tox.ubuntu_16.ini b/test/tox.ubuntu_16.ini index bce948a2..f8f6e92a 100644 --- a/test/tox.ubuntu_16.ini +++ b/test/tox.ubuntu_16.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _ubuntu_16.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.ubuntu_18.ini b/test/tox.ubuntu_18.ini index cf7a3642..a2513dfd 100644 --- a/test/tox.ubuntu_18.ini +++ b/test/tox.ubuntu_18.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _ubuntu_18.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.ubuntu_20.ini b/test/tox.ubuntu_20.ini index 03b605ce..fb3d20d7 100644 --- a/test/tox.ubuntu_20.ini +++ b/test/tox.ubuntu_20.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _ubuntu_20.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py diff --git a/test/tox.ubuntu_21.ini b/test/tox.ubuntu_21.ini index 12b1ac0b..070d3a72 100644 --- a/test/tox.ubuntu_21.ini +++ b/test/tox.ubuntu_21.ini @@ -5,4 +5,4 @@ envlist = py38 whitelist_externals = docker deps = -rrequirements.txt commands = docker build -f _ubuntu_21.Dockerfile -t pytest_pihole:test_container ../ - pytest {posargs:-vv -n auto} ./test_automated_install.py + pytest {posargs:-vv -n 
auto} ./test_any_automated_install.py ./test_any_utils.py From e09dd56807c45ab56e5285e1eea936c799052f25 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 1 Feb 2022 07:38:57 +0100 Subject: [PATCH 17/24] Remove RPM package sqlite as well Signed-off-by: DL6ER --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index a5c20692..e3dec82d 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -332,7 +332,7 @@ package_manager_detect() { PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" OS_CHECK_DEPS=(grep bind-utils) INSTALLER_DEPS=(git iproute newt procps-ng which chkconfig ca-certificates) - PIHOLE_DEPS=(cronie curl findutils sudo unzip libidn2 psmisc sqlite libcap nmap-ncat) + PIHOLE_DEPS=(cronie curl findutils sudo unzip libidn2 psmisc libcap nmap-ncat) PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json php-intl) LIGHTTPD_USER="lighttpd" LIGHTTPD_GROUP="lighttpd" From 881d92632ce8ee346ad5e1224879190eeb8c6836 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Schl=C3=B6tterer?= <80917404+lschloetterer@users.noreply.github.com> Date: Tue, 1 Feb 2022 09:41:57 +0100 Subject: [PATCH 18/24] add hint for custom teleporter filename to help function MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Lukas Schlötterer <80917404+lschloetterer@users.noreply.github.com> --- advanced/Scripts/webpage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index d823a7c1..dad5380e 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -45,7 +45,7 @@ Options: -h, --help Show this help dialog -i, interface Specify dnsmasq's interface listening behavior -l, privacylevel Set privacy level (0 = lowest, 3 = highest) - -t, teleporter Backup configuration as an archive" + -t, teleporter Backup configuration as an archive. 
Optionally specify a custom filename" exit 0 } From 444526ad582818b03263dabc8a01c6fbee5018a4 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Thu, 3 Feb 2022 18:43:19 +0000 Subject: [PATCH 19/24] Switch from centos8 to centos8:stream base image for centos 8 tests --- test/_centos_8.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/_centos_8.Dockerfile b/test/_centos_8.Dockerfile index fddb3ed1..86e5a778 100644 --- a/test/_centos_8.Dockerfile +++ b/test/_centos_8.Dockerfile @@ -1,4 +1,4 @@ -FROM centos:8 +FROM quay.io/centos/centos:stream8 RUN yum install -y git ENV GITDIR /etc/.pihole From 7c60ee8df11d158cca3bde3b0b89a6f05bb9f409 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Wed, 29 Dec 2021 06:52:17 +0100 Subject: [PATCH 20/24] Remove pihole-FTL.conf man page MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König Remove double https:// Signed-off-by: Christian König --- automated install/basic-install.sh | 9 +- manpages/pihole-FTL.8 | 4 +- manpages/pihole-FTL.conf.5 | 313 ----------------------------- 3 files changed, 10 insertions(+), 316 deletions(-) delete mode 100644 manpages/pihole-FTL.conf.5 diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index e3dec82d..6bf55e92 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1371,7 +1371,12 @@ install_manpage() { # Testing complete, copy the files & update the man db install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 - install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5 + + # remvoe previously installed "pihole-FTL.conf" man page + if [[ -f "/usr/local/share/man/man5/pihole-FTL.conf.5" ]]; then + rm /usr/local/share/man/man5/pihole-FTL.conf.5 + fi + if mandb -q &>/dev/null; then # Updated successfully printf "%b %b man pages installed and database updated\\n" "${OVER}" "${TICK}" @@ -1379,7 +1384,7 @@ install_manpage() { else # Something is wrong with the system's man installation, clean up # our files, (leave everything how we found it). - rm /usr/local/share/man/man8/pihole.8 /usr/local/share/man/man8/pihole-FTL.8 /usr/local/share/man/man5/pihole-FTL.conf.5 + rm /usr/local/share/man/man8/pihole.8 /usr/local/share/man/man8/pihole-FTL.8 printf "%b %b man page db not updated, man pages not installed\\n" "${OVER}" "${CROSS}" fi } diff --git a/manpages/pihole-FTL.8 b/manpages/pihole-FTL.8 index c7b69d83..c1b7550f 100644 --- a/manpages/pihole-FTL.8 +++ b/manpages/pihole-FTL.8 @@ -144,7 +144,9 @@ Command line arguments can be arbitrarily combined, e.g: Start ftl in foreground with more verbose logging, process everything and shutdown immediately .br .SH "SEE ALSO" -\fBpihole\fR(8), \fBpihole-FTL.conf\fR(5) +\fBpihole\fR(8) +.br +\fBFor FTL's config options please see https://docs.pi-hole.net/ftldns/configfile/\fR .br .SH "COLOPHON" diff --git a/manpages/pihole-FTL.conf.5 b/manpages/pihole-FTL.conf.5 deleted file mode 100644 index 42405436..00000000 --- a/manpages/pihole-FTL.conf.5 +++ /dev/null @@ -1,313 +0,0 @@ -.TH "pihole-FTL.conf" "5" "pihole-FTL.conf" "pihole-FTL.conf" "November 2020" -.SH "NAME" - -pihole-FTL.conf - FTL's config file -.br -.SH "DESCRIPTION" - -/etc/pihole/pihole-FTL.conf will be read by \fBpihole-FTL(8)\fR on startup. 
-.br -For each setting the option shown first is the default. -.br - -\fBBLOCKINGMODE=IP|IP-AAAA-NODATA|NODATA|NXDOMAIN|NULL\fR -.br - How should FTL reply to blocked queries? - - IP - Pi-hole's IPs for blocked domains - - IP-AAAA-NODATA - Pi-hole's IP + NODATA-IPv6 for blocked domains - - NODATA - Using NODATA for blocked domains - - NXDOMAIN - NXDOMAIN for blocked domains - - NULL - Null IPs for blocked domains -.br - -\fBCNAME_DEEP_INSPECT=true|false\fR -.br - Use this option to disable deep CNAME inspection. This might be beneficial for very low-end devices. -.br - -\fBBLOCK_ESNI=true|false\fR -.br - Block requests to _esni.* sub-domains. -.br - -\fBMAXLOGAGE=24.0\fR -.br - Up to how many hours of queries should be imported from the database and logs? -.br - Maximum is 744 (31 days) -.br - -\fBPRIVACYLEVEL=0|1|2|3|4\fR -.br - Privacy level used to collect Pi-hole statistics. -.br - 0 - show everything -.br - 1 - hide domains -.br - 2 - hide domains and clients -.br - 3 - anonymous mode (hide everything) -.br - 4 - disable all statistics -.br - -\fBIGNORE_LOCALHOST=no|yes\fR -.br - Should FTL ignore queries coming from the local machine? -.br - -\fBAAAA_QUERY_ANALYSIS=yes|no\fR -.br - Should FTL analyze AAAA queries? -.br - -\fBANALYZE_ONLY_A_AND_AAAA=false|true\fR -.br - Should FTL only analyze A and AAAA queries? -.br - -\fBSOCKET_LISTENING=localonly|all\fR -.br - Listen only for local socket connections on the API port or permit all connections. -.br - -\fBFTLPORT=4711\fR -.br - On which port should FTL be listening? -.br - -\fBRESOLVE_IPV6=yes|no\fR -.br - Should FTL try to resolve IPv6 addresses to hostnames? -.br - -\fBRESOLVE_IPV4=yes|no\fR -.br - Should FTL try to resolve IPv4 addresses to hostnames? -.br - -\fBDELAY_STARTUP=0\fR -.br - Time in seconds (between 0 and 300) to delay FTL startup. -.br - -\fBNICE=-10\fR -.br - Set the niceness of the Pi-hole FTL process. -.br - Can be disabled altogether by setting a value of -999. -.br - -\fBNAMES_FROM_NETDB=true|false\fR -.br - Control whether FTL should use a fallback option and try to obtain client names from checking the network table. -.br - E.g. IPv6 clients without a hostname will be compared via MAC address to known clients. -.br - -\fB\fBREFRESH_HOSTNAMES=IPV4|ALL|NONE\fR -.br - Change how (and if) hourly PTR requests are made to check for changes in client and upstream server hostnames: -.br - IPV4 - Do the hourly PTR lookups only for IPv4 addresses resolving issues in networks with many short-lived PE IPv6 addresses. -.br - ALL - Do the hourly PTR lookups for all addresses. This can create a lot of PTR queries in networks with many IPv6 addresses. -.br - NONE - Don't do hourly PTR lookups. Look up hostnames once (when first seeing a client) and never again. Future hostname changes may be missed. -.br - -\fBMAXNETAGE=365\fR -.br - IP addresses (and associated host names) older than the specified number of days are removed. -.br - This avoids dead entries in the network overview table. -.br - -\fBEDNS0_ECS=true|false\fR -.br - Should we overwrite the query source when client information is provided through EDNS0 client subnet (ECS) information? -.br - -\fBPARSE_ARP_CACHE=true|false\fR -.br - Parse ARP cache to fill network overview table. -.br - -\fBDBIMPORT=yes|no\fR -.br - Should FTL load information from the database on startup to be aware of the most recent history? -.br - -\fBMAXDBDAYS=365\fR -.br - How long should queries be stored in the database? 
Setting this to 0 disables the database -.br - -\fBDBINTERVAL=1.0\fR -.br - How often do we store queries in FTL's database [minutes]? -.br - Accepts value between 0.1 (6 sec) and 1440 (1 day) -.br - -\fBDBFILE=/etc/pihole/pihole-FTL.db\fR -.br - Specify path and filename of FTL's SQLite long-term database. -.br - Setting this to DBFILE= disables the database altogether -.br - -\fBLOGFILE=/var/log/pihole-FTL.log\fR -.br - The location of FTL's log file. -.br - -\fBPIDFILE=/run/pihole-FTL.pid\fR -.br - The file which contains the PID of FTL's main process. -.br - -\fBPORTFILE=/run/pihole-FTL.port\fR -.br - Specify path and filename where the FTL process will write its API port number. -.br - -\fBSOCKETFILE=/run/pihole/FTL.sock\fR -.br - The file containing the socket FTL's API is listening on. -.br - -\fBSETUPVARSFILE=/etc/pihole/setupVars.conf\fR -.br - The config file of Pi-hole containing, e.g., the current blocking status (do not change). -.br - -\fBMACVENDORDB=/etc/pihole/macvendor.db\fR -.br - The database containing MAC -> Vendor information for the network table. -.br - -\fBGRAVITYDB=/etc/pihole/gravity.db\fR -.br - Specify path and filename of FTL's SQLite3 gravity database. This database contains all domains relevant for Pi-hole's DNS blocking. -.br - -\fBDEBUG_ALL=false|true\fR -.br - Enable all debug flags. If this is set to true, all other debug config options are ignored. -.br - -\fBDEBUG_DATABASE=false|true\fR -.br - Print debugging information about database actions such as SQL statements and performance. -.br - -\fBDEBUG_NETWORKING=false|true\fR -.br - Prints a list of the detected network interfaces on the startup of FTL. -.br - -\fBDEBUG_LOCKS=false|true\fR -.br - Print information about shared memory locks. -.br - Messages will be generated when waiting, obtaining, and releasing a lock. -.br - -\fBDEBUG_QUERIES=false|true\fR -.br - Print extensive DNS query information (domains, types, replies, etc.). -.br - -\fBDEBUG_FLAGS=false|true\fR -.br - Print flags of queries received by the DNS hooks. -.br - Only effective when \fBDEBUG_QUERIES\fR is enabled as well. - -\fBDEBUG_SHMEM=false|true\fR -.br - Print information about shared memory buffers. -.br - Messages are either about creating or enlarging shmem objects or string injections. -.br - -\fBDEBUG_GC=false|true\fR -.br - Print information about garbage collection (GC): -.br - What is to be removed, how many have been removed and how long did GC take. -.br - -\fBDEBUG_ARP=false|true\fR -.br - Print information about ARP table processing: -.br - How long did parsing take, whether read MAC addresses are valid, and if the macvendor.db file exists. -.br - -\fBDEBUG_REGEX=false|true\fR -.br - Controls if FTL should print extended details about regex matching. -.br - -\fBDEBUG_API=false|true\fR -.br - Print extra debugging information during telnet API calls. -.br - Currently only used to send extra information when getting all queries. -.br - -\fBDEBUG_OVERTIME=false|true\fR -.br - Print information about overTime memory operations, such as initializing or moving overTime slots. -.br - -\fBDEBUG_EXTBLOCKED=false|true\fR -.br - Print information about why FTL decided that certain queries were recognized as being externally blocked. -.br - -\fBDEBUG_CAPS=false|true\fR -.br - Print information about POSIX capabilities granted to the FTL process. -.br - The current capabilities are printed on receipt of SIGHUP i.e. after executing `killall -HUP pihole-FTL`. 
-.br - -\fBDEBUG_DNSMASQ_LINES=false|true\fR -.br - Print file and line causing a dnsmasq event into FTL's log files. -.br - This is handy to implement additional hooks missing from FTL. -.br - -\fBDEBUG_VECTORS=false|true\fR -.br - FTL uses dynamically allocated vectors for various tasks. -.br - This config option enables extensive debugging information such as information about allocation, referencing, deletion, and appending. -.br - -\fBDEBUG_RESOLVER=false|true\fR -.br - Extensive information about hostname resolution like which DNS servers are used in the first and second hostname resolving tries. -.br - -.SH "SEE ALSO" - -\fBpihole\fR(8), \fBpihole-FTL\fR(8) -.br -.SH "COLOPHON" - -Pi-hole : The Faster-Than-Light (FTL) Engine is a lightweight, purpose-built daemon used to provide statistics needed for the Pi-hole Web Interface, and its API can be easily integrated into your own projects. Although it is an optional component of the Pi-hole ecosystem, it will be installed by default to provide statistics. As the name implies, FTL does its work \fIvery quickly\fR! -.br - -Get sucked into the latest news and community activity by entering Pi-hole's orbit. Information about Pi-hole, and the latest version of the software can be found at https://pi-hole.net -.br From d7d8e9730b385342a79bbac368243f98cade359f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Thu, 30 Dec 2021 06:49:03 +0100 Subject: [PATCH 21/24] Remove pihole-FTL.conf.5 from automated tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- test/test_any_automated_install.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/test_any_automated_install.py b/test/test_any_automated_install.py index 7959e100..b7b4ccd8 100644 --- a/test/test_any_automated_install.py +++ b/test/test_any_automated_install.py @@ -351,10 +351,6 @@ def test_installPihole_fresh_install_readableFiles(host): 'r', '/usr/local/share/man/man8/pihole-FTL.8', piholeuser) actual_rc = host.run(check_man).rc assert exit_status_success == actual_rc - check_man = test_cmd.format( - 'r', '/usr/local/share/man/man5/pihole-FTL.conf.5', piholeuser) - actual_rc = host.run(check_man).rc - assert exit_status_success == actual_rc # check not readable sudoers file check_sudo = test_cmd.format( 'r', '/etc/sudoers.d/pihole', piholeuser) From c3c5342b48b226f482bc0d44ca8b5bed13b60a08 Mon Sep 17 00:00:00 2001 From: yubiuser Date: Fri, 4 Feb 2022 21:11:54 +0100 Subject: [PATCH 22/24] Fix reviewer's comment Co-authored-by: DL6ER --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 6bf55e92..1e004b8b 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1372,7 +1372,7 @@ install_manpage() { install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 - # remvoe previously installed "pihole-FTL.conf" man page + # remove previously installed "pihole-FTL.conf.5" man page if [[ -f "/usr/local/share/man/man5/pihole-FTL.conf.5" ]]; then rm /usr/local/share/man/man5/pihole-FTL.conf.5 fi From 2a0bb5b9ee12d33f35f39c035ef931f48d6370cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lukas=20Schl=C3=B6tterer?= <80917404+lschloetterer@users.noreply.github.com> Date: Fri, 4 Feb 2022 21:19:09 +0100 
Subject: [PATCH 23/24] Create second entry for teleporter and adjust spacing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Lukas Schlötterer <80917404+lschloetterer@users.noreply.github.com> --- advanced/Scripts/webpage.sh | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index dad5380e..c80934d9 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -37,15 +37,16 @@ Example: pihole -a -p password Set options for the Admin Console Options: - -p, password Set Admin Console password - -c, celsius Set Celsius as preferred temperature unit - -f, fahrenheit Set Fahrenheit as preferred temperature unit - -k, kelvin Set Kelvin as preferred temperature unit - -e, email Set an administrative contact address for the Block Page - -h, --help Show this help dialog - -i, interface Specify dnsmasq's interface listening behavior - -l, privacylevel Set privacy level (0 = lowest, 3 = highest) - -t, teleporter Backup configuration as an archive. Optionally specify a custom filename" + -p, password Set Admin Console password + -c, celsius Set Celsius as preferred temperature unit + -f, fahrenheit Set Fahrenheit as preferred temperature unit + -k, kelvin Set Kelvin as preferred temperature unit + -e, email Set an administrative contact address for the Block Page + -h, --help Show this help dialog + -i, interface Specify dnsmasq's interface listening behavior + -l, privacylevel Set privacy level (0 = lowest, 3 = highest) + -t, teleporter Backup configuration as an archive + -t, teleporter myname.tar.gz Backup configuration to archive with name myname.tar.gz as specified" exit 0 } From 08999bf315daf7f2d65bca06e205e9147f6375be Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Fri, 4 Feb 2022 21:16:02 +0000 Subject: [PATCH 24/24] Use case insensitive deletion when removing custom CNAME/DNS records in case of manual entries with mixed case having been added Signed-off-by: Adam Warner --- advanced/Scripts/webpage.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 23b4f402..aa4795dd 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -739,7 +739,7 @@ RemoveCustomDNSAddress() { validHost="$(checkDomain "${host}")" if [[ -n "${validHost}" ]]; then if valid_ip "${ip}" || valid_ip6 "${ip}" ; then - sed -i "/^${ip} ${validHost}$/d" "${dnscustomfile}" + sed -i "/^${ip} ${validHost}$/Id" "${dnscustomfile}" else echo -e " ${CROSS} Invalid IP has been passed" exit 1 @@ -792,7 +792,7 @@ RemoveCustomCNAMERecord() { if [[ -n "${validDomain}" ]]; then validTarget="$(checkDomain "${target}")" if [[ -n "${validTarget}" ]]; then - sed -i "/cname=${validDomain},${validTarget}$/d" "${dnscustomcnamefile}" + sed -i "/cname=${validDomain},${validTarget}$/Id" "${dnscustomcnamefile}" else echo " ${CROSS} Invalid Target Passed!" exit 1