From 0de5132e2b2afef6e6fa1c573a9060c79d73d940 Mon Sep 17 00:00:00 2001 From: xch12i5 <40517505+xCh12i5@users.noreply.github.com> Date: Fri, 18 Jan 2019 22:14:34 +0100 Subject: [PATCH 001/366] Solves RegEx issue. Signed-off-by: xch12i5 <40517505+xCh12i5@users.noreply.github.com> --- gravity.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 75a51608..3247a8e2 100755 --- a/gravity.sh +++ b/gravity.sh @@ -338,11 +338,11 @@ gravity_ParseFileIntoDomains() { # Awk -F splits on given IFS, we grab the right hand side (chops trailing #coments and /'s to grab the domain only. # Last awk command takes non-commented lines and if they have 2 fields, take the right field (the domain) and leave # the left (IP address), otherwise grab the single field. - - < ${source} awk -F '#' '{print $1}' | \ - awk -F '/' '{print $1}' | \ - awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \ - sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${destination} + < ${source} tr -d '\r' | \ + tr '[:upper:]' '[:lower:]' | \ + sed -r '/(\/|#).*$/d' | \ + sed -r 's/^.*\s+//g' | \ + sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > ${destination} return 0 fi From e02da90faae124a9df3847b255f2116c126d26c7 Mon Sep 17 00:00:00 2001 From: xch12i5 <40517505+xCh12i5@users.noreply.github.com> Date: Sat, 19 Jan 2019 17:48:29 +0100 Subject: [PATCH 002/366] Replaces double quotes by single quotes and updates documentation. Signed-off-by: xch12i5 <40517505+xCh12i5@users.noreply.github.com> --- gravity.sh | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/gravity.sh b/gravity.sh index 3247a8e2..7b9116a4 100755 --- a/gravity.sh +++ b/gravity.sh @@ -335,14 +335,16 @@ gravity_ParseFileIntoDomains() { # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious # This helps with that and makes it easier to read # It also helps with debugging so each stage of the script can be researched more in depth - # Awk -F splits on given IFS, we grab the right hand side (chops trailing #coments and /'s to grab the domain only. - # Last awk command takes non-commented lines and if they have 2 fields, take the right field (the domain) and leave - # the left (IP address), otherwise grab the single field. - < ${source} tr -d '\r' | \ - tr '[:upper:]' '[:lower:]' | \ - sed -r '/(\/|#).*$/d' | \ - sed -r 's/^.*\s+//g' | \ - sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > ${destination} + # 1) Remove carriage returns + # 2) Convert all characters to lowercase + # 3) Remove lines containing "#" or "/" + # 4) Remove leading tabs, spaces, etc. + # 5) Delete lines not matching domain names + < ${source} tr -d "\r" | \ + tr "[:upper:]" "[:lower:]" | \ + sed -r "/(\/|#).*$/d" | \ + sed -r "s/^.*\s+//g" | \ + sed -r "/([^\.]+\.)+[^\.]{2,}/!d" > ${destination} return 0 fi From cd8a423b32cd3900861085f9f64affc14cb544f2 Mon Sep 17 00:00:00 2001 From: xch12i5 <40517505+xCh12i5@users.noreply.github.com> Date: Sun, 20 Jan 2019 16:04:07 +0100 Subject: [PATCH 003/366] Solves RegEx issue and updates documentation. Signed-off-by: xch12i5 <40517505+xCh12i5@users.noreply.github.com> --- gravity.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 7b9116a4..6fe9be06 100755 --- a/gravity.sh +++ b/gravity.sh @@ -340,11 +340,11 @@ gravity_ParseFileIntoDomains() { # 3) Remove lines containing "#" or "/" # 4) Remove leading tabs, spaces, etc. 
# 5) Delete lines not matching domain names - < ${source} tr -d "\r" | \ - tr "[:upper:]" "[:lower:]" | \ - sed -r "/(\/|#).*$/d" | \ - sed -r "s/^.*\s+//g" | \ - sed -r "/([^\.]+\.)+[^\.]{2,}/!d" > ${destination} + < ${source} tr -d '\r' | \ + tr '[:upper:]' '[:lower:]' | \ + sed -r '/(\/|#).*$/d' | \ + sed -r 's/^.*\s+//g' | \ + sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > ${destination} return 0 fi From 09c4c88a6d1794a3ddb3ee8acb96e2cfb03ad7c2 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 13:04:31 +0100 Subject: [PATCH 004/366] Create and fill gravity.db during pihole -g Signed-off-by: DL6ER --- advanced/Templates/gravity.db.schema | 13 +++++++ gravity.sh | 56 ++++++++++++++++++++++++++-- 2 files changed, 65 insertions(+), 4 deletions(-) create mode 100644 advanced/Templates/gravity.db.schema diff --git a/advanced/Templates/gravity.db.schema b/advanced/Templates/gravity.db.schema new file mode 100644 index 00000000..1eee8491 --- /dev/null +++ b/advanced/Templates/gravity.db.schema @@ -0,0 +1,13 @@ +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, comment TEXT, DateAdded DATETIME); + +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, comment TEXT, DateAdded DATETIME); + +CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); + +CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain +FROM gravity a +WHERE a.domain NOT IN (SELECT domain from whitelist); + +CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain +FROM blacklist a +WHERE a.domain NOT IN (SELECT domain from whitelist); diff --git a/gravity.sh b/gravity.sh index 75a51608..ac045a93 100755 --- a/gravity.sh +++ b/gravity.sh @@ -35,6 +35,10 @@ blackList="${piholeDir}/black.list" localList="${piholeDir}/local.list" VPNList="/etc/openvpn/ipp.txt" +piholeGitDir="/etc/.pihole" +gravityDBfile="${piholeDir}/gravity.db" +gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.schema" + domainsExtension="domains" matterAndLight="${basename}.0.matterandlight.txt" parsedMatter="${basename}.1.parsedmatter.txt" @@ -83,6 +87,11 @@ if [[ -r "${piholeDir}/pihole.conf" ]]; then echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}" fi +# Generate new sqlite3 file from schema template +generate_gravity_database() { + sqlite3 "${gravityDBfile}" < "${gravityDBschema}" +} + # Determine if Pi-hole blocking is disabled # If this is the case, we want to update # gravity.list.bck and black.list.bck instead of @@ -582,12 +591,38 @@ gravity_ParseBlacklistDomains() { cp "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}" fi - # Move the file over as /etc/pihole/gravity.list so dnsmasq can use it - output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 ) + # Create database file if not present + if [ ! -e "${gravityDBfile}" ]; then + generate_gravity_database + fi + + # Backup gravity database + cp "${gravityDBfile}" "${gravityDBfile}.bck" + + # Empty domains + output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM gravity;"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to truncate gravity database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + fi + + # Store domains in gravity database + output=$( { sqlite3 "${gravityDBfile}" <<< ".import ${piholeDir}/${accretionDisc} gravity"; } 2>&1 ) status="$?" 
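In isolation, the fill-the-gravity-table step this patch adds works roughly as follows, cut down to a single-column table; the scratch database path and sample domains are invented for the illustration:

    # create a minimal gravity table and import a one-domain-per-line file into it
    sqlite3 /tmp/gravity-demo.db "CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL);"
    printf 'ads.example.com\ntracker.example.net\n' > /tmp/domains.txt
    sqlite3 /tmp/gravity-demo.db <<< ".import /tmp/domains.txt gravity"
    sqlite3 /tmp/gravity-demo.db "SELECT COUNT(*) FROM gravity;"   # -> 2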
if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}" + echo -e "\\n ${CROSS} Unable to create gravity database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + fi + + # Empty $adList if it already exists, otherwise, create it + output=$( { : > "${adList}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create empty ${adList}\\n ${output}" gravity_Cleanup "error" fi } @@ -633,6 +668,19 @@ gravity_Cleanup() { echo -e "${OVER} ${TICK} ${str}" + str="Optimizing domains database" + echo -ne " ${INFO} ${str}..." + # Store + output=$( { sqlite3 "${gravityDBfile}" <<< "VACUUM;"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + else + echo -e "${OVER} ${TICK} ${str}" + fi + # Only restart DNS service if offline if ! pidof ${resolver} &> /dev/null; then "${PIHOLE_COMMAND}" restartdns @@ -707,7 +755,7 @@ gravity_ShowBlockCount # Perform when downloading blocklists, or modifying the white/blacklist (not wildcards) if [[ "${skipDownload}" == false ]] || [[ "${listType}" == *"list" ]]; then - str="Parsing domains into hosts format" + str="Parsing domains" echo -ne " ${INFO} ${str}..." gravity_ParseUserDomains From 8a91fe699276cdb1b1671d61f719653c197794b9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 13:21:26 +0100 Subject: [PATCH 005/366] Store domains in gravit tables: gravity, whitelist, blacklist Signed-off-by: DL6ER --- gravity.sh | 104 ++++++++++++++++++++++++++--------------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/gravity.sh b/gravity.sh index ac045a93..a8c92e8a 100755 --- a/gravity.sh +++ b/gravity.sh @@ -33,6 +33,7 @@ regexFile="${piholeDir}/regex.list" adList="${piholeDir}/gravity.list" blackList="${piholeDir}/black.list" localList="${piholeDir}/local.list" +whiteList="${piholeDir}/white.list" VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" @@ -42,8 +43,6 @@ gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.schema" domainsExtension="domains" matterAndLight="${basename}.0.matterandlight.txt" parsedMatter="${basename}.1.parsedmatter.txt" -whitelistMatter="${basename}.2.whitelistmatter.txt" -accretionDisc="${basename}.3.accretionDisc.txt" preEventHorizon="list.preEventHorizon" skipDownload="false" @@ -92,6 +91,49 @@ generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" } +# Import domains from file and store them in the specified database table +gravity_store_in_database() { + # Define locals + local table="${1}" + local source="${2}" + local template="${3}" + + # Create database file if not present + if [ ! -e "${gravityDBfile}" ]; then + generate_gravity_database + fi + + # Backup gravity database + cp "${gravityDBfile}" "${gravityDBfile}.bck" + + # Empty domains + output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + fi + + # Store domains in gravity database + output=$( { sqlite3 "${gravityDBfile}" <<< ".import \"${source}\" ${table}"; } 2>&1 ) + status="$?" 
+ + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create ${table} database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + fi + + # Empty $adList if it already exists, otherwise, create it + output=$( { : > "${template}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create empty ${template}\\n ${output}" + gravity_Cleanup "error" + fi +} + # Determine if Pi-hole blocking is disabled # If this is the case, we want to update # gravity.list.bck and black.list.bck instead of @@ -476,7 +518,7 @@ gravity_SortAndFilterConsolidatedList() { echo -ne " ${INFO} ${str}..." fi - # Parse into hosts file + # Parse into file gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}" # Format $parsedMatter line total as currency @@ -514,8 +556,8 @@ gravity_Whitelist() { str="Number of whitelisted domains: ${num}" echo -ne " ${INFO} ${str}..." - # Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile - comm -23 "${piholeDir}/${preEventHorizon}" <(sort "${whitelistFile}") > "${piholeDir}/${whitelistMatter}" + # Store whitelisted files in gravity database + gravity_store_in_database "whitelist" "${whitelistFile}" "${whiteList}" echo -e "${OVER} ${INFO} ${str}" } @@ -581,50 +623,8 @@ gravity_ParseLocalDomains() { gravity_ParseBlacklistDomains() { local output status - # Empty $accretionDisc if it already exists, otherwise, create it - : > "${piholeDir}/${accretionDisc}" - - if [[ -f "${piholeDir}/${whitelistMatter}" ]]; then - mv "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}" - else - # There was no whitelist file, so use preEventHorizon instead of whitelistMatter. - cp "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}" - fi - - # Create database file if not present - if [ ! -e "${gravityDBfile}" ]; then - generate_gravity_database - fi - - # Backup gravity database - cp "${gravityDBfile}" "${gravityDBfile}.bck" - - # Empty domains - output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM gravity;"; } 2>&1 ) - status="$?" - - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to truncate gravity database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" - fi - - # Store domains in gravity database - output=$( { sqlite3 "${gravityDBfile}" <<< ".import ${piholeDir}/${accretionDisc} gravity"; } 2>&1 ) - status="$?" - - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create gravity database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" - fi - - # Empty $adList if it already exists, otherwise, create it - output=$( { : > "${adList}"; } 2>&1 ) - status="$?" - - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create empty ${adList}\\n ${output}" - gravity_Cleanup "error" - fi + # Store gravity domains in gravity database + gravity_store_in_database "gravity" "${piholeDir}/${preEventHorizon}" "${adList}" } # Create user-added blacklist entries @@ -632,9 +632,9 @@ gravity_ParseUserDomains() { if [[ ! 
-f "${blacklistFile}" ]]; then return 0 fi - # Copy the file over as /etc/pihole/black.list so dnsmasq can use it - cp "${blacklistFile}" "${blackList}" 2> /dev/null || \ - echo -e "\\n ${CROSS} Unable to move ${blacklistFile##*/} to ${piholeDir}" + + # Fill database table + gravity_store_in_database "blacklist" "${blacklistFile}" "${blackList}" } # Trap Ctrl-C From 710036adae672486abf35be122a5d571d681d428 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 13:34:08 +0100 Subject: [PATCH 006/366] Add DISABLED field to white- and blacklist defaulting to false. The two views respect the DISABLED fields for both lists. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.schema | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.schema b/advanced/Templates/gravity.db.schema index 1eee8491..2fd9a9d1 100644 --- a/advanced/Templates/gravity.db.schema +++ b/advanced/Templates/gravity.db.schema @@ -1,13 +1,14 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, comment TEXT, DateAdded DATETIME); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, comment TEXT, disabled BOOLEAN DEFAULT 0, DateAdded DATETIME); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, comment TEXT, DateAdded DATETIME); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, comment TEXT, disabled BOOLEAN DEFAULT 0, DateAdded DATETIME); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain FROM gravity a -WHERE a.domain NOT IN (SELECT domain from whitelist); +WHERE a.domain NOT IN (SELECT domain from whitelist WHERE disabled != 1); CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain FROM blacklist a -WHERE a.domain NOT IN (SELECT domain from whitelist); +WHERE a.disabled != 1 AND + a.domain NOT IN (SELECT domain from whitelist WHERE disabled != 1); From dcf0a605cf1b6e101b34206a3d5bd359f3c331a5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 14:14:14 +0100 Subject: [PATCH 007/366] Use views for all tables and set disabled column to false for those tables that support it Signed-off-by: DL6ER --- advanced/Templates/gravity.db.schema | 10 ++++++---- gravity.sh | 17 ++++++++++++++--- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/advanced/Templates/gravity.db.schema b/advanced/Templates/gravity.db.schema index 2fd9a9d1..83141f94 100644 --- a/advanced/Templates/gravity.db.schema +++ b/advanced/Templates/gravity.db.schema @@ -1,7 +1,5 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, comment TEXT, disabled BOOLEAN DEFAULT 0, DateAdded DATETIME); - -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, comment TEXT, disabled BOOLEAN DEFAULT 0, DateAdded DATETIME); - +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain @@ -12,3 +10,7 @@ CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain FROM blacklist a WHERE a.disabled != 1 AND a.domain NOT IN (SELECT domain from whitelist WHERE disabled != 1); + +CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain +FROM whitelist a +WHERE a.disabled != 1; diff --git a/gravity.sh b/gravity.sh index a8c92e8a..21338119 100755 --- a/gravity.sh +++ b/gravity.sh @@ -115,16 +115,27 @@ gravity_store_in_database() { gravity_Cleanup "error" fi - # Store domains in gravity 
database + # Store domains in gravity database table ${table} output=$( { sqlite3 "${gravityDBfile}" <<< ".import \"${source}\" ${table}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create ${table} database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to create ${table} in database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" fi - # Empty $adList if it already exists, otherwise, create it + if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ]; then + # Set disabled to false + output=$( { sqlite3 "${gravityDBfile}" <<< "UPDATE ${table} SET disabled = 0 WHERE disabled IS NULL;"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to set disabled states (${table}) in database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + fi + fi + + # Empty $template if it already exists, otherwise, create it output=$( { : > "${template}"; } 2>&1 ) status="$?" From 93f1859babd7a28de799edb84fdeadf39300daba Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 15:13:18 +0100 Subject: [PATCH 008/366] Store regex in gravity.db as well Signed-off-by: DL6ER --- advanced/Templates/gravity.db.schema | 5 +++++ gravity.sh | 20 +++++++++++++------- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/advanced/Templates/gravity.db.schema b/advanced/Templates/gravity.db.schema index 83141f94..b7b3808a 100644 --- a/advanced/Templates/gravity.db.schema +++ b/advanced/Templates/gravity.db.schema @@ -1,5 +1,6 @@ CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain @@ -14,3 +15,7 @@ WHERE a.disabled != 1 AND CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain FROM whitelist a WHERE a.disabled != 1; + +CREATE VIEW vw_regex AS SELECT DISTINCT a.domain +FROM regex a +WHERE a.disabled != 1; diff --git a/gravity.sh b/gravity.sh index 21338119..e3e78528 100755 --- a/gravity.sh +++ b/gravity.sh @@ -124,7 +124,7 @@ gravity_store_in_database() { gravity_Cleanup "error" fi - if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ]; then + if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then # Set disabled to false output=$( { sqlite3 "${gravityDBfile}" <<< "UPDATE ${table} SET disabled = 0 WHERE disabled IS NULL;"; } 2>&1 ) status="$?" @@ -135,13 +135,16 @@ gravity_store_in_database() { fi fi - # Empty $template if it already exists, otherwise, create it - output=$( { : > "${template}"; } 2>&1 ) - status="$?" + # Only create template file if asked to + if [ "${template}" != "-" ]; then + # Empty $template if it already exists, otherwise, create it + output=$( { : > "${template}"; } 2>&1 ) + status="$?" 
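The ': > "${template}"' just above is the usual truncate-or-create idiom; stand-alone, with an invented file name:

    echo "stale content" > /tmp/template-demo.list
    : > /tmp/template-demo.list      # an existing file is truncated to zero bytes
    wc -c < /tmp/template-demo.list  # -> 0
    : > /tmp/template-demo.new       # a missing file is simply created empty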
- if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create empty ${template}\\n ${output}" - gravity_Cleanup "error" + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create empty ${template}\\n ${output}" + gravity_Cleanup "error" + fi fi } @@ -586,6 +589,9 @@ gravity_ShowBlockCount() { num=$(grep -cv "^#" "${regexFile}") echo -e " ${INFO} Number of regex filters: ${num}" fi + + # Store regex files in gravity database + gravity_store_in_database "regex" "${regexFile}" "-" } # Parse list of domains into hosts format From fc62cf7e2f043cc28d163386aaf7cd0134dce306 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 16:44:05 +0100 Subject: [PATCH 009/366] Only run VACUUM on "full" gravity runs Signed-off-by: DL6ER --- gravity.sh | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/gravity.sh b/gravity.sh index e3e78528..0a1d4e8d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -39,6 +39,7 @@ VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" gravityDBfile="${piholeDir}/gravity.db" gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.schema" +optimize_database=true domainsExtension="domains" matterAndLight="${basename}.0.matterandlight.txt" @@ -685,17 +686,19 @@ gravity_Cleanup() { echo -e "${OVER} ${TICK} ${str}" - str="Optimizing domains database" - echo -ne " ${INFO} ${str}..." - # Store - output=$( { sqlite3 "${gravityDBfile}" <<< "VACUUM;"; } 2>&1 ) - status="$?" + if ${optimize_database} ; then + str="Optimizing domains database" + echo -ne " ${INFO} ${str}..." + # Store + output=$( { sqlite3 "${gravityDBfile}" <<< "VACUUM;"; } 2>&1 ) + status="$?" - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" - else - echo -e "${OVER} ${TICK} ${str}" + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}" + gravity_Cleanup "error" + else + echo -e "${OVER} ${TICK} ${str}" + fi fi # Only restart DNS service if offline @@ -726,9 +729,9 @@ for var in "$@"; do "-f" | "--force" ) forceDelete=true;; "-h" | "--help" ) helpFunc;; "-sd" | "--skip-download" ) skipDownload=true;; - "-b" | "--blacklist-only" ) listType="blacklist";; - "-w" | "--whitelist-only" ) listType="whitelist";; - "-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";; + "-b" | "--blacklist-only" ) listType="blacklist"; optimize_database=false;; + "-w" | "--whitelist-only" ) listType="whitelist"; optimize_database=false;; + "-wild" | "--wildcard-only" ) listType="wildcard"; optimize_database=false; dnsRestartType="restart";; esac done From d5feffa117c2093e8dbd17f814bf95a7167ab2ba Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 16:45:19 +0100 Subject: [PATCH 010/366] Do not backup the database Signed-off-by: DL6ER --- gravity.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index 0a1d4e8d..0d312b3f 100755 --- a/gravity.sh +++ b/gravity.sh @@ -104,9 +104,6 @@ gravity_store_in_database() { generate_gravity_database fi - # Backup gravity database - cp "${gravityDBfile}" "${gravityDBfile}.bck" - # Empty domains output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) status="$?" 
From 1f9a7d0b0cdba219cc2e3ad7b5eb91e744ab915b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 17:01:38 +0100 Subject: [PATCH 011/366] Only run VACUUM on "pihole -g -o" Signed-off-by: DL6ER --- gravity.sh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/gravity.sh b/gravity.sh index 0d312b3f..d0b44c24 100755 --- a/gravity.sh +++ b/gravity.sh @@ -39,7 +39,7 @@ VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" gravityDBfile="${piholeDir}/gravity.db" gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.schema" -optimize_database=true +optimize_database=false domainsExtension="domains" matterAndLight="${basename}.0.matterandlight.txt" @@ -724,11 +724,12 @@ Options: for var in "$@"; do case "${var}" in "-f" | "--force" ) forceDelete=true;; + "-o" | "--optimize" ) optimize_database=true;; "-h" | "--help" ) helpFunc;; "-sd" | "--skip-download" ) skipDownload=true;; - "-b" | "--blacklist-only" ) listType="blacklist"; optimize_database=false;; - "-w" | "--whitelist-only" ) listType="whitelist"; optimize_database=false;; - "-wild" | "--wildcard-only" ) listType="wildcard"; optimize_database=false; dnsRestartType="restart";; + "-b" | "--blacklist-only" ) listType="blacklist";; + "-w" | "--whitelist-only" ) listType="whitelist";; + "-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";; esac done From b2f4385232f7293d16e2a24eee6b60e3e7011308 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 3 Feb 2019 17:05:00 +0100 Subject: [PATCH 012/366] Avoid infinity loop if gravity optimization fails Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index d0b44c24..06e75a90 100755 --- a/gravity.sh +++ b/gravity.sh @@ -692,7 +692,7 @@ gravity_Cleanup() { if [[ "${status}" -ne 0 ]]; then echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" + error="error" else echo -e "${OVER} ${TICK} ${str}" fi From dbbf21071b4d3a13522d63bdb873ead883f2e13f Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 5 Feb 2019 19:05:11 +0100 Subject: [PATCH 013/366] Invert logic to use ENABLED instead of DISABLED. 
We make sure to set "enabled = 1 WHERE enabled IS NULL" in gravity_store_in_database() Signed-off-by: DL6ER --- advanced/Templates/gravity.db.schema | 21 --------------------- advanced/Templates/gravity.db.sql | 21 +++++++++++++++++++++ gravity.sh | 8 ++++---- 3 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 advanced/Templates/gravity.db.schema create mode 100644 advanced/Templates/gravity.db.sql diff --git a/advanced/Templates/gravity.db.schema b/advanced/Templates/gravity.db.schema deleted file mode 100644 index b7b3808a..00000000 --- a/advanced/Templates/gravity.db.schema +++ /dev/null @@ -1,21 +0,0 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, disabled BOOLEAN DEFAULT 0, comment TEXT, DateAdded DATETIME); -CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); - -CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain -FROM gravity a -WHERE a.domain NOT IN (SELECT domain from whitelist WHERE disabled != 1); - -CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain -FROM blacklist a -WHERE a.disabled != 1 AND - a.domain NOT IN (SELECT domain from whitelist WHERE disabled != 1); - -CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain -FROM whitelist a -WHERE a.disabled != 1; - -CREATE VIEW vw_regex AS SELECT DISTINCT a.domain -FROM regex a -WHERE a.disabled != 1; diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql new file mode 100644 index 00000000..72d64bc1 --- /dev/null +++ b/advanced/Templates/gravity.db.sql @@ -0,0 +1,21 @@ +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); +CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); + +CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain +FROM gravity a +WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); + +CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain +FROM blacklist a +WHERE a.enabled == 1 AND + a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); + +CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain +FROM whitelist a +WHERE a.enabled == 1; + +CREATE VIEW vw_regex AS SELECT DISTINCT a.domain +FROM regex a +WHERE a.enabled == 1; diff --git a/gravity.sh b/gravity.sh index 06e75a90..c47f7487 100755 --- a/gravity.sh +++ b/gravity.sh @@ -38,7 +38,7 @@ VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" gravityDBfile="${piholeDir}/gravity.db" -gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.schema" +gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql" optimize_database=false domainsExtension="domains" @@ -123,12 +123,12 @@ gravity_store_in_database() { fi if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then - # Set disabled to false - output=$( { sqlite3 "${gravityDBfile}" <<< "UPDATE ${table} SET disabled = 0 WHERE disabled IS NULL;"; } 2>&1 ) + # Set enabled to true where it is unspecified + output=$( { sqlite3 "${gravityDBfile}" <<< "UPDATE ${table} SET enabled = 1 WHERE enabled IS NULL;"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to set disabled states (${table}) in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to set enabled states (${table}) in database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" fi fi From 8a2363621d084bff58ab160cc01633ae32aa4f19 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 5 Feb 2019 19:06:46 +0100 Subject: [PATCH 014/366] Rename fields from DateAdded to dateadded Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 72d64bc1..4470c21c 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,6 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, DateAdded DATETIME); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain From c3c60e10f18db58e6a3cb09beb573a4eef26f8c8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 5 Feb 2019 19:07:12 +0100 Subject: [PATCH 015/366] Rename regex field from domain to filter because this described better what it is Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 4470c21c..5290e83b 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,6 @@ CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); +CREATE TABLE regex (filter TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain From ec5b16ef851e25167997fe02fd23944be65c7b4d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 5 Feb 2019 19:08:08 +0100 Subject: [PATCH 016/366] Fix incomplete comment Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index c47f7487..c45e124e 100755 --- a/gravity.sh +++ b/gravity.sh @@ -686,7 +686,7 @@ gravity_Cleanup() { if ${optimize_database} ; then str="Optimizing domains database" echo -ne " ${INFO} ${str}..." - # Store + # Run VACUUM command on database to optimize it output=$( { sqlite3 "${gravityDBfile}" <<< "VACUUM;"; } 2>&1 ) status="$?" 
From 9984647ebbc053f21881c8e4473a4ac5220050cf Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 5 Feb 2019 19:41:11 +0100 Subject: [PATCH 017/366] After renaming column in regex, it also needs to be renamed in vw_regex Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 5290e83b..23efa64a 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -16,6 +16,6 @@ CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain FROM whitelist a WHERE a.enabled == 1; -CREATE VIEW vw_regex AS SELECT DISTINCT a.domain +CREATE VIEW vw_regex AS SELECT DISTINCT a.filter FROM regex a WHERE a.enabled == 1; From 644ec36e6462a9b46f2e63fc5185040e3a803ebd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 6 Feb 2019 18:57:48 +0100 Subject: [PATCH 018/366] Review comments Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 9 +++++--- gravity.sh | 34 ++++++++++++++++++++----------- 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 23efa64a..3a2230c8 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,7 +1,10 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); -CREATE TABLE regex (filter TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1, comment TEXT, dateadded DATETIME); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); +CREATE TABLE regex (filter TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); +CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); + +INSERT INTO info VALUES("version","1"); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain FROM gravity a diff --git a/gravity.sh b/gravity.sh index c45e124e..0c3c8dee 100755 --- a/gravity.sh +++ b/gravity.sh @@ -20,6 +20,8 @@ source "${regexconverter}" basename="pihole" PIHOLE_COMMAND="/usr/local/bin/${basename}" +PIHOLE_USER="pihole" +PIHOLE_GROUP="pihole" piholeDir="/etc/${basename}" @@ -90,6 +92,7 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" + chown $PIHOLE_USER:$PIHOLE_GROUP "${gravityDBfile}" } # Import domains from file and store them in the specified database table @@ -113,25 +116,32 @@ gravity_store_in_database() { gravity_Cleanup "error" fi + local tmpFile=$(mktemp -p "/tmp" --suffix=".gravity") + if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then + # Apply format for white-, blacklist, and regex tables, prevent globbing + set -f + for domain in $(cat < "${source}"); do + echo "\"${domain}\",1,$(date --utc +'%s')," >> "${tmpFile}" + done + set +f + inputfile="${tmpFile}" + else + # No need to modify the input data for the gravity table + inputfile="${source}" + fi # Store domains in gravity database table ${table} - output=$( { sqlite3 
"${gravityDBfile}" <<< ".import \"${source}\" ${table}"; } 2>&1 ) + # Use printf as .mode and .import need to be on separate lines + # see https://unix.stackexchange.com/a/445615/83260 + output=$( { printf ".mode csv\n.import \"${inputfile}\" ${table}\n" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create ${table} in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to fill table ${table} in database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" fi - if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then - # Set enabled to true where it is unspecified - output=$( { sqlite3 "${gravityDBfile}" <<< "UPDATE ${table} SET enabled = 1 WHERE enabled IS NULL;"; } 2>&1 ) - status="$?" - - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to set enabled states (${table}) in database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" - fi - fi + # Delete tmpfile + #rm "$tmpFile" > /dev/null 2>&1 || true # Only create template file if asked to if [ "${template}" != "-" ]; then From 491c828661ba203e679acc74df48a22970f2aee5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 6 Feb 2019 19:09:09 +0100 Subject: [PATCH 019/366] Improved code, ensure that gravity.sh passes shellcheck as a whole Signed-off-by: DL6ER --- gravity.sh | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/gravity.sh b/gravity.sh index 0c3c8dee..129d0ed4 100755 --- a/gravity.sh +++ b/gravity.sh @@ -116,14 +116,15 @@ gravity_store_in_database() { gravity_Cleanup "error" fi - local tmpFile=$(mktemp -p "/tmp" --suffix=".gravity") + local tmpFile + tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")" if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then - # Apply format for white-, blacklist, and regex tables, prevent globbing - set -f - for domain in $(cat < "${source}"); do + # Apply format for white-, blacklist, and regex tables + # Read file line by line + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do echo "\"${domain}\",1,$(date --utc +'%s')," >> "${tmpFile}" done - set +f inputfile="${tmpFile}" else # No need to modify the input data for the gravity table @@ -132,7 +133,7 @@ gravity_store_in_database() { # Store domains in gravity database table ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".mode csv\n.import \"${inputfile}\" ${table}\n" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".mode csv\n.import \"%s\" ${table}\n" "${inputfile}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -412,10 +413,10 @@ gravity_ParseFileIntoDomains() { # Last awk command takes non-commented lines and if they have 2 fields, take the right field (the domain) and leave # the left (IP address), otherwise grab the single field. - < ${source} awk -F '#' '{print $1}' | \ + < "${source}" awk -F '#' '{print $1}' | \ awk -F '/' '{print $1}' | \ awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \ - sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${destination} + sed -nr -e 's/\.{2,}/./g' -e '/\./p' > "${destination}" return 0 fi From f9250d91a52a0bf2c62223ed51b96acf4a80b692 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 6 Feb 2019 19:13:31 +0100 Subject: [PATCH 020/366] Explictly escape the escape. 
Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 129d0ed4..c742a699 100755 --- a/gravity.sh +++ b/gravity.sh @@ -133,7 +133,7 @@ gravity_store_in_database() { # Store domains in gravity database table ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".mode csv\n.import \"%s\" ${table}\n" "${inputfile}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".mode csv\\n.import \"%s\" ${table}\\n" "${inputfile}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then From 61840edb301005178101ed821da01b05a22e1c0d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 22 Feb 2019 22:46:19 +0100 Subject: [PATCH 021/366] Fill comment field with NULL (default) instead of empty string and use a variable for the timestamp instead of periodically calling date Signed-off-by: DL6ER --- gravity.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index c742a699..6ace8108 100755 --- a/gravity.sh +++ b/gravity.sh @@ -118,12 +118,14 @@ gravity_store_in_database() { local tmpFile tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")" + local timestamp + timestamp="$(date --utc +'%s')" if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then # Apply format for white-, blacklist, and regex tables # Read file line by line grep -v '^ *#' < "${source}" | while IFS= read -r domain do - echo "\"${domain}\",1,$(date --utc +'%s')," >> "${tmpFile}" + echo "\"${domain}\",1,${timestamp}" >> "${tmpFile}" done inputfile="${tmpFile}" else From bbc392caeb631e46f840d1e87d6bebe07442545a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 22 Feb 2019 22:49:02 +0100 Subject: [PATCH 022/366] More review comments Signed-off-by: DL6ER --- gravity.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index 6ace8108..b09ddc0c 100755 --- a/gravity.sh +++ b/gravity.sh @@ -120,6 +120,7 @@ gravity_store_in_database() { tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")" local timestamp timestamp="$(date --utc +'%s')" + local inputfile if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then # Apply format for white-, blacklist, and regex tables # Read file line by line @@ -135,7 +136,7 @@ gravity_store_in_database() { # Store domains in gravity database table ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".mode csv\\n.import \"%s\" ${table}\\n" "${inputfile}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
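The printf is only there to hand sqlite3 the two dot-commands on separate input lines, as the linked answer explains. In isolation, with invented file, table and database names:

    sqlite3 /tmp/csv-demo.db "CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN DEFAULT 1);"
    printf '"example.com",1\n' > /tmp/white.csv
    # .mode csv and .import must arrive on separate lines, hence the printf
    printf '.mode csv\n.import "/tmp/white.csv" whitelist\n' | sqlite3 /tmp/csv-demo.db
    sqlite3 /tmp/csv-demo.db "SELECT * FROM whitelist;"   # -> example.com|1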
if [[ "${status}" -ne 0 ]]; then @@ -144,7 +145,7 @@ gravity_store_in_database() { fi # Delete tmpfile - #rm "$tmpFile" > /dev/null 2>&1 || true + rm "$tmpFile" > /dev/null 2>&1 || true # Only create template file if asked to if [ "${template}" != "-" ]; then From 54ae9d83b0a4ff09c2a40db079df9952cfd122af Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Sat, 23 Feb 2019 13:42:15 -0800 Subject: [PATCH 023/366] Stickler-lints Signed-off-by: Dan Schaper --- gravity.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index 6fe9be06..015764ac 100755 --- a/gravity.sh +++ b/gravity.sh @@ -340,11 +340,11 @@ gravity_ParseFileIntoDomains() { # 3) Remove lines containing "#" or "/" # 4) Remove leading tabs, spaces, etc. # 5) Delete lines not matching domain names - < ${source} tr -d '\r' | \ + < "${source}" tr -d '\r' | \ tr '[:upper:]' '[:lower:]' | \ sed -r '/(\/|#).*$/d' | \ sed -r 's/^.*\s+//g' | \ - sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > ${destination} + sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}" return 0 fi From de4456330169a3328c6e22ab4860a4f42a916c22 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 18:51:38 +0200 Subject: [PATCH 024/366] Remove addn-hosts from 01-pihole.conf Signed-off-by: DL6ER --- advanced/01-pihole.conf | 2 -- 1 file changed, 2 deletions(-) diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index 502293bf..40a117fe 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -18,8 +18,6 @@ # WITHIN /etc/dnsmasq.d/yourname.conf # ############################################################################### -addn-hosts=/etc/pihole/gravity.list -addn-hosts=/etc/pihole/black.list addn-hosts=/etc/pihole/local.list domain-needed From 6594a0a6e75ed60c5b15f422a9b319f14e312330 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 18:53:20 +0200 Subject: [PATCH 025/366] Fix detection of blocked query in pihole -t Signed-off-by: DL6ER --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index 84a5623f..6b572fa9 100755 --- a/pihole +++ b/pihole @@ -313,7 +313,7 @@ tailFunc() { # Colour everything else as gray tail -f /var/log/pihole.log | sed -E \ -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ - -e "s,(.*(gravity.list|black.list|regex.list| config ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ + -e "s,(.*(gravity |black |regex | config ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ -e "s,.*,${COL_GRAY}&${COL_NC}," exit 0 From c13ebd3a9d9cab33e0d6e8d389b5e69e8115d14d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 18:54:55 +0200 Subject: [PATCH 026/366] Replace Pi-hole ID string with another line we intend to keep Signed-off-by: DL6ER --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 110399f8..ffc1bdf6 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1224,7 +1224,7 @@ version_check_dnsmasq() { # Local, named variables local dnsmasq_conf="/etc/dnsmasq.conf" local dnsmasq_conf_orig="/etc/dnsmasq.conf.orig" - local dnsmasq_pihole_id_string="addn-hosts=/etc/pihole/gravity.list" + local dnsmasq_pihole_id_string="# Dnsmasq config for Pi-hole's FTLDNS" local 
dnsmasq_original_config="${PI_HOLE_LOCAL_REPO}/advanced/dnsmasq.conf.original" local dnsmasq_pihole_01_snippet="${PI_HOLE_LOCAL_REPO}/advanced/01-pihole.conf" local dnsmasq_pihole_01_location="/etc/dnsmasq.d/01-pihole.conf" @@ -1232,7 +1232,7 @@ version_check_dnsmasq() { # If the dnsmasq config file exists if [[ -f "${dnsmasq_conf}" ]]; then printf " %b Existing dnsmasq.conf found..." "${INFO}" - # If gravity.list is found within this file, we presume it's from older versions on Pi-hole, + # If a specific string is found within this file, we presume it's from older versions on Pi-hole, if grep -q ${dnsmasq_pihole_id_string} ${dnsmasq_conf}; then printf " it is from a previous Pi-hole install.\\n" printf " %b Backing up dnsmasq.conf to dnsmasq.conf.orig..." "${INFO}" From d7ee44960e25b3c3bb89c2e81578c1a8cff14087 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 18:56:44 +0200 Subject: [PATCH 027/366] Remove moving around of the gravity and blacklist files as we do not use them any longer Signed-off-by: DL6ER --- pihole | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/pihole b/pihole index 6b572fa9..71c286c6 100755 --- a/pihole +++ b/pihole @@ -10,10 +10,8 @@ # Please see LICENSE file for your rights under this license. readonly PI_HOLE_SCRIPT_DIR="/opt/pihole" -readonly gravitylist="/etc/pihole/gravity.list" -readonly blacklist="/etc/pihole/black.list" -# setupVars is not readonly here because in some funcitons (checkout), +# setupVars is not readonly here because in some functions (checkout), # it might get set again when the installer is sourced. This causes an # error due to modifying a readonly variable. setupVars="/etc/pihole/setupVars.conf" @@ -148,14 +146,6 @@ Time: echo -e " ${INFO} Blocking already disabled, nothing to do" exit 0 fi - if [[ -e "${gravitylist}" ]]; then - mv "${gravitylist}" "${gravitylist}.bck" - echo "" > "${gravitylist}" - fi - if [[ -e "${blacklist}" ]]; then - mv "${blacklist}" "${blacklist}.bck" - echo "" > "${blacklist}" - fi if [[ $# > 1 ]]; then local error=false if [[ "${2}" == *"s" ]]; then @@ -204,12 +194,6 @@ Time: echo -e " ${INFO} Enabling blocking" local str="Pi-hole Enabled" - if [[ -e "${gravitylist}.bck" ]]; then - mv "${gravitylist}.bck" "${gravitylist}" - fi - if [[ -e "${blacklist}.bck" ]]; then - mv "${blacklist}.bck" "${blacklist}" - fi sed -i "/BLOCKING_ENABLED=/d" "${setupVars}" echo "BLOCKING_ENABLED=true" >> "${setupVars}" fi From ee20164207508ca1e2dbb08e05b0858e074dd370 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 19:01:31 +0200 Subject: [PATCH 028/366] Remove detect_pihole_blocking_status() as we can modify the database at any time Signed-off-by: DL6ER --- gravity.sh | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/gravity.sh b/gravity.sh index b09ddc0c..b287653d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -160,20 +160,6 @@ gravity_store_in_database() { fi } -# Determine if Pi-hole blocking is disabled -# If this is the case, we want to update -# gravity.list.bck and black.list.bck instead of -# gravity.list and black.list -detect_pihole_blocking_status() { - if [[ "${BLOCKING_ENABLED}" == false ]]; then - echo -e " ${INFO} Pi-hole blocking is disabled" - adList="${adList}.bck" - blackList="${blackList}.bck" - else - echo -e " ${INFO} Pi-hole blocking is enabled" - fi -} - # Determine if DNS resolution is available before proceeding gravity_CheckDNSResolutionAvailable() { local lookupDomain="pi.hole" @@ -235,7 +221,7 @@ 
gravity_GetBlocklistUrls() { echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..." if [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then - # Remove superceded $adListDefault file + # Remove superseded $adListDefault file rm "${adListDefault}" 2> /dev/null || \ echo -e " ${CROSS} Unable to remove ${adListDefault}" fi @@ -758,8 +744,6 @@ if [[ "${forceDelete:-}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" fi -detect_pihole_blocking_status - # Determine which functions to run if [[ "${skipDownload}" == false ]]; then # Gravity needs to download blocklists From e57d0fb93c24e6d7f336f449dc9bd1fc7627062d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 19:05:08 +0200 Subject: [PATCH 029/366] Remove the empty templates gravity.list, white.list, black.list. The old source files whitelist.txt, blacklist.txt, and regex.list still exist and need to be removed in a follow-up commit Signed-off-by: DL6ER --- gravity.sh | 23 ++++------------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/gravity.sh b/gravity.sh index b287653d..6313b12c 100755 --- a/gravity.sh +++ b/gravity.sh @@ -32,10 +32,7 @@ whitelistFile="${piholeDir}/whitelist.txt" blacklistFile="${piholeDir}/blacklist.txt" regexFile="${piholeDir}/regex.list" -adList="${piholeDir}/gravity.list" -blackList="${piholeDir}/black.list" localList="${piholeDir}/local.list" -whiteList="${piholeDir}/white.list" VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" @@ -146,18 +143,6 @@ gravity_store_in_database() { # Delete tmpfile rm "$tmpFile" > /dev/null 2>&1 || true - - # Only create template file if asked to - if [ "${template}" != "-" ]; then - # Empty $template if it already exists, otherwise, create it - output=$( { : > "${template}"; } 2>&1 ) - status="$?" - - if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to create empty ${template}\\n ${output}" - gravity_Cleanup "error" - fi - fi } # Determine if DNS resolution is available before proceeding @@ -569,7 +554,7 @@ gravity_Whitelist() { echo -ne " ${INFO} ${str}..." 
# Store whitelisted files in gravity database - gravity_store_in_database "whitelist" "${whitelistFile}" "${whiteList}" + gravity_store_in_database "whitelist" "${whitelistFile}" echo -e "${OVER} ${INFO} ${str}" } @@ -589,7 +574,7 @@ gravity_ShowBlockCount() { fi # Store regex files in gravity database - gravity_store_in_database "regex" "${regexFile}" "-" + gravity_store_in_database "regex" "${regexFile}" } # Parse list of domains into hosts format @@ -639,7 +624,7 @@ gravity_ParseBlacklistDomains() { local output status # Store gravity domains in gravity database - gravity_store_in_database "gravity" "${piholeDir}/${preEventHorizon}" "${adList}" + gravity_store_in_database "gravity" "${piholeDir}/${preEventHorizon}" } # Create user-added blacklist entries @@ -649,7 +634,7 @@ gravity_ParseUserDomains() { fi # Fill database table - gravity_store_in_database "blacklist" "${blacklistFile}" "${blackList}" + gravity_store_in_database "blacklist" "${blacklistFile}" } # Trap Ctrl-C From 61cfd2f9f9ec4294e041937748f08368b80e88d5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 19:48:06 +0200 Subject: [PATCH 030/366] Add adlists table Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 3a2230c8..5442987f 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,7 @@ CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); CREATE TABLE regex (filter TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); +CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); From c1277705d897ce5ca15aa4d76b1ba253b42f7e6a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 24 Apr 2019 19:55:05 +0200 Subject: [PATCH 031/366] Copy existing whitelist.txt, blacklist.txt, regex.list, and adlists.list to the database. We remove the files afterwards as the content lives in the database now Signed-off-by: DL6ER --- gravity.sh | 62 ++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/gravity.sh b/gravity.sh index 6313b12c..4fb601b4 100755 --- a/gravity.sh +++ b/gravity.sh @@ -25,12 +25,11 @@ PIHOLE_GROUP="pihole" piholeDir="/etc/${basename}" -adListFile="${piholeDir}/adlists.list" -adListDefault="${piholeDir}/adlists.default" - +# Legacy (pre v5.0) list file locations whitelistFile="${piholeDir}/whitelist.txt" blacklistFile="${piholeDir}/blacklist.txt" regexFile="${piholeDir}/regex.list" +adListFile="${piholeDir}/adlists.list" localList="${piholeDir}/local.list" VPNList="/etc/openvpn/ipp.txt" @@ -93,18 +92,20 @@ generate_gravity_database() { } # Import domains from file and store them in the specified database table -gravity_store_in_database() { +database_table_from_file() { # Define locals local table="${1}" local source="${2}" - local template="${3}" # Create database file if not present if [ ! 
-e "${gravityDBfile}" ]; then + echo -e " ${INFO} Creating new gravity database" generate_gravity_database fi - # Empty domains + echo -e " ${INFO} Pi-hole upgrade: Moving content of ${source} into database" + + # Truncate table output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) status="$?" @@ -118,19 +119,19 @@ gravity_store_in_database() { local timestamp timestamp="$(date --utc +'%s')" local inputfile - if [ "$table" == "whitelist" ] || [ "$table" == "blacklist" ] || [ "$table" == "regex" ]; then - # Apply format for white-, blacklist, and regex tables + if [[ "${table}" == "gravity" ]]; then + # No need to modify the input data for the gravity table + inputfile="${source}" + else + # Apply format for white-, blacklist, regex, and adlists tables # Read file line by line grep -v '^ *#' < "${source}" | while IFS= read -r domain do echo "\"${domain}\",1,${timestamp}" >> "${tmpFile}" done inputfile="${tmpFile}" - else - # No need to modify the input data for the gravity table - inputfile="${source}" fi - # Store domains in gravity database table ${table} + # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 output=$( { printf ".mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) @@ -145,6 +146,31 @@ gravity_store_in_database() { rm "$tmpFile" > /dev/null 2>&1 || true } +migrate_to_database() { +ls -lh + # Migrate pre-v5.0 list files to database-based Pi-hole versions + if [[ -e "${whitelistFile}" ]]; then + # Store whitelisted domains in database + database_table_from_file "whitelist" "${whitelistFile}" + rm "${whitelistFile}" + fi + if [[ -e "${blacklistFile}" ]]; then + # Store blacklisted domains in database + database_table_from_file "blacklist" "${blacklistFile}" + rm "${blacklistFile}" + fi + if [[ -e "${regexFile}" ]]; then + # Store regex domains in database + database_table_from_file "regex" "${regexFile}" + rm "${regexFile}" + fi + if [[ -e "${adListFile}" ]]; then + # Store adlists domains in database + database_table_from_file "adlists" "${adListFile}" + rm "${adListFile}" + fi +} + # Determine if DNS resolution is available before proceeding gravity_CheckDNSResolutionAvailable() { local lookupDomain="pi.hole" @@ -553,9 +579,6 @@ gravity_Whitelist() { str="Number of whitelisted domains: ${num}" echo -ne " ${INFO} ${str}..." 
- # Store whitelisted files in gravity database - gravity_store_in_database "whitelist" "${whitelistFile}" - echo -e "${OVER} ${INFO} ${str}" } @@ -574,7 +597,7 @@ gravity_ShowBlockCount() { fi # Store regex files in gravity database - gravity_store_in_database "regex" "${regexFile}" + database_table_from_file "regex" "${regexFile}" } # Parse list of domains into hosts format @@ -624,7 +647,7 @@ gravity_ParseBlacklistDomains() { local output status # Store gravity domains in gravity database - gravity_store_in_database "gravity" "${piholeDir}/${preEventHorizon}" + database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}" } # Create user-added blacklist entries @@ -634,7 +657,7 @@ gravity_ParseUserDomains() { fi # Fill database table - gravity_store_in_database "blacklist" "${blacklistFile}" + database_table_from_file "blacklist" "${blacklistFile}" } # Trap Ctrl-C @@ -721,6 +744,9 @@ done # Trap Ctrl-C gravity_Trap +# Move possibly existing legacy files to the gravity database +migrate_to_database + if [[ "${forceDelete:-}" == true ]]; then str="Deleting existing list cache" echo -ne "${INFO} ${str}..." From 2664ac0efc6dab8720170d55b09c2ea50d857f18 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 10:18:37 +0200 Subject: [PATCH 032/366] Improve error reporting Signed-off-by: DL6ER --- gravity.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 4fb601b4..870027a1 100755 --- a/gravity.sh +++ b/gravity.sh @@ -143,7 +143,12 @@ database_table_from_file() { fi # Delete tmpfile - rm "$tmpFile" > /dev/null 2>&1 || true + rm "${tmpFile}" > /dev/null 2>&1 || \ + echo -e " ${CROSS} Unable to remove ${tmpFile}" + + # Delete source file + rm "${source}" 2> /dev/null || \ + echo -e " ${CROSS} Unable to remove ${source}" } migrate_to_database() { @@ -152,22 +157,18 @@ ls -lh if [[ -e "${whitelistFile}" ]]; then # Store whitelisted domains in database database_table_from_file "whitelist" "${whitelistFile}" - rm "${whitelistFile}" fi if [[ -e "${blacklistFile}" ]]; then # Store blacklisted domains in database database_table_from_file "blacklist" "${blacklistFile}" - rm "${blacklistFile}" fi if [[ -e "${regexFile}" ]]; then # Store regex domains in database database_table_from_file "regex" "${regexFile}" - rm "${regexFile}" fi if [[ -e "${adListFile}" ]]; then # Store adlists domains in database database_table_from_file "adlists" "${adListFile}" - rm "${adListFile}" fi } From 465a39427b1a0f34b83ede17a9dbfb000a19a3bb Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 10:46:18 +0200 Subject: [PATCH 033/366] Source adlists from gravity database Signed-off-by: DL6ER --- gravity.sh | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/gravity.sh b/gravity.sh index 870027a1..0333a25d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -151,9 +151,8 @@ database_table_from_file() { echo -e " ${CROSS} Unable to remove ${source}" } +# Migrate pre-v5.0 list files to database-based Pi-hole versions migrate_to_database() { -ls -lh - # Migrate pre-v5.0 list files to database-based Pi-hole versions if [[ -e "${whitelistFile}" ]]; then # Store whitelisted domains in database database_table_from_file "whitelist" "${whitelistFile}" @@ -232,15 +231,9 @@ gravity_CheckDNSResolutionAvailable() { gravity_GetBlocklistUrls() { echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..." 
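The next hunk replaces the adlists.list file read with a query against the new adlists table, honouring the enabled flag. A throwaway sketch of that filtering (table layout per the schema in this series, URLs invented):

    db="$(mktemp)"
    sqlite3 "${db}" "CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1);"
    sqlite3 "${db}" "INSERT INTO adlists (address, enabled) VALUES ('https://example.com/hosts.txt', 1);"
    sqlite3 "${db}" "INSERT INTO adlists (address, enabled) VALUES ('https://example.org/stale.txt', 0);"
    # sqlite3 stores booleans as 0/1, so this WHERE clause mirrors the query the patch adds
    mapfile -t sources <<< "$(sqlite3 "${db}" "SELECT address FROM adlists WHERE enabled = 1;" 2> /dev/null)"
    echo "${#sources[@]} enabled source(s): ${sources[*]}"   # only the example.com URL is returned
    rm -f "${db}"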
- if [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then - # Remove superseded $adListDefault file - rm "${adListDefault}" 2> /dev/null || \ - echo -e " ${CROSS} Unable to remove ${adListDefault}" - fi - - # Retrieve source URLs from $adListFile - # Logic: Remove comments and empty lines - mapfile -t sources <<< "$(grep -v -E "^(#|$)" "${adListFile}" 2> /dev/null)" + # Retrieve source URLs from gravity database + # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true) + mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM adlists WHERE enabled = 1;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( From d94bbfca7f728258ed3f0574b9eb9bf783114652 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 11:18:54 +0200 Subject: [PATCH 034/366] Finish migrating gravity.db to using our new gravity database. This removes the --skip-download, --blacklist-only, --whitelist-only, and --wildcard-only options as it is not necessary to run gravity when modifying them from now on (gravity really only builds the gravity database and the local.list file). Signed-off-by: DL6ER --- gravity.sh | 171 +++++++++++++++-------------------------------------- 1 file changed, 49 insertions(+), 122 deletions(-) diff --git a/gravity.sh b/gravity.sh index 0333a25d..09e7b19c 100755 --- a/gravity.sh +++ b/gravity.sh @@ -44,8 +44,6 @@ matterAndLight="${basename}.0.matterandlight.txt" parsedMatter="${basename}.1.parsedmatter.txt" preEventHorizon="list.preEventHorizon" -skipDownload="false" - resolver="pihole-FTL" haveSourceUrls=true @@ -103,8 +101,6 @@ database_table_from_file() { generate_gravity_database fi - echo -e " ${INFO} Pi-hole upgrade: Moving content of ${source} into database" - # Truncate table output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) status="$?" 
@@ -153,22 +149,26 @@ database_table_from_file() { # Migrate pre-v5.0 list files to database-based Pi-hole versions migrate_to_database() { - if [[ -e "${whitelistFile}" ]]; then - # Store whitelisted domains in database - database_table_from_file "whitelist" "${whitelistFile}" + if [[ -e "${adListFile}" ]]; then + # Store adlists domains in database + echo -e " ${INFO} Pi-hole upgrade: Moving content of ${adListFile} into database" + database_table_from_file "adlists" "${adListFile}" fi if [[ -e "${blacklistFile}" ]]; then # Store blacklisted domains in database + echo -e " ${INFO} Pi-hole upgrade: Moving content of ${blacklistFile} into database" database_table_from_file "blacklist" "${blacklistFile}" fi + if [[ -e "${whitelistFile}" ]]; then + # Store whitelisted domains in database + echo -e " ${INFO} Pi-hole upgrade: Moving content of ${whitelistFile} into database" + database_table_from_file "whitelist" "${whitelistFile}" + fi if [[ -e "${regexFile}" ]]; then # Store regex domains in database + echo -e " ${INFO} Pi-hole upgrade: Moving content of ${regexFile} into database" database_table_from_file "regex" "${regexFile}" fi - if [[ -e "${adListFile}" ]]; then - # Store adlists domains in database - database_table_from_file "adlists" "${adListFile}" - fi } # Determine if DNS resolution is available before proceeding @@ -282,11 +282,9 @@ gravity_SetDownloadOptions() { *) cmd_ext="";; esac - if [[ "${skipDownload}" == false ]]; then - echo -e " ${INFO} Target: ${domain} (${url##*/})" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" - echo "" - fi + echo -e " ${INFO} Target: ${domain} (${url##*/})" + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" + echo "" done gravity_Blackbody=true } @@ -543,55 +541,35 @@ gravity_SortAndFilterConsolidatedList() { if [[ "${haveSourceUrls}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" fi - echo -e " ${INFO} Number of domains being pulled in by gravity: ${COL_BLUE}${num}${COL_NC}" + echo -e " ${INFO} Gravity pulled in ${COL_BLUE}${num}${COL_NC} domains" str="Removing duplicate domains" - if [[ "${haveSourceUrls}" == true ]]; then - echo -ne " ${INFO} ${str}..." - fi - + echo -ne " ${INFO} ${str}..." sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}" + echo -e "${OVER} ${TICK} ${str}" - if [[ "${haveSourceUrls}" == true ]]; then - echo -e "${OVER} ${TICK} ${str}" - # Format $preEventHorizon line total as currency - num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") - echo -e " ${INFO} Number of unique domains trapped in the Event Horizon: ${COL_BLUE}${num}${COL_NC}" - fi -} - -# Whitelist user-defined domains -gravity_Whitelist() { - local num str - - if [[ ! -f "${whitelistFile}" ]]; then - echo -e " ${INFO} Nothing to whitelist!" - return 0 - fi - - num=$(wc -l < "${whitelistFile}") - str="Number of whitelisted domains: ${num}" + # Format $preEventHorizon line total as currency + num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") + str="Storing ${COL_BLUE}${num}${COL_NC} unique blocking domains in database" echo -ne " ${INFO} ${str}..." 
- - echo -e "${OVER} ${INFO} ${str}" + database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}" + echo -e "${OVER} ${TICK} ${str}" } -# Output count of blacklisted domains and regex filters -gravity_ShowBlockCount() { +# Report number of entries in a table +gravity_Table_Count() { + local table="${1}" + local str="${2}" local num + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} WHERE enabled = 1;")" + echo -e " ${INFO} Number of ${str}: ${num}" +} - if [[ -f "${blacklistFile}" ]]; then - num=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")") - echo -e " ${INFO} Number of blacklisted domains: ${num}" - fi - - if [[ -f "${regexFile}" ]]; then - num=$(grep -cv "^#" "${regexFile}") - echo -e " ${INFO} Number of regex filters: ${num}" - fi - - # Store regex files in gravity database - database_table_from_file "regex" "${regexFile}" +# Output count of blacklisted domains and regex filters +gravity_ShowCount() { + gravity_Table_Count "blacklist" "blacklisted domains" + gravity_Table_Count "whitelist" "whitelisted domains" + gravity_Table_Count "regex" "regex filters" } # Parse list of domains into hosts format @@ -611,7 +589,7 @@ gravity_ParseDomainsIntoHosts() { } # Create "localhost" entries into hosts format -gravity_ParseLocalDomains() { +gravity_generateLocalList() { local hostname if [[ -s "/etc/hostname" ]]; then @@ -636,24 +614,6 @@ gravity_ParseLocalDomains() { fi } -# Create primary blacklist entries -gravity_ParseBlacklistDomains() { - local output status - - # Store gravity domains in gravity database - database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}" -} - -# Create user-added blacklist entries -gravity_ParseUserDomains() { - if [[ ! -f "${blacklistFile}" ]]; then - return 0 - fi - - # Fill database table - database_table_from_file "blacklist" "${blacklistFile}" -} - # Trap Ctrl-C gravity_Trap() { trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT @@ -689,7 +649,7 @@ gravity_Cleanup() { str="Optimizing domains database" echo -ne " ${INFO} ${str}..." # Run VACUUM command on database to optimize it - output=$( { sqlite3 "${gravityDBfile}" <<< "VACUUM;"; } 2>&1 ) + output=$( { sqlite3 "${gravityDBfile}" "VACUUM;"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -728,10 +688,6 @@ for var in "$@"; do "-f" | "--force" ) forceDelete=true;; "-o" | "--optimize" ) optimize_database=true;; "-h" | "--help" ) helpFunc;; - "-sd" | "--skip-download" ) skipDownload=true;; - "-b" | "--blacklist-only" ) listType="blacklist";; - "-w" | "--whitelist-only" ) listType="whitelist";; - "-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";; esac done @@ -749,54 +705,25 @@ if [[ "${forceDelete:-}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" fi -# Determine which functions to run -if [[ "${skipDownload}" == false ]]; then - # Gravity needs to download blocklists - gravity_CheckDNSResolutionAvailable - gravity_GetBlocklistUrls - if [[ "${haveSourceUrls}" == true ]]; then - gravity_SetDownloadOptions - fi - gravity_ConsolidateDownloadedBlocklists - gravity_SortAndFilterConsolidatedList -else - # Gravity needs to modify Blacklist/Whitelist/Wildcards - echo -e " ${INFO} Using cached Event Horizon list..." 
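gravity_Table_Count above boils down to one COUNT(*) query per table. Assuming an already-populated /etc/pihole/gravity.db with the schema from this series, the same idea extended to all four list tables would look roughly like:

    gravityDBfile="/etc/pihole/gravity.db"
    for table in whitelist blacklist regex adlists; do
        num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} WHERE enabled = 1;")"
        echo "  Number of enabled ${table} entries: ${num}"
    done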
- numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") - echo -e " ${INFO} ${COL_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon" -fi - -# Perform when downloading blocklists, or modifying the whitelist -if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "whitelist" ]]; then - gravity_Whitelist +# Gravity downloads blocklists next +gravity_CheckDNSResolutionAvailable +gravity_GetBlocklistUrls +if [[ "${haveSourceUrls}" == true ]]; then +gravity_SetDownloadOptions fi +# Build preEventHorizon +gravity_ConsolidateDownloadedBlocklists +gravity_SortAndFilterConsolidatedList -convert_wildcard_to_regex -gravity_ShowBlockCount - -# Perform when downloading blocklists, or modifying the white/blacklist (not wildcards) -if [[ "${skipDownload}" == false ]] || [[ "${listType}" == *"list" ]]; then - str="Parsing domains" - echo -ne " ${INFO} ${str}..." - - gravity_ParseUserDomains - - # Perform when downloading blocklists - if [[ ! "${listType:-}" == "blacklist" ]]; then - gravity_ParseLocalDomains - gravity_ParseBlacklistDomains - fi - - echo -e "${OVER} ${TICK} ${str}" - - gravity_Cleanup -fi +# Create local.list +gravity_generateLocalList +gravity_ShowCount +gravity_Cleanup echo "" # Determine if DNS has been restarted by this instance of gravity if [[ -z "${dnsWasOffline:-}" ]]; then - # Use "force-reload" when restarting dnsmasq for everything but Wildcards - "${PIHOLE_COMMAND}" restartdns "${dnsRestartType:-force-reload}" + "${PIHOLE_COMMAND}" restartdns reload fi "${PIHOLE_COMMAND}" status From e5d1cb5a2ef11ab4642bd3b8f80cb2edcd9cb2b5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 12:10:42 +0200 Subject: [PATCH 035/366] Rewrite list.sh to operate on database-based lists Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 253 +++++++++++++++++---------------------- 1 file changed, 107 insertions(+), 146 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index c1d95aae..1a2094de 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -11,10 +11,8 @@ # Globals basename=pihole piholeDir=/etc/"${basename}" -whitelist="${piholeDir}"/whitelist.txt -blacklist="${piholeDir}"/blacklist.txt +gravityDBfile="${piholeDir}/gravity.db" -readonly regexlist="/etc/pihole/regex.list" reload=false addmode=true verbose=true @@ -22,35 +20,34 @@ wildcard=false domList=() -listMain="" -listAlt="" +listType="" colfile="/opt/pihole/COL_TABLE" source ${colfile} helpFunc() { - if [[ "${listMain}" == "${whitelist}" ]]; then + if [[ "${listType}" == "whitelist" ]]; then param="w" - type="white" - elif [[ "${listMain}" == "${regexlist}" && "${wildcard}" == true ]]; then + type="whitelist" + elif [[ "${listType}" == "regex" && "${wildcard}" == true ]]; then param="-wild" - type="wildcard black" - elif [[ "${listMain}" == "${regexlist}" ]]; then + type="wildcard blacklist" + elif [[ "${listType}" == "regex" ]]; then param="-regex" - type="regex black" + type="regex filter" else param="b" - type="black" + type="blacklist" fi echo "Usage: pihole -${param} [options] Example: 'pihole -${param} site.com', or 'pihole -${param} site1.com site2.com' -${type^}list one or more domains +${type^} one or more domains Options: - -d, --delmode Remove domain(s) from the ${type}list - -nr, --noreload Update ${type}list without refreshing dnsmasq + -d, --delmode Remove domain(s) from the ${type} + -nr, --noreload Update ${type} without reloading the DNS server -q, --quiet Make output less verbose -h, --help Show this help dialog -l, --list 
Display all your ${type}listed domains @@ -73,7 +70,7 @@ HandleOther() { # Check validity of domain (don't check for regex entries) if [[ "${#domain}" -le 253 ]]; then - if [[ "${listMain}" == "${regexlist}" && "${wildcard}" == false ]]; then + if [[ "${listType}" == "regex" && "${wildcard}" == false ]]; then validDomain="${domain}" else validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check @@ -88,169 +85,134 @@ HandleOther() { fi } -PoplistFile() { - # Check whitelist file exists, and if not, create it - if [[ ! -f "${whitelist}" ]]; then - touch "${whitelist}" - fi - - # Check blacklist file exists, and if not, create it - if [[ ! -f "${blacklist}" ]]; then - touch "${blacklist}" - fi - +ProcessDomainList() { for dom in "${domList[@]}"; do # Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the other if ${addmode}; then - AddDomain "${dom}" "${listMain}" + AddDomain "${dom}" "${listType}" RemoveDomain "${dom}" "${listAlt}" else - RemoveDomain "${dom}" "${listMain}" + RemoveDomain "${dom}" "${listType}" fi done } AddDomain() { + local domain list listname sqlitekey num + domain="$1" list="$2" - domain=$(EscapeRegexp "$1") - - [[ "${list}" == "${whitelist}" ]] && listname="whitelist" - [[ "${list}" == "${blacklist}" ]] && listname="blacklist" - - if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then - [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only" - [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only" - bool=true - # Is the domain in the list we want to add it to? - grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false - - if [[ "${bool}" == false ]]; then - # Domain not found in the whitelist file, add it! - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Adding ${1} to ${listname}..." - fi - reload=true - # Add it to the list we want to add it to - echo "$1" >> "${list}" - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" - fi - fi - elif [[ "${list}" == "${regexlist}" ]]; then - [[ -z "${type}" ]] && type="--wildcard-only" - bool=true - domain="${1}" - [[ "${wildcard}" == true ]] && domain="(^|\\.)${domain//\./\\.}$" + if [[ "${list}" == "regex" ]]; then + listname="regex filters" + sqlitekey="filter" + if [[ "${wildcard}" == true ]]; then + domain="(^|\\.)${domain//\./\\.}$" + fi + else + # Whitelist / Blacklist + listname="${list}list" + sqlitekey="domain" + fi - # Is the domain in the list? - # Search only for exactly matching lines - grep -Fx "${domain}" "${regexlist}" > /dev/null 2>&1 || bool=false + # Is the domain in the list we want to add it to? + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" - if [[ "${bool}" == false ]]; then - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Adding ${domain} to regex list..." - fi - reload="restart" - echo "$domain" >> "${regexlist}" - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${domain} already exists in regex list, no need to add!" - fi + if [[ "${num}" -eq 0 ]]; then + # Domain not found in the file, add it! + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} Adding ${1} to ${listname}..." 
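The AddDomain flow above -- count first, insert only when the domain is absent -- can be replayed end to end against a scratch database. Table layout, domain and messages below are illustrative only:

    db="$(mktemp)"
    sqlite3 "${db}" "CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER);"
    domain="example.com"
    num="$(sqlite3 "${db}" "SELECT COUNT(*) FROM whitelist WHERE domain = '${domain}';")"
    if [[ "${num}" -eq 0 ]]; then
        timestamp="$(date --utc +'%s')"
        sqlite3 "${db}" "INSERT INTO whitelist (domain,enabled,date_added) VALUES ('${domain}',1,${timestamp});"
        echo "Added ${domain}"
    else
        echo "${domain} already exists, no need to add"
    fi
    rm -f "${db}"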
+ fi + reload=true + # Add it to the list we want to add it to + local timestamp + timestamp="$(date --utc +'%s')" + sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (${sqlitekey},enabled,date_added) VALUES (\"${domain}\",1,${timestamp});" + else + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" fi fi } RemoveDomain() { + local domain list listname sqlitekey num + domain="$1" list="$2" - domain=$(EscapeRegexp "$1") - - [[ "${list}" == "${whitelist}" ]] && listname="whitelist" - [[ "${list}" == "${blacklist}" ]] && listname="blacklist" - - if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then - bool=true - [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only" - [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only" - # Is it in the list? Logic follows that if its whitelisted it should not be blacklisted and vice versa - grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false - if [[ "${bool}" == true ]]; then - # Remove it from the other one - echo -e " ${INFO} Removing $1 from ${listname}..." - # /I flag: search case-insensitive - sed -i "/${domain}/Id" "${list}" - reload=true - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!" - fi - fi - elif [[ "${list}" == "${regexlist}" ]]; then - [[ -z "${type}" ]] && type="--wildcard-only" - domain="${1}" - - [[ "${wildcard}" == true ]] && domain="(^|\\.)${domain//\./\\.}$" - - bool=true - # Is it in the list? - grep -Fx "${domain}" "${regexlist}" > /dev/null 2>&1 || bool=false - if [[ "${bool}" == true ]]; then - # Remove it from the other one - echo -e " ${INFO} Removing $domain from regex list..." - local lineNumber - lineNumber=$(grep -Fnx "$domain" "${list}" | cut -f1 -d:) - sed -i "${lineNumber}d" "${list}" - reload=true - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${domain} does not exist in regex list, no need to remove!" - fi + + if [[ "${list}" == "regex" ]]; then + listname="regex filters" + sqlitekey="filter" + if [[ "${wildcard}" == true ]]; then + domain="(^|\\.)${domain//\./\\.}$" fi + else + # Whitelist / Blacklist + listname="${list}list" + sqlitekey="domain" fi -} -# Update Gravity -Reload() { - echo "" - pihole -g --skip-download "${type:-}" + # Is the domain in the list we want to remove it from? + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" + + if [[ "${num}" -ne 0 ]]; then + # Domain found in the file, remove it! + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} Removing ${1} from ${listname}..." + fi + reload=true + # Remove it from the current list + local timestamp + timestamp="$(date --utc +'%s')" + sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE ${sqlitekey} = \"${domain}\";" + else + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!" 
+ fi + fi } Displaylist() { - if [[ -f ${listMain} ]]; then - if [[ "${listMain}" == "${whitelist}" ]]; then - string="gravity resistant domains" - else - string="domains caught in the sinkhole" - fi - verbose=false - echo -e "Displaying $string:\n" + local domain list listname count status + + if [[ "${listType}" == "regex" ]]; then + listname="regex filters list" + else + # Whitelist / Blacklist + listname="${listType}" + fi + data="$(sqlite3 "${gravityDBfile}" "SELECT * FROM ${listType};" 2> /dev/null)" + + if [[ -z $data ]]; then + echo -e "Not showing empty ${listname}" + else + echo -e "Displaying ${listname}:" count=1 - while IFS= read -r RD || [ -n "${RD}" ]; do - echo " ${count}: ${RD}" + while IFS= read -r line + do + domain="$(cut -d'|' -f1 <<< "${line}")" + enabled="$(cut -d'|' -f2 <<< "${line}")" + if [[ "${enabled}" -eq 1 ]]; then + status="enabled" + else + status="disabled" + fi + echo " ${count}: ${domain} (${status})" count=$((count+1)) - done < "${listMain}" - else - echo -e " ${COL_LIGHT_RED}${listMain} does not exist!${COL_NC}" + done <<< "${data}" + exit 0; fi - exit 0; } NukeList() { - if [[ -f "${listMain}" ]]; then - # Back up original list - cp "${listMain}" "${listMain}.bck~" - # Empty out file - echo "" > "${listMain}" - fi + sqlite3 "${gravityDBfile}" "DELETE FROM ${listType};" } for var in "$@"; do case "${var}" in - "-w" | "whitelist" ) listMain="${whitelist}"; listAlt="${blacklist}";; - "-b" | "blacklist" ) listMain="${blacklist}"; listAlt="${whitelist}";; - "--wild" | "wildcard" ) listMain="${regexlist}"; wildcard=true;; - "--regex" | "regex" ) listMain="${regexlist}";; + "-w" | "whitelist" ) listType="whitelist"; listAlt="blacklist";; + "-b" | "blacklist" ) listType="blacklist"; listAlt="whitelist";; + "--wild" | "wildcard" ) listType="regex"; wildcard=true;; + "--regex" | "regex" ) listType="regex";; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; @@ -267,9 +229,8 @@ if [[ $# = 0 ]]; then helpFunc fi -PoplistFile +ProcessDomainList if [[ "${reload}" != false ]]; then - # Ensure that "restart" is used for Wildcard updates - Reload "${reload}" + pihole restartdns reload fi From 788cd783212828229fb8708f38841b4a89472267 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 12:30:38 +0200 Subject: [PATCH 036/366] Reduce code duplication and add special handling of | character as it might appear in regex filter string Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 70 +++++++++++++++++++++------------------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 1a2094de..727919fe 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -21,9 +21,11 @@ wildcard=false domList=() listType="" +listname="" +sqlitekey="" -colfile="/opt/pihole/COL_TABLE" -source ${colfile} +# shellcheck source=/opt/pihole/COL_TABLE +source "/opt/pihole/COL_TABLE" helpFunc() { @@ -61,7 +63,7 @@ EscapeRegexp() { # string in our regular expressions # This sed is intentionally executed in three steps to ease maintainability # The first sed removes any amount of leading dots - echo $* | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g" + echo "$@" | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g" } HandleOther() { @@ -86,11 +88,28 @@ HandleOther() { } ProcessDomainList() { + if [[ "${listType}" == "regex" ]]; then + # Regex filter list + listname="regex filters" + sqlitekey="filter" + else + # 
Whitelist / Blacklist + listname="${listType}" + sqlitekey="domain" + fi + for dom in "${domList[@]}"; do - # Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the other + # Format domain into regex filter if requested + if [[ "${wildcard}" == true ]]; then + dom="(^|\\.)${dom//\./\\.}$" + fi + + # Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the othe if ${addmode}; then AddDomain "${dom}" "${listType}" - RemoveDomain "${dom}" "${listAlt}" + if [[ ! "${listType}" == "regex" ]]; then + RemoveDomain "${dom}" "${listAlt}" + fi else RemoveDomain "${dom}" "${listType}" fi @@ -98,22 +117,10 @@ ProcessDomainList() { } AddDomain() { - local domain list listname sqlitekey num + local domain list num domain="$1" list="$2" - if [[ "${list}" == "regex" ]]; then - listname="regex filters" - sqlitekey="filter" - if [[ "${wildcard}" == true ]]; then - domain="(^|\\.)${domain//\./\\.}$" - fi - else - # Whitelist / Blacklist - listname="${list}list" - sqlitekey="domain" - fi - # Is the domain in the list we want to add it to? num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" @@ -135,22 +142,10 @@ AddDomain() { } RemoveDomain() { - local domain list listname sqlitekey num + local domain list num domain="$1" list="$2" - if [[ "${list}" == "regex" ]]; then - listname="regex filters" - sqlitekey="filter" - if [[ "${wildcard}" == true ]]; then - domain="(^|\\.)${domain//\./\\.}$" - fi - else - # Whitelist / Blacklist - listname="${list}list" - sqlitekey="domain" - fi - # Is the domain in the list we want to remove it from? num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" @@ -172,7 +167,7 @@ RemoveDomain() { } Displaylist() { - local domain list listname count status + local list listname count num_pipes domain enabled status if [[ "${listType}" == "regex" ]]; then listname="regex filters list" @@ -189,13 +184,20 @@ Displaylist() { count=1 while IFS= read -r line do - domain="$(cut -d'|' -f1 <<< "${line}")" - enabled="$(cut -d'|' -f2 <<< "${line}")" + # Count number of pipes seen in this line + # This is necessary because we can only detect the pipe separating the fields + # from the end backwards as the domain (which is the first field) may contain + # pipe symbols as they are perfectly valid regex filter control characters + num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")" + + domain="$(cut -d'|' -f"-$((num_pipes-2))" <<< "${line}")" + enabled="$(cut -d'|' -f$((num_pipes-1)) <<< "${line}")" if [[ "${enabled}" -eq 1 ]]; then status="enabled" else status="disabled" fi + echo " ${count}: ${domain} (${status})" count=$((count+1)) done <<< "${data}" From 43bced7997772d5604d1302e1c8fbc6369ec45e1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 12:45:08 +0200 Subject: [PATCH 037/366] Add more comments Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 727919fe..5fd24f6b 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -24,8 +24,8 @@ listType="" listname="" sqlitekey="" -# shellcheck source=/opt/pihole/COL_TABLE -source "/opt/pihole/COL_TABLE" +colfile="/opt/pihole/COL_TABLE" +source ${colfile} helpFunc() { @@ -63,7 +63,7 @@ EscapeRegexp() { # string in our regular 
expressions # This sed is intentionally executed in three steps to ease maintainability # The first sed removes any amount of leading dots - echo "$@" | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g" + echo $* | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g" } HandleOther() { @@ -190,8 +190,11 @@ Displaylist() { # pipe symbols as they are perfectly valid regex filter control characters num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")" + # Extract domain and enabled status based on the obtained number of pipe characters domain="$(cut -d'|' -f"-$((num_pipes-2))" <<< "${line}")" - enabled="$(cut -d'|' -f$((num_pipes-1)) <<< "${line}")" + enabled="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" + + # Translate boolean status into human readable string if [[ "${enabled}" -eq 1 ]]; then status="enabled" else From cce66a13b69ece62037b9dd2a01cf4aab18a3abe Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 14:54:41 +0200 Subject: [PATCH 038/366] Fix output in list.sh when adding a domain that is already present in the target list Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 5fd24f6b..1ad99468 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -161,7 +161,7 @@ RemoveDomain() { sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE ${sqlitekey} = \"${domain}\";" else if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!" + echo -e " ${INFO} ${1} does not exist in ${list}, no need to remove!" fi fi } From a2a7e8000795d2b58071d7393b86cf2bc3e894fc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 25 Apr 2019 15:01:13 +0200 Subject: [PATCH 039/366] Add --web for sending a confirmation string when executed from the web interface. This is necessary as the "Reloading DNS service" message does not always propagate through to the web interface when reloading pihole-FTL takes a while. 
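Coming back to the pipe-counting comments added above: because a regex filter may itself contain "|", Displaylist has to locate the field separators from the right-hand end of each sqlite3 output line. A standalone sketch with an invented line (four columns, empty comment) shows the effect:

    line='(^|\.)doubleclick\.net$|1|1556200000|'   # domain|enabled|date_added|comment, comment empty
    num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")"   # 4 pipes: one inside the regex, three separators
    domain="$(cut -d'|' -f"-$((num_pipes-2))" <<< "${line}")"
    enabled="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")"
    echo "domain=${domain} enabled=${enabled}"   # domain=(^|\.)doubleclick\.net$ enabled=1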
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 1ad99468..969e7e44 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -17,6 +17,7 @@ reload=false addmode=true verbose=true wildcard=false +web=false domList=() @@ -224,6 +225,7 @@ for var in "$@"; do "-h" | "--help" ) helpFunc;; "-l" | "--list" ) Displaylist;; "--nuke" ) NukeList;; + "--web" ) web=true;; * ) HandleOther "${var}";; esac done @@ -236,6 +238,11 @@ fi ProcessDomainList +# Used on web interface +if $web; then +echo "DONE" +fi + if [[ "${reload}" != false ]]; then pihole restartdns reload fi From 0efc46260f0726edf0e2986df536c0f4e0c93fdc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 16:16:30 +0200 Subject: [PATCH 040/366] Use both old and new strings for detecting whether this is a Pi-hole dnsmasq config file Signed-off-by: DL6ER --- automated install/basic-install.sh | 6 ++++-- gravity.sh | 8 +++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 6bcfb564..06facdf7 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1224,7 +1224,8 @@ version_check_dnsmasq() { # Local, named variables local dnsmasq_conf="/etc/dnsmasq.conf" local dnsmasq_conf_orig="/etc/dnsmasq.conf.orig" - local dnsmasq_pihole_id_string="# Dnsmasq config for Pi-hole's FTLDNS" + local dnsmasq_pihole_id_string="addn-hosts=/etc/pihole/gravity.list" + local dnsmasq_pihole_id_string2="# Dnsmasq config for Pi-hole's FTLDNS" local dnsmasq_original_config="${PI_HOLE_LOCAL_REPO}/advanced/dnsmasq.conf.original" local dnsmasq_pihole_01_snippet="${PI_HOLE_LOCAL_REPO}/advanced/01-pihole.conf" local dnsmasq_pihole_01_location="/etc/dnsmasq.d/01-pihole.conf" @@ -1233,7 +1234,8 @@ version_check_dnsmasq() { if [[ -f "${dnsmasq_conf}" ]]; then printf " %b Existing dnsmasq.conf found..." "${INFO}" # If a specific string is found within this file, we presume it's from older versions on Pi-hole, - if grep -q ${dnsmasq_pihole_id_string} ${dnsmasq_conf}; then + if grep -q "${dnsmasq_pihole_id_string}" "${dnsmasq_conf}" || + grep -q "${dnsmasq_pihole_id_string2}" "${dnsmasq_conf}"; then printf " it is from a previous Pi-hole install.\\n" printf " %b Backing up dnsmasq.conf to dnsmasq.conf.orig..." 
"${INFO}" # so backup the original file diff --git a/gravity.sh b/gravity.sh index 09e7b19c..75057eec 100755 --- a/gravity.sh +++ b/gravity.sh @@ -20,8 +20,9 @@ source "${regexconverter}" basename="pihole" PIHOLE_COMMAND="/usr/local/bin/${basename}" -PIHOLE_USER="pihole" -PIHOLE_GROUP="pihole" +DATABASE_USER="pihole" +DATABASE_GROUP="www-data" +DATABASE_PERMISSIONS="0660" piholeDir="/etc/${basename}" @@ -86,7 +87,8 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" - chown $PIHOLE_USER:$PIHOLE_GROUP "${gravityDBfile}" + chown ${DATABASE_USER}:${DATABASE_GROUP} "${piholeDir}" "${gravityDBfile}" + chmod ${DATABASE_PERMISSIONS} "${piholeDir}" "${gravityDBfile}" } # Import domains from file and store them in the specified database table From 08ee9526adde9fa3bd18cffa8b58ef148c2ccc52 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:06:01 +0200 Subject: [PATCH 041/366] Review comments Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 76 +++++++++++++++++++++------------------- 1 file changed, 39 insertions(+), 37 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 969e7e44..af44013e 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -105,7 +105,8 @@ ProcessDomainList() { dom="(^|\\.)${dom//\./\\.}$" fi - # Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the othe + # Logic: If addmode then add to desired list and remove from the other; + # if delmode then remove from desired list but do not add to the other if ${addmode}; then AddDomain "${dom}" "${listType}" if [[ ! "${listType}" == "regex" ]]; then @@ -119,63 +120,64 @@ ProcessDomainList() { AddDomain() { local domain list num - domain="$1" + # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input + domain="$(printf "%q" "$1")" list="$2" # Is the domain in the list we want to add it to? num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" - if [[ "${num}" -eq 0 ]]; then - # Domain not found in the file, add it! - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Adding ${1} to ${listname}..." - fi - reload=true - # Add it to the list we want to add it to - local timestamp - timestamp="$(date --utc +'%s')" - sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (${sqlitekey},enabled,date_added) VALUES (\"${domain}\",1,${timestamp});" - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" - fi + if [[ "${num}" -ne 0 ]]; then + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" + fi + return fi + + # Domain not found in the table, add it! + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} Adding ${1} to ${listname}..." + fi + reload=true + # Add it to the list we want to add it to + local timestamp + timestamp="$(date --utc +'%s')" + # Insert only domain and date_added here. The enabled fields will be filled + # with its default value is true. + sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (${sqlitekey},date_added) VALUES (\"${domain}\",${timestamp});" } RemoveDomain() { local domain list num - domain="$1" + # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input + domain="$(printf "%q" "$1")" list="$2" # Is the domain in the list we want to remove it from? 
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" - if [[ "${num}" -ne 0 ]]; then - # Domain found in the file, remove it! - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Removing ${1} from ${listname}..." - fi - reload=true - # Remove it from the current list - local timestamp - timestamp="$(date --utc +'%s')" - sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE ${sqlitekey} = \"${domain}\";" - else - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} does not exist in ${list}, no need to remove!" - fi + if [[ "${num}" -eq 0 ]]; then + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} does not exist in ${list}, no need to remove!" + fi + return fi + + # Domain found in the table, remove it! + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} Removing ${1} from ${listname}..." + fi + reload=true + # Remove it from the current list + local timestamp + timestamp="$(date --utc +'%s')" + sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE ${sqlitekey} = \"${domain}\";" } Displaylist() { local list listname count num_pipes domain enabled status - if [[ "${listType}" == "regex" ]]; then - listname="regex filters list" - else - # Whitelist / Blacklist - listname="${listType}" - fi + listname="${listType}" data="$(sqlite3 "${gravityDBfile}" "SELECT * FROM ${listType};" 2> /dev/null)" if [[ -z $data ]]; then From 6b1d2523e8bea9310e5095208f0d4732cef14cb9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:07:16 +0200 Subject: [PATCH 042/366] Also print datetime a domain was added when listing the domains on the CLI Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index af44013e..d11ab96c 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -175,7 +175,7 @@ RemoveDomain() { } Displaylist() { - local list listname count num_pipes domain enabled status + local list listname count num_pipes domain enabled status nicedate listname="${listType}" data="$(sqlite3 "${gravityDBfile}" "SELECT * FROM ${listType};" 2> /dev/null)" @@ -196,7 +196,9 @@ Displaylist() { # Extract domain and enabled status based on the obtained number of pipe characters domain="$(cut -d'|' -f"-$((num_pipes-2))" <<< "${line}")" enabled="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" + dateadded="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")" + echo "${dateadded}" # Translate boolean status into human readable string if [[ "${enabled}" -eq 1 ]]; then status="enabled" @@ -204,7 +206,10 @@ Displaylist() { status="disabled" fi - echo " ${count}: ${domain} (${status})" + # Get nice representation of numerical date stored in database + nicedate=$(date --rfc-2822 -d "@${dateadded}") + + echo " ${count}: ${domain} (${status}, added ${nicedate})" count=$((count+1)) done <<< "${data}" exit 0; From a891d6439533f64a42baacdb409df4725ac7461e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:13:36 +0200 Subject: [PATCH 043/366] We should always exit after displaying a list, even if it is empty Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index d11ab96c..cd9dce52 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -212,8 +212,8 @@ Displaylist() { echo " ${count}: ${domain} (${status}, added ${nicedate})" count=$((count+1)) done <<< "${data}" - exit 
0; fi + exit 0; } NukeList() { From 5bb816367422c5c6394543ff88ed8b22f31cf60f Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:17:21 +0200 Subject: [PATCH 044/366] Change type of whitelist, blacklist, regex, and adlists time column to INTEGER and insert a DEFAULT clause that returns the current Unix time as such an integer Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 5442987f..cd62c94e 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,7 +1,7 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); -CREATE TABLE regex (filter TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); -CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, comment TEXT); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); From 53e3ff2b2421a491badb24be578717e3bc11e485 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:32:05 +0200 Subject: [PATCH 045/366] Only add non-empty lines when moving file content into database tables Signed-off-by: DL6ER --- gravity.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 75057eec..1d94424f 100755 --- a/gravity.sh +++ b/gravity.sh @@ -125,7 +125,10 @@ database_table_from_file() { # Read file line by line grep -v '^ *#' < "${source}" | while IFS= read -r domain do - echo "\"${domain}\",1,${timestamp}" >> "${tmpFile}" + # Only add non-empty lines + if [[ ! 
-z "${domain}" ]]; then + echo "\"${domain}\",1,${timestamp}" >> "${tmpFile}" + fi done inputfile="${tmpFile}" fi From bd0215524ba0191b5364a596e769027a22be88fd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:36:00 +0200 Subject: [PATCH 046/366] Add vw_adlists view which returns only enabled adlists Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 6 +++++- gravity.sh | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index cd62c94e..ebe97955 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -20,6 +20,10 @@ CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain FROM whitelist a WHERE a.enabled == 1; -CREATE VIEW vw_regex AS SELECT DISTINCT a.filter +CREATE VIEW vw_regex AS SELECT DISTINCT a.domain FROM regex a WHERE a.enabled == 1; + +CREATE VIEW vw_adlists AS SELECT DISTINCT a.address +FROM adlists a +WHERE a.enabled == 1; diff --git a/gravity.sh b/gravity.sh index 1d94424f..a0652ced 100755 --- a/gravity.sh +++ b/gravity.sh @@ -238,7 +238,7 @@ gravity_GetBlocklistUrls() { # Retrieve source URLs from gravity database # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true) - mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM adlists WHERE enabled = 1;" 2> /dev/null)" + mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( From 3dcb9722d35fb42d8a63903d5808a493e0b8507c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:39:35 +0200 Subject: [PATCH 047/366] Remove global haveSourceUrls variable and just skip downloading and processing adlists if there aren't any to be downloaded Signed-off-by: DL6ER --- gravity.sh | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/gravity.sh b/gravity.sh index a0652ced..3543ac15 100755 --- a/gravity.sh +++ b/gravity.sh @@ -47,8 +47,6 @@ preEventHorizon="list.preEventHorizon" resolver="pihole-FTL" -haveSourceUrls=true - # Source setupVars from install script setupVars="${piholeDir}/setupVars.conf" if [[ -f "${setupVars}" ]];then @@ -255,11 +253,12 @@ gravity_GetBlocklistUrls() { if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then echo -e "${OVER} ${TICK} ${str}" + return 0 else echo -e "${OVER} ${CROSS} ${str}" echo -e " ${INFO} No source list found, or it is empty" echo "" - haveSourceUrls=false + return 1 fi } @@ -503,9 +502,7 @@ gravity_ConsolidateDownloadedBlocklists() { local str lastLine str="Consolidating blocklists" - if [[ "${haveSourceUrls}" == true ]]; then - echo -ne " ${INFO} ${str}..." - fi + echo -ne " ${INFO} ${str}..." # Empty $matterAndLight if it already exists, otherwise, create it : > "${piholeDir}/${matterAndLight}" @@ -524,9 +521,8 @@ gravity_ConsolidateDownloadedBlocklists() { fi fi done - if [[ "${haveSourceUrls}" == true ]]; then - echo -e "${OVER} ${TICK} ${str}" - fi + echo -e "${OVER} ${TICK} ${str}" + } # Parse consolidated list into (filtered, unique) domains-only format @@ -534,18 +530,15 @@ gravity_SortAndFilterConsolidatedList() { local str num str="Extracting domains from blocklists" - if [[ "${haveSourceUrls}" == true ]]; then - echo -ne " ${INFO} ${str}..." - fi + echo -ne " ${INFO} ${str}..." 
# Parse into file gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}" # Format $parsedMatter line total as currency num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")") - if [[ "${haveSourceUrls}" == true ]]; then - echo -e "${OVER} ${TICK} ${str}" - fi + + echo -e "${OVER} ${TICK} ${str}" echo -e " ${INFO} Gravity pulled in ${COL_BLUE}${num}${COL_NC} domains" str="Removing duplicate domains" @@ -713,12 +706,12 @@ fi # Gravity downloads blocklists next gravity_CheckDNSResolutionAvailable gravity_GetBlocklistUrls -if [[ "${haveSourceUrls}" == true ]]; then -gravity_SetDownloadOptions +if gravity_GetBlocklistUrls; then + gravity_SetDownloadOptions + # Build preEventHorizon + gravity_ConsolidateDownloadedBlocklists + gravity_SortAndFilterConsolidatedList fi -# Build preEventHorizon -gravity_ConsolidateDownloadedBlocklists -gravity_SortAndFilterConsolidatedList # Create local.list gravity_generateLocalList From e04dc9d2bb20400912067d1ff39c6dcc2873f76c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:44:42 +0200 Subject: [PATCH 048/366] Remove sqlitekey variable as the first column of the regex table is now called domain as well Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index cd9dce52..dcd6f580 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -23,7 +23,6 @@ domList=() listType="" listname="" -sqlitekey="" colfile="/opt/pihole/COL_TABLE" source ${colfile} @@ -92,11 +91,9 @@ ProcessDomainList() { if [[ "${listType}" == "regex" ]]; then # Regex filter list listname="regex filters" - sqlitekey="filter" else # Whitelist / Blacklist listname="${listType}" - sqlitekey="domain" fi for dom in "${domList[@]}"; do @@ -125,7 +122,7 @@ AddDomain() { list="$2" # Is the domain in the list we want to add it to? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = \"${domain}\";")" if [[ "${num}" -ne 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -144,7 +141,7 @@ AddDomain() { timestamp="$(date --utc +'%s')" # Insert only domain and date_added here. The enabled fields will be filled # with its default value is true. - sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (${sqlitekey},date_added) VALUES (\"${domain}\",${timestamp});" + sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain,date_added) VALUES (\"${domain}\",${timestamp});" } RemoveDomain() { @@ -154,7 +151,7 @@ RemoveDomain() { list="$2" # Is the domain in the list we want to remove it from? 
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE ${sqlitekey} = \"${domain}\";")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = \"${domain}\";")" if [[ "${num}" -eq 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -171,7 +168,7 @@ RemoveDomain() { # Remove it from the current list local timestamp timestamp="$(date --utc +'%s')" - sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE ${sqlitekey} = \"${domain}\";" + sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE domain = \"${domain}\";" } Displaylist() { From 8524aecfed5eb4e3984fe938d2049c47976741c7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 17:56:11 +0200 Subject: [PATCH 049/366] Don't set gravity database permissions Signed-off-by: DL6ER --- gravity.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 3543ac15..98db04b8 100755 --- a/gravity.sh +++ b/gravity.sh @@ -20,9 +20,6 @@ source "${regexconverter}" basename="pihole" PIHOLE_COMMAND="/usr/local/bin/${basename}" -DATABASE_USER="pihole" -DATABASE_GROUP="www-data" -DATABASE_PERMISSIONS="0660" piholeDir="/etc/${basename}" @@ -85,8 +82,6 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" - chown ${DATABASE_USER}:${DATABASE_GROUP} "${piholeDir}" "${gravityDBfile}" - chmod ${DATABASE_PERMISSIONS} "${piholeDir}" "${gravityDBfile}" } # Import domains from file and store them in the specified database table From 3fe43ce1d923f825b73a4f9c367b3a7f1bf344c9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 18:15:53 +0200 Subject: [PATCH 050/366] Rely on default value (database provided) for date_added when adding a new domain Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index dcd6f580..053def02 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -133,15 +133,12 @@ AddDomain() { # Domain not found in the table, add it! if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Adding ${1} to ${listname}..." + echo -e " ${INFO} Adding ${1} to the ${listname}..." fi reload=true - # Add it to the list we want to add it to - local timestamp - timestamp="$(date --utc +'%s')" - # Insert only domain and date_added here. The enabled fields will be filled - # with its default value is true. - sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain,date_added) VALUES (\"${domain}\",${timestamp});" + # Insert only the domain here. The enabled and date_added fields will be filled + # with their default values (enabled = true, date_added = current timestamp) + sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain) VALUES (\"${domain}\");" } RemoveDomain() { @@ -162,12 +159,10 @@ RemoveDomain() { # Domain found in the table, remove it! if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Removing ${1} from ${listname}..." + echo -e " ${INFO} Removing ${1} from the ${listname}..." 
fi reload=true # Remove it from the current list - local timestamp - timestamp="$(date --utc +'%s')" sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE domain = \"${domain}\";" } @@ -195,7 +190,6 @@ Displaylist() { enabled="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" dateadded="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")" - echo "${dateadded}" # Translate boolean status into human readable string if [[ "${enabled}" -eq 1 ]]; then status="enabled" From b4ae142149f60150d583ec49876dfdef7dd3d259 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 26 Apr 2019 20:54:01 +0200 Subject: [PATCH 051/366] Remove printf escaping (we will realize it differently) and ensure we're using single quotes for strings (although double quotes are possible, too) Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 053def02..374f888b 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -118,11 +118,11 @@ ProcessDomainList() { AddDomain() { local domain list num # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input - domain="$(printf "%q" "$1")" + domain="$1" list="$2" # Is the domain in the list we want to add it to? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = \"${domain}\";")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = '${domain}';")" if [[ "${num}" -ne 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -138,17 +138,17 @@ AddDomain() { reload=true # Insert only the domain here. The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain) VALUES (\"${domain}\");" + sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain) VALUES ('${domain}');" } RemoveDomain() { local domain list num # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input - domain="$(printf "%q" "$1")" + domain="$1" list="$2" # Is the domain in the list we want to remove it from? 
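With the DEFAULT clauses introduced in the earlier schema change, the single-column INSERT used above really is sufficient. A scratch-database sketch (whitelist line borrowed from gravity.db.sql, domain made up) shows the database filling in the rest:

    db="$(mktemp)"
    sqlite3 "${db}" "CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT);"
    sqlite3 "${db}" "INSERT INTO whitelist (domain) VALUES ('example.com');"
    sqlite3 "${db}" "SELECT domain, enabled, date_added FROM whitelist;"   # enabled=1 and a Unix timestamp were supplied by the defaults
    rm -f "${db}"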
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = \"${domain}\";")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = '${domain}';")" if [[ "${num}" -eq 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -163,7 +163,7 @@ RemoveDomain() { fi reload=true # Remove it from the current list - sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE domain = \"${domain}\";" + sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE domain = '${domain}';" } Displaylist() { From 126f7101ffaa56b42891d9db1bed12e270222bd0 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 28 Apr 2019 14:42:52 +0200 Subject: [PATCH 052/366] Add (optional) date_modified field and use vw_whitelist view in vw_blacklist Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 6 +++--- advanced/Templates/gravity.db.sql | 17 ++++++++--------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 374f888b..73cb2878 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -186,9 +186,9 @@ Displaylist() { num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")" # Extract domain and enabled status based on the obtained number of pipe characters - domain="$(cut -d'|' -f"-$((num_pipes-2))" <<< "${line}")" - enabled="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" - dateadded="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")" + domain="$(cut -d'|' -f"-$((num_pipes-3))" <<< "${line}")" + enabled="$(cut -d'|' -f"$((num_pipes-2))" <<< "${line}")" + dateadded="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" # Translate boolean status into human readable string if [[ "${enabled}" -eq 1 ]]; then diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index ebe97955..31fbfd9f 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,7 +1,7 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); +CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); @@ 
-11,15 +11,14 @@ CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain FROM gravity a WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); -CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain -FROM blacklist a -WHERE a.enabled == 1 AND - a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); - CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain FROM whitelist a WHERE a.enabled == 1; +CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain +FROM blacklist a +WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist; + CREATE VIEW vw_regex AS SELECT DISTINCT a.domain FROM regex a WHERE a.enabled == 1; From efeba594ae791000df0461b00e82e02251892038 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 28 Apr 2019 21:39:06 +0200 Subject: [PATCH 053/366] Add support for manipulating adlists from the CLI Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index c6852896..6ac351bc 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -17,6 +17,8 @@ readonly FTLconf="/etc/pihole/pihole-FTL.conf" # 03 -> wildcards readonly dhcpstaticconfig="/etc/dnsmasq.d/04-pihole-static-dhcp.conf" +readonly gravityDBfile="/etc/pihole/gravity.db" + coltable="/opt/pihole/COL_TABLE" if [[ -f ${coltable} ]]; then source ${coltable} @@ -385,19 +387,17 @@ SetWebUILayout() { } CustomizeAdLists() { - list="/etc/pihole/adlists.list" + local address + address="${args[3]}" if [[ "${args[2]}" == "enable" ]]; then - sed -i "\\@${args[3]}@s/^#http/http/g" "${list}" + sqlite3 "${gravityDBfile}" "UPDATE adlists SET enabled = 1 WHERE address = '${address}'" elif [[ "${args[2]}" == "disable" ]]; then - sed -i "\\@${args[3]}@s/^http/#http/g" "${list}" + sqlite3 "${gravityDBfile}" "UPDATE adlists SET enabled = 0 WHERE address = '${address}'" elif [[ "${args[2]}" == "add" ]]; then - if [[ $(grep -c "^${args[3]}$" "${list}") -eq 0 ]] ; then - echo "${args[3]}" >> ${list} - fi + sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlists (address) VALUES ('${address}')" elif [[ "${args[2]}" == "del" ]]; then - var=$(echo "${args[3]}" | sed 's/\//\\\//g') - sed -i "/${var}/Id" "${list}" + sqlite3 "${gravityDBfile}" "DELETE FROM adlists WHERE address = '${address}'" else echo "Not permitted" return 1 From 4946b5907bf4d45fda807558e2e0234ed70db784 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 28 Apr 2019 22:15:47 +0200 Subject: [PATCH 054/366] Move migrated files to backup directory instead of deleting them. This has the benefit that gravity.db can be recreated at any time by: 1. deleting gravity.db, 2. copying all files from the migration_backup directory back to /etc/pihole, 3. running pihole -g again. Signed-off-by: DL6ER --- gravity.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index a3456fb4..6e238e68 100755 --- a/gravity.sh +++ b/gravity.sh @@ -89,6 +89,8 @@ database_table_from_file() { # Define locals local table="${1}" local source="${2}" + local backup_path="${piholeDir}/migration_backup" + local backup_file="${backup_path}/$(basename "${2}")" # Create database file if not present if [ ! 
-e "${gravityDBfile}" ]; then @@ -140,8 +142,9 @@ database_table_from_file() { rm "${tmpFile}" > /dev/null 2>&1 || \ echo -e " ${CROSS} Unable to remove ${tmpFile}" - # Delete source file - rm "${source}" 2> /dev/null || \ + # Move source file to backup directory, create directory if not existing + mkdir -p "${backup_path}" + mv "${source}" "${backup_file}" 2> /dev/null || \ echo -e " ${CROSS} Unable to remove ${source}" } @@ -702,7 +705,6 @@ fi # Gravity downloads blocklists next gravity_CheckDNSResolutionAvailable -gravity_GetBlocklistUrls if gravity_GetBlocklistUrls; then gravity_SetDownloadOptions # Build preEventHorizon From a932209143af5eb9e3acd4991a5bb203375ba461 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 29 Apr 2019 17:19:02 +0200 Subject: [PATCH 055/366] Set default for date_modified to be the current time Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- gravity.sh | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 31fbfd9f..c1066f33 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,7 +1,7 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); -CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER, comment TEXT); +CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); diff --git a/gravity.sh b/gravity.sh index 6e238e68..331b632d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -122,7 +122,7 @@ database_table_from_file() { do # Only add non-empty lines if [[ ! 
-z "${domain}" ]]; then - echo "\"${domain}\",1,${timestamp}" >> "${tmpFile}" + echo "\"${domain}\",1,${timestamp},${timestamp}" >> "${tmpFile}" fi done inputfile="${tmpFile}" @@ -145,7 +145,7 @@ database_table_from_file() { # Move source file to backup directory, create directory if not existing mkdir -p "${backup_path}" mv "${source}" "${backup_file}" 2> /dev/null || \ - echo -e " ${CROSS} Unable to remove ${source}" + echo -e " ${CROSS} Unable to backup ${source} to ${backup_path}" } # Migrate pre-v5.0 list files to database-based Pi-hole versions From 2180531a84516f6720985f034fc8d0cdb589e6a7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 29 Apr 2019 19:43:35 +0200 Subject: [PATCH 056/366] Add trigger for automatic updates of the date_modified column and improve formating in the gravity.db template Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 37 ++++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index c1066f33..dc0f23ed 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -8,21 +8,38 @@ CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); INSERT INTO info VALUES("version","1"); CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain -FROM gravity a -WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); + FROM gravity a + WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain -FROM whitelist a -WHERE a.enabled == 1; + FROM whitelist a + WHERE a.enabled == 1; +CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist + BEGIN + UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain -FROM blacklist a -WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist; + FROM blacklist a + WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist; +CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist + BEGIN + UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; CREATE VIEW vw_regex AS SELECT DISTINCT a.domain -FROM regex a -WHERE a.enabled == 1; + FROM regex a + WHERE a.enabled == 1; +CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex + BEGIN + UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; CREATE VIEW vw_adlists AS SELECT DISTINCT a.address -FROM adlists a -WHERE a.enabled == 1; + FROM adlists a + WHERE a.enabled == 1; +CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists + BEGIN + UPDATE adlists SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; + END; + From 5c575e73c7e3d9efd84a86740d7c09b0316089ff Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Tue, 30 Apr 2019 22:41:12 +0200 Subject: [PATCH 057/366] Adds world readable attribute to files created by Pi-hole to circumvent #2724 Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- advanced/Scripts/list.sh | 5 ++++- advanced/Scripts/piholeCheckout.sh | 3 +++ advanced/Scripts/piholeLogFlush.sh | 4 +++- advanced/Scripts/updatecheck.sh | 3 +++ advanced/Scripts/webpage.sh | 3 +++ automated install/basic-install.sh | 35 +++++++++++++++++++++++++----- automated install/uninstall.sh | 2 +- gravity.sh | 8 ++++++- pihole | 4 ++++ 9 files changed, 58 insertions(+), 9 deletions(-) 
diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index c1d95aae..e0455e0c 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -92,11 +92,13 @@ PoplistFile() { # Check whitelist file exists, and if not, create it if [[ ! -f "${whitelist}" ]]; then touch "${whitelist}" + chmod a+r "${whitelist}" fi # Check blacklist file exists, and if not, create it if [[ ! -f "${blacklist}" ]]; then touch "${blacklist}" + chmod a+r "${blacklist}" fi for dom in "${domList[@]}"; do @@ -239,9 +241,10 @@ Displaylist() { NukeList() { if [[ -f "${listMain}" ]]; then # Back up original list - cp "${listMain}" "${listMain}.bck~" + cp -p "${listMain}" "${listMain}.bck~" # Empty out file echo "" > "${listMain}" + chmod a+r "${listMain}" fi } diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh index 1bfe5e21..a982289f 100644 --- a/advanced/Scripts/piholeCheckout.sh +++ b/advanced/Scripts/piholeCheckout.sh @@ -90,6 +90,7 @@ checkout() { local path path="development/${binary}" echo "development" > /etc/pihole/ftlbranch + chmod a+r /etc/pihole/ftlbranch elif [[ "${1}" == "master" ]] ; then # Shortcut to check out master branches echo -e " ${INFO} Shortcut \"master\" detected - checking out master branches..." @@ -104,6 +105,7 @@ checkout() { local path path="master/${binary}" echo "master" > /etc/pihole/ftlbranch + chmod a+r /etc/pihole/ftlbranch elif [[ "${1}" == "core" ]] ; then str="Fetching branches from ${piholeGitUrl}" echo -ne " ${INFO} $str" @@ -166,6 +168,7 @@ checkout() { if check_download_exists "$path"; then echo " ${TICK} Branch ${2} exists" echo "${2}" > /etc/pihole/ftlbranch + chmod a+r /etc/pihole/ftlbranch FTLinstall "${binary}" restart_service pihole-FTL enable_service pihole-FTL diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh index 561fbce7..1e4b7abe 100755 --- a/advanced/Scripts/piholeLogFlush.sh +++ b/advanced/Scripts/piholeLogFlush.sh @@ -39,8 +39,9 @@ if [[ "$@" == *"once"* ]]; then # Note that moving the file is not an option, as # dnsmasq would happily continue writing into the # moved file (it will have the same file handler) - cp /var/log/pihole.log /var/log/pihole.log.1 + cp -p /var/log/pihole.log /var/log/pihole.log.1 echo " " > /var/log/pihole.log + chmod a+r /var/log/pihole.log fi else # Manual flushing @@ -53,6 +54,7 @@ else echo " " > /var/log/pihole.log if [ -f /var/log/pihole.log.1 ]; then echo " " > /var/log/pihole.log.1 + chmod a+r /var/log/pihole.log.1 fi fi # Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history) diff --git a/advanced/Scripts/updatecheck.sh b/advanced/Scripts/updatecheck.sh index 257c1929..55fce328 100755 --- a/advanced/Scripts/updatecheck.sh +++ b/advanced/Scripts/updatecheck.sh @@ -51,6 +51,7 @@ if [[ "$2" == "remote" ]]; then GITHUB_CORE_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/pi-hole/releases/latest' 2> /dev/null)")" echo -n "${GITHUB_CORE_VERSION}" > "${GITHUB_VERSION_FILE}" + chmod a+r "${GITHUB_VERSION_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then GITHUB_WEB_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/AdminLTE/releases/latest' 2> /dev/null)")" @@ -66,6 +67,7 @@ else CORE_BRANCH="$(get_local_branch /etc/.pihole)" echo -n "${CORE_BRANCH}" > "${LOCAL_BRANCH_FILE}" + chmod a+r "${LOCAL_BRANCH_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then WEB_BRANCH="$(get_local_branch /var/www/html/admin)" @@ -79,6 +81,7 @@ 
else CORE_VERSION="$(get_local_version /etc/.pihole)" echo -n "${CORE_VERSION}" > "${LOCAL_VERSION_FILE}" + chmod a+r "${LOCAL_VERSION_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then WEB_VERSION="$(get_local_version /var/www/html/admin)" diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index c6852896..eba539a6 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -322,6 +322,7 @@ dhcp-option=option:router,${DHCP_ROUTER} dhcp-leasefile=/etc/pihole/dhcp.leases #quiet-dhcp " > "${dhcpconfig}" + chmod a+r "${dhcpconfig}" if [[ "${PIHOLE_DOMAIN}" != "none" ]]; then echo "domain=${PIHOLE_DOMAIN}" >> "${dhcpconfig}" @@ -541,11 +542,13 @@ addAudit() do echo "${var}" >> /etc/pihole/auditlog.list done + chmod a+r /etc/pihole/auditlog.list } clearAudit() { echo -n "" > /etc/pihole/auditlog.list + chmod a+r /etc/pihole/auditlog.list } SetPrivacyLevel() { diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 94377647..2d85d854 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -194,7 +194,7 @@ if is_command apt-get ; then exit # exit the installer else printf " %b Enabling universe package repository for Ubuntu Bionic\\n" "${INFO}" - cp ${APT_SOURCES} ${APT_SOURCES}.backup # Backup current repo list + cp -p ${APT_SOURCES} ${APT_SOURCES}.backup # Backup current repo list printf " %b Backed up current configuration to %s\\n" "${TICK}" "${APT_SOURCES}.backup" add-apt-repository universe printf " %b Enabled %s\\n" "${TICK}" "'universe' repository" @@ -847,7 +847,7 @@ setIFCFG() { # Put the IP in variables without the CIDR notation printf -v CIDR "%s" "${IPV4_ADDRESS##*/}" # Backup existing interface configuration: - cp "${IFCFG_FILE}" "${IFCFG_FILE}".pihole.orig + cp -p "${IFCFG_FILE}" "${IFCFG_FILE}".pihole.orig # Build Interface configuration file using the GLOBAL variables we have { echo "# Configured via Pi-hole installer" @@ -1241,7 +1241,8 @@ version_check_dnsmasq() { printf "%b %b Backing up dnsmasq.conf to dnsmasq.conf.orig...\\n" "${OVER}" "${TICK}" printf " %b Restoring default dnsmasq.conf..." "${INFO}" # and replace it with the default - cp ${dnsmasq_original_config} ${dnsmasq_conf} + cp -p ${dnsmasq_original_config} ${dnsmasq_conf} + chmod a+r ${dnsmasq_conf} printf "%b %b Restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" # Otherwise, else @@ -1252,7 +1253,7 @@ version_check_dnsmasq() { # If a file cannot be found, printf " %b No dnsmasq.conf found... restoring default dnsmasq.conf..." "${INFO}" # restore the default one - cp ${dnsmasq_original_config} ${dnsmasq_conf} + cp -p ${dnsmasq_original_config} ${dnsmasq_conf} printf "%b %b No dnsmasq.conf found... restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" fi @@ -1260,9 +1261,11 @@ version_check_dnsmasq() { # Check to see if dnsmasq directory exists (it may not due to being a fresh install and dnsmasq no longer being a dependency) if [[ ! 
-d "/etc/dnsmasq.d" ]];then mkdir "/etc/dnsmasq.d" + chmod 755 "/etc/dnsmasq.d" fi # Copy the new Pi-hole DNS config file into the dnsmasq.d directory cp ${dnsmasq_pihole_01_snippet} ${dnsmasq_pihole_01_location} + chmod a+r ${dnsmasq_pihole_01_location} printf "%b %b Copying 01-pihole.conf to /etc/dnsmasq.d/01-pihole.conf\\n" "${OVER}" "${TICK}" # Replace our placeholder values with the GLOBAL DNS variables that we populated earlier # First, swap in the interface to listen on @@ -1382,6 +1385,7 @@ installConfigs() { mkdir /etc/lighttpd # and set the owners chown "${USER}":root /etc/lighttpd + chmod 755 /etc/lighttpd # Otherwise, if the config file already exists elif [[ -f "/etc/lighttpd/lighttpd.conf" ]]; then # back up the original @@ -1389,8 +1393,10 @@ installConfigs() { fi # and copy in the config file Pi-hole needs cp ${PI_HOLE_LOCAL_REPO}/advanced/${LIGHTTPD_CFG} /etc/lighttpd/lighttpd.conf + chmod a+r /etc/lighttpd/lighttpd.conf # Make sure the external.conf file exists, as lighttpd v1.4.50 crashes without it touch /etc/lighttpd/external.conf + chmod a+r /etc/lighttpd/external.conf # if there is a custom block page in the html/pihole directory, replace 404 handler in lighttpd config if [[ -f "${PI_HOLE_BLOCKPAGE_DIR}/custom.php" ]]; then sed -i 's/^\(server\.error-handler-404\s*=\s*\).*$/\1"pihole\/custom\.php"/' /etc/lighttpd/lighttpd.conf @@ -1422,15 +1428,26 @@ install_manpage() { if [[ ! -d "/usr/local/share/man/man8" ]]; then # if not present, create man8 directory mkdir /usr/local/share/man/man8 + chown root:staff /usr/local/share/man/man8 + chmod a+r /usr/local/share/man/man8 + chmod a+x /usr/local/share/man/man8 + chmod g+s /usr/local/share/man/man8 fi if [[ ! -d "/usr/local/share/man/man5" ]]; then - # if not present, create man8 directory + # if not present, create man5 directory mkdir /usr/local/share/man/man5 + chown root:staff /usr/local/share/man/man5 + chmod a+r /usr/local/share/man/man5 + chmod a+x /usr/local/share/man/man5 + chmod g+s /usr/local/share/man/man5 fi # Testing complete, copy the files & update the man db cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 + chmod a+r /usr/local/share/man/man8/pihole.8 cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 + chmod a+r /usr/local/share/man/man8/pihole-FTL.8 cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5 + chmod a+r /usr/local/share/man/man5/pihole-FTL.conf.5 if mandb -q &>/dev/null; then # Updated successfully printf "%b %b man pages installed and database updated\\n" "${OVER}" "${TICK}" @@ -1711,6 +1728,9 @@ installCron() { printf "\\n %b %s..." "${INFO}" "${str}" # Copy the cron file over from the local repo cp ${PI_HOLE_LOCAL_REPO}/advanced/Templates/pihole.cron /etc/cron.d/pihole + # File must not be world or group writeable and must be owned by root + chmod 644 /etc/cron.d/pihole + chown root:root /etc/cron.d/pihole # Randomize gravity update time sed -i "s/59 1 /$((1 + RANDOM % 58)) $((3 + RANDOM % 2))/" /etc/cron.d/pihole # Randomize update checker time @@ -1818,6 +1838,7 @@ finalExports() { echo "INSTALL_WEB_INTERFACE=${INSTALL_WEB_INTERFACE}" echo "LIGHTTPD_ENABLED=${LIGHTTPD_ENABLED}" }>> "${setupVars}" + chmod 744 "${setupVars}" # Set the privacy level sed -i '/PRIVACYLEVEL/d' "${PI_HOLE_CONFIG_DIR}/pihole-FTL.conf" @@ -1841,6 +1862,7 @@ installLogrotate() { printf "\\n %b %s..." 
"${INFO}" "${str}" # Copy the file over from the local repo cp ${PI_HOLE_LOCAL_REPO}/advanced/Templates/logrotate /etc/pihole/logrotate + chmod a+r /etc/pihole/logrotate # Different operating systems have different user / group # settings for logrotate that makes it impossible to create # a static logrotate file that will work with e.g. @@ -2431,6 +2453,7 @@ copy_to_install_log() { # Copy the contents of file descriptor 3 into the install log # Since we use color codes such as '\e[1;33m', they should be removed sed 's/\[[0-9;]\{1,5\}m//g' < /proc/$$/fd/3 > "${installLogLoc}" + chmod a+r "${installLogLoc}" } main() { @@ -2516,6 +2539,8 @@ main() { welcomeDialogs # Create directory for Pi-hole storage mkdir -p /etc/pihole/ + chmod a+r /ect/pihole/ + chmod a+x /etc/pihole/ # Determine available interfaces get_available_interfaces # Find interfaces and let the user choose one diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index d0a6dcf0..732fc246 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -153,7 +153,7 @@ removeNoPurge() { # Restore Resolved if [[ -e /etc/systemd/resolved.conf.orig ]]; then - ${SUDO} cp /etc/systemd/resolved.conf.orig /etc/systemd/resolved.conf + ${SUDO} cp -p /etc/systemd/resolved.conf.orig /etc/systemd/resolved.conf systemctl reload-or-restart systemd-resolved fi diff --git a/gravity.sh b/gravity.sh index 015764ac..6ae0e337 100755 --- a/gravity.sh +++ b/gravity.sh @@ -485,6 +485,7 @@ gravity_SortAndFilterConsolidatedList() { fi sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}" + chmod a+r "${piholeDir}/${preEventHorizon}" if [[ "${haveSourceUrls}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" @@ -509,6 +510,7 @@ gravity_Whitelist() { # Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile comm -23 "${piholeDir}/${preEventHorizon}" <(sort "${whitelistFile}") > "${piholeDir}/${whitelistMatter}" + chmod a+r "${piholeDir}/${whitelistMatter}" echo -e "${OVER} ${INFO} ${str}" } @@ -561,6 +563,7 @@ gravity_ParseLocalDomains() { # Empty $localList if it already exists, otherwise, create it : > "${localList}" + chmod a+r "${localList}" gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}" @@ -581,8 +584,9 @@ gravity_ParseBlacklistDomains() { mv "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}" else # There was no whitelist file, so use preEventHorizon instead of whitelistMatter. 
- cp "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}" + cp -p "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}" fi + chmod a+r "${piholeDir}/${accretionDisc}" # Move the file over as /etc/pihole/gravity.list so dnsmasq can use it output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 ) @@ -592,6 +596,7 @@ gravity_ParseBlacklistDomains() { echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}" gravity_Cleanup "error" fi + chmod a+r "${adList}" } # Create user-added blacklist entries @@ -602,6 +607,7 @@ gravity_ParseUserDomains() { # Copy the file over as /etc/pihole/black.list so dnsmasq can use it cp "${blacklistFile}" "${blackList}" 2> /dev/null || \ echo -e "\\n ${CROSS} Unable to move ${blacklistFile##*/} to ${piholeDir}" + chmod a+r "${blackList}" } # Trap Ctrl-C diff --git a/pihole b/pihole index 84a5623f..59a99af1 100755 --- a/pihole +++ b/pihole @@ -151,10 +151,12 @@ Time: if [[ -e "${gravitylist}" ]]; then mv "${gravitylist}" "${gravitylist}.bck" echo "" > "${gravitylist}" + chmod a+r "${gravitylist}" fi if [[ -e "${blacklist}" ]]; then mv "${blacklist}" "${blacklist}.bck" echo "" > "${blacklist}" + chmod a+r "${blacklist}" fi if [[ $# > 1 ]]; then local error=false @@ -206,9 +208,11 @@ Time: if [[ -e "${gravitylist}.bck" ]]; then mv "${gravitylist}.bck" "${gravitylist}" + chmod a+r "${gravitylist}" fi if [[ -e "${blacklist}.bck" ]]; then mv "${blacklist}.bck" "${blacklist}" + chmod a+r "${blacklist}" fi sed -i "/BLOCKING_ENABLED=/d" "${setupVars}" echo "BLOCKING_ENABLED=true" >> "${setupVars}" From 8a92fb24c4d4e3caff4286ea8698becdfe986a58 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Wed, 1 May 2019 11:20:26 +0200 Subject: [PATCH 058/366] Incorporates changes requested by @Mcat12 Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- advanced/Scripts/list.sh | 6 +++--- advanced/Scripts/piholeCheckout.sh | 6 +++--- advanced/Scripts/piholeLogFlush.sh | 4 ++-- advanced/Scripts/updatecheck.sh | 6 +++--- advanced/Scripts/webpage.sh | 6 +++--- automated install/basic-install.sh | 33 ++++++++++++------------------ gravity.sh | 12 +++++------ pihole | 8 ++++---- 8 files changed, 37 insertions(+), 44 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index e0455e0c..b4fffbaf 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -92,13 +92,13 @@ PoplistFile() { # Check whitelist file exists, and if not, create it if [[ ! -f "${whitelist}" ]]; then touch "${whitelist}" - chmod a+r "${whitelist}" + chmod 644 "${whitelist}" fi # Check blacklist file exists, and if not, create it if [[ ! -f "${blacklist}" ]]; then touch "${blacklist}" - chmod a+r "${blacklist}" + chmod 644 "${blacklist}" fi for dom in "${domList[@]}"; do @@ -244,7 +244,7 @@ NukeList() { cp -p "${listMain}" "${listMain}.bck~" # Empty out file echo "" > "${listMain}" - chmod a+r "${listMain}" + chmod 644 "${listMain}" fi } diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh index a982289f..13fa3402 100644 --- a/advanced/Scripts/piholeCheckout.sh +++ b/advanced/Scripts/piholeCheckout.sh @@ -90,7 +90,7 @@ checkout() { local path path="development/${binary}" echo "development" > /etc/pihole/ftlbranch - chmod a+r /etc/pihole/ftlbranch + chmod 644 /etc/pihole/ftlbranch elif [[ "${1}" == "master" ]] ; then # Shortcut to check out master branches echo -e " ${INFO} Shortcut \"master\" detected - checking out master branches..." 
@@ -105,7 +105,7 @@ checkout() { local path path="master/${binary}" echo "master" > /etc/pihole/ftlbranch - chmod a+r /etc/pihole/ftlbranch + chmod 644 /etc/pihole/ftlbranch elif [[ "${1}" == "core" ]] ; then str="Fetching branches from ${piholeGitUrl}" echo -ne " ${INFO} $str" @@ -168,7 +168,7 @@ checkout() { if check_download_exists "$path"; then echo " ${TICK} Branch ${2} exists" echo "${2}" > /etc/pihole/ftlbranch - chmod a+r /etc/pihole/ftlbranch + chmod 644 /etc/pihole/ftlbranch FTLinstall "${binary}" restart_service pihole-FTL enable_service pihole-FTL diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh index 1e4b7abe..51e94d7c 100755 --- a/advanced/Scripts/piholeLogFlush.sh +++ b/advanced/Scripts/piholeLogFlush.sh @@ -41,7 +41,7 @@ if [[ "$@" == *"once"* ]]; then # moved file (it will have the same file handler) cp -p /var/log/pihole.log /var/log/pihole.log.1 echo " " > /var/log/pihole.log - chmod a+r /var/log/pihole.log + chmod 644 /var/log/pihole.log fi else # Manual flushing @@ -54,7 +54,7 @@ else echo " " > /var/log/pihole.log if [ -f /var/log/pihole.log.1 ]; then echo " " > /var/log/pihole.log.1 - chmod a+r /var/log/pihole.log.1 + chmod 644 /var/log/pihole.log.1 fi fi # Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history) diff --git a/advanced/Scripts/updatecheck.sh b/advanced/Scripts/updatecheck.sh index 55fce328..972ab585 100755 --- a/advanced/Scripts/updatecheck.sh +++ b/advanced/Scripts/updatecheck.sh @@ -51,7 +51,7 @@ if [[ "$2" == "remote" ]]; then GITHUB_CORE_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/pi-hole/releases/latest' 2> /dev/null)")" echo -n "${GITHUB_CORE_VERSION}" > "${GITHUB_VERSION_FILE}" - chmod a+r "${GITHUB_VERSION_FILE}" + chmod 644 "${GITHUB_VERSION_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then GITHUB_WEB_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/AdminLTE/releases/latest' 2> /dev/null)")" @@ -67,7 +67,7 @@ else CORE_BRANCH="$(get_local_branch /etc/.pihole)" echo -n "${CORE_BRANCH}" > "${LOCAL_BRANCH_FILE}" - chmod a+r "${LOCAL_BRANCH_FILE}" + chmod 644 "${LOCAL_BRANCH_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then WEB_BRANCH="$(get_local_branch /var/www/html/admin)" @@ -81,7 +81,7 @@ else CORE_VERSION="$(get_local_version /etc/.pihole)" echo -n "${CORE_VERSION}" > "${LOCAL_VERSION_FILE}" - chmod a+r "${LOCAL_VERSION_FILE}" + chmod 644 "${LOCAL_VERSION_FILE}" if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then WEB_VERSION="$(get_local_version /var/www/html/admin)" diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index eba539a6..467eebd6 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -322,7 +322,7 @@ dhcp-option=option:router,${DHCP_ROUTER} dhcp-leasefile=/etc/pihole/dhcp.leases #quiet-dhcp " > "${dhcpconfig}" - chmod a+r "${dhcpconfig}" + chmod 644 "${dhcpconfig}" if [[ "${PIHOLE_DOMAIN}" != "none" ]]; then echo "domain=${PIHOLE_DOMAIN}" >> "${dhcpconfig}" @@ -542,13 +542,13 @@ addAudit() do echo "${var}" >> /etc/pihole/auditlog.list done - chmod a+r /etc/pihole/auditlog.list + chmod 644 /etc/pihole/auditlog.list } clearAudit() { echo -n "" > /etc/pihole/auditlog.list - chmod a+r /etc/pihole/auditlog.list + chmod 644 /etc/pihole/auditlog.list } SetPrivacyLevel() { diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 2d85d854..c1f69efb 100755 --- a/automated install/basic-install.sh 
+++ b/automated install/basic-install.sh @@ -1242,7 +1242,7 @@ version_check_dnsmasq() { printf " %b Restoring default dnsmasq.conf..." "${INFO}" # and replace it with the default cp -p ${dnsmasq_original_config} ${dnsmasq_conf} - chmod a+r ${dnsmasq_conf} + chmod 644 ${dnsmasq_conf} printf "%b %b Restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" # Otherwise, else @@ -1265,7 +1265,7 @@ version_check_dnsmasq() { fi # Copy the new Pi-hole DNS config file into the dnsmasq.d directory cp ${dnsmasq_pihole_01_snippet} ${dnsmasq_pihole_01_location} - chmod a+r ${dnsmasq_pihole_01_location} + chmod 644 ${dnsmasq_pihole_01_location} printf "%b %b Copying 01-pihole.conf to /etc/dnsmasq.d/01-pihole.conf\\n" "${OVER}" "${TICK}" # Replace our placeholder values with the GLOBAL DNS variables that we populated earlier # First, swap in the interface to listen on @@ -1393,10 +1393,10 @@ installConfigs() { fi # and copy in the config file Pi-hole needs cp ${PI_HOLE_LOCAL_REPO}/advanced/${LIGHTTPD_CFG} /etc/lighttpd/lighttpd.conf - chmod a+r /etc/lighttpd/lighttpd.conf + chmod 644 /etc/lighttpd/lighttpd.conf # Make sure the external.conf file exists, as lighttpd v1.4.50 crashes without it touch /etc/lighttpd/external.conf - chmod a+r /etc/lighttpd/external.conf + chmod 644 /etc/lighttpd/external.conf # if there is a custom block page in the html/pihole directory, replace 404 handler in lighttpd config if [[ -f "${PI_HOLE_BLOCKPAGE_DIR}/custom.php" ]]; then sed -i 's/^\(server\.error-handler-404\s*=\s*\).*$/\1"pihole\/custom\.php"/' /etc/lighttpd/lighttpd.conf @@ -1428,26 +1428,20 @@ install_manpage() { if [[ ! -d "/usr/local/share/man/man8" ]]; then # if not present, create man8 directory mkdir /usr/local/share/man/man8 - chown root:staff /usr/local/share/man/man8 - chmod a+r /usr/local/share/man/man8 - chmod a+x /usr/local/share/man/man8 - chmod g+s /usr/local/share/man/man8 + chmod 755 /usr/local/share/man/man8 fi if [[ ! -d "/usr/local/share/man/man5" ]]; then # if not present, create man5 directory mkdir /usr/local/share/man/man5 - chown root:staff /usr/local/share/man/man5 - chmod a+r /usr/local/share/man/man5 - chmod a+x /usr/local/share/man/man5 - chmod g+s /usr/local/share/man/man5 + chmod 755 /usr/local/share/man/man5 fi # Testing complete, copy the files & update the man db cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 - chmod a+r /usr/local/share/man/man8/pihole.8 + chmod 644 /usr/local/share/man/man8/pihole.8 cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 - chmod a+r /usr/local/share/man/man8/pihole-FTL.8 + chmod 644 /usr/local/share/man/man8/pihole-FTL.8 cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5 - chmod a+r /usr/local/share/man/man5/pihole-FTL.conf.5 + chmod 644 /usr/local/share/man/man5/pihole-FTL.conf.5 if mandb -q &>/dev/null; then # Updated successfully printf "%b %b man pages installed and database updated\\n" "${OVER}" "${TICK}" @@ -1838,7 +1832,7 @@ finalExports() { echo "INSTALL_WEB_INTERFACE=${INSTALL_WEB_INTERFACE}" echo "LIGHTTPD_ENABLED=${LIGHTTPD_ENABLED}" }>> "${setupVars}" - chmod 744 "${setupVars}" + chmod 644 "${setupVars}" # Set the privacy level sed -i '/PRIVACYLEVEL/d' "${PI_HOLE_CONFIG_DIR}/pihole-FTL.conf" @@ -1862,7 +1856,7 @@ installLogrotate() { printf "\\n %b %s..." 
"${INFO}" "${str}" # Copy the file over from the local repo cp ${PI_HOLE_LOCAL_REPO}/advanced/Templates/logrotate /etc/pihole/logrotate - chmod a+r /etc/pihole/logrotate + chmod 644 /etc/pihole/logrotate # Different operating systems have different user / group # settings for logrotate that makes it impossible to create # a static logrotate file that will work with e.g. @@ -2453,7 +2447,7 @@ copy_to_install_log() { # Copy the contents of file descriptor 3 into the install log # Since we use color codes such as '\e[1;33m', they should be removed sed 's/\[[0-9;]\{1,5\}m//g' < /proc/$$/fd/3 > "${installLogLoc}" - chmod a+r "${installLogLoc}" + chmod 644 "${installLogLoc}" } main() { @@ -2539,8 +2533,7 @@ main() { welcomeDialogs # Create directory for Pi-hole storage mkdir -p /etc/pihole/ - chmod a+r /ect/pihole/ - chmod a+x /etc/pihole/ + chmod 755 /ect/pihole/ # Determine available interfaces get_available_interfaces # Find interfaces and let the user choose one diff --git a/gravity.sh b/gravity.sh index 6ae0e337..a2f2c782 100755 --- a/gravity.sh +++ b/gravity.sh @@ -485,7 +485,7 @@ gravity_SortAndFilterConsolidatedList() { fi sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}" - chmod a+r "${piholeDir}/${preEventHorizon}" + chmod 644 "${piholeDir}/${preEventHorizon}" if [[ "${haveSourceUrls}" == true ]]; then echo -e "${OVER} ${TICK} ${str}" @@ -510,7 +510,7 @@ gravity_Whitelist() { # Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile comm -23 "${piholeDir}/${preEventHorizon}" <(sort "${whitelistFile}") > "${piholeDir}/${whitelistMatter}" - chmod a+r "${piholeDir}/${whitelistMatter}" + chmod 644 "${piholeDir}/${whitelistMatter}" echo -e "${OVER} ${INFO} ${str}" } @@ -563,7 +563,7 @@ gravity_ParseLocalDomains() { # Empty $localList if it already exists, otherwise, create it : > "${localList}" - chmod a+r "${localList}" + chmod 644 "${localList}" gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}" @@ -586,7 +586,7 @@ gravity_ParseBlacklistDomains() { # There was no whitelist file, so use preEventHorizon instead of whitelistMatter. 
cp -p "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}" fi - chmod a+r "${piholeDir}/${accretionDisc}" + chmod 644 "${piholeDir}/${accretionDisc}" # Move the file over as /etc/pihole/gravity.list so dnsmasq can use it output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 ) @@ -596,7 +596,7 @@ gravity_ParseBlacklistDomains() { echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}" gravity_Cleanup "error" fi - chmod a+r "${adList}" + chmod 644 "${adList}" } # Create user-added blacklist entries @@ -607,7 +607,7 @@ gravity_ParseUserDomains() { # Copy the file over as /etc/pihole/black.list so dnsmasq can use it cp "${blacklistFile}" "${blackList}" 2> /dev/null || \ echo -e "\\n ${CROSS} Unable to move ${blacklistFile##*/} to ${piholeDir}" - chmod a+r "${blackList}" + chmod 644 "${blackList}" } # Trap Ctrl-C diff --git a/pihole b/pihole index 59a99af1..541434a0 100755 --- a/pihole +++ b/pihole @@ -151,12 +151,12 @@ Time: if [[ -e "${gravitylist}" ]]; then mv "${gravitylist}" "${gravitylist}.bck" echo "" > "${gravitylist}" - chmod a+r "${gravitylist}" + chmod 644 "${gravitylist}" fi if [[ -e "${blacklist}" ]]; then mv "${blacklist}" "${blacklist}.bck" echo "" > "${blacklist}" - chmod a+r "${blacklist}" + chmod 644 "${blacklist}" fi if [[ $# > 1 ]]; then local error=false @@ -208,11 +208,11 @@ Time: if [[ -e "${gravitylist}.bck" ]]; then mv "${gravitylist}.bck" "${gravitylist}" - chmod a+r "${gravitylist}" + chmod 644 "${gravitylist}" fi if [[ -e "${blacklist}.bck" ]]; then mv "${blacklist}.bck" "${blacklist}" - chmod a+r "${blacklist}" + chmod 644 "${blacklist}" fi sed -i "/BLOCKING_ENABLED=/d" "${setupVars}" echo "BLOCKING_ENABLED=true" >> "${setupVars}" From 487718512dabdbdb4e3858eeed98c711f3de6fa7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 16:37:23 +0200 Subject: [PATCH 059/366] Remove DISTINCT attribute from views. They are not needed as the corresponding data is obtained from fields with UNIQUE attribute so duplications are already prevented at domain insertion. The elimination of the duplication check on imort through the views slightly reduces the load during import (I observed about 8 percent time saved on my NanoPi NEO). 
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index dc0f23ed..dac0a50c 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -7,11 +7,11 @@ CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); INSERT INTO info VALUES("version","1"); -CREATE VIEW vw_gravity AS SELECT DISTINCT a.domain +CREATE VIEW vw_gravity AS SELECT a.domain FROM gravity a WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); -CREATE VIEW vw_whitelist AS SELECT DISTINCT a.domain +CREATE VIEW vw_whitelist AS SELECT a.domain FROM whitelist a WHERE a.enabled == 1; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -19,7 +19,7 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_blacklist AS SELECT DISTINCT a.domain +CREATE VIEW vw_blacklist AS SELECT a.domain FROM blacklist a WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -27,7 +27,7 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_regex AS SELECT DISTINCT a.domain +CREATE VIEW vw_regex AS SELECT a.domain FROM regex a WHERE a.enabled == 1; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex @@ -35,7 +35,7 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_adlists AS SELECT DISTINCT a.address +CREATE VIEW vw_adlists AS SELECT a.address FROM adlists a WHERE a.enabled == 1; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists From d5781fb1105d0c0ca1eda09c8e92f19ac135ff0c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 16:38:41 +0200 Subject: [PATCH 060/366] Fix extra space in front on END statement in CREATE TRIGGER commands Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index dac0a50c..0960018b 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -17,7 +17,7 @@ CREATE VIEW vw_whitelist AS SELECT a.domain CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist BEGIN UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; + END; CREATE VIEW vw_blacklist AS SELECT a.domain FROM blacklist a @@ -25,7 +25,7 @@ CREATE VIEW vw_blacklist AS SELECT a.domain CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist BEGIN UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; + END; CREATE VIEW vw_regex AS SELECT a.domain FROM regex a @@ -33,7 +33,7 @@ CREATE VIEW vw_regex AS SELECT a.domain CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex BEGIN UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; + END; CREATE VIEW vw_adlists AS SELECT a.address FROM adlists a @@ -41,5 +41,5 @@ CREATE VIEW vw_adlists AS SELECT a.address CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists BEGIN UPDATE adlists SET date_modified = 
(cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; - END; + END; From bc9b62363895b8b6483e0f518ba46607b75f9909 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 16:56:16 +0200 Subject: [PATCH 061/366] Modify query.sh to use the gravity database Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) mode change 100644 => 100755 advanced/Scripts/query.sh diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh old mode 100644 new mode 100755 index 69a3c7a4..8eb7c404 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -11,7 +11,7 @@ # Globals piholeDir="/etc/pihole" -adListsList="$piholeDir/adlists.list" +gravityDBfile="${piholeDir}/gravity.db" wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf" options="$*" adlist="" @@ -73,11 +73,6 @@ Options: exit 0 fi -if [[ ! -e "$adListsList" ]]; then - echo -e "${COL_LIGHT_RED}The file $adListsList was not found${COL_NC}" - exit 1 -fi - # Handle valid options if [[ "${options}" == *"-bp"* ]]; then exact="exact"; blockpage=true @@ -185,12 +180,9 @@ if [[ -z "${exact}" ]]; then fi # Get adlist file content as array -if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then - for adlistUrl in $(< "${adListsList}"); do - if [[ "${adlistUrl:0:4}" =~ (http|www.) ]]; then - adlists+=("${adlistUrl}") - fi - done +if [[ -n "${blockpage}" ]]; then + # Retrieve source URLs from gravity database + mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)" fi # Print "Exact matches for" title From b372f808ddf1ff0fed03b1f314c0a85e487766cd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:06:14 +0200 Subject: [PATCH 062/366] Fix lint errors Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 2 +- gravity.sh | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 8eb7c404..a4ac895b 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -180,7 +180,7 @@ if [[ -z "${exact}" ]]; then fi # Get adlist file content as array -if [[ -n "${blockpage}" ]]; then +if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then # Retrieve source URLs from gravity database mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)" fi diff --git a/gravity.sh b/gravity.sh index 331b632d..9f4065e7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -87,10 +87,11 @@ generate_gravity_database() { # Import domains from file and store them in the specified database table database_table_from_file() { # Define locals - local table="${1}" - local source="${2}" - local backup_path="${piholeDir}/migration_backup" - local backup_file="${backup_path}/$(basename "${2}")" + local table source backup_path backup_file + table="${1}" + source="${2}" + backup_path="${piholeDir}/migration_backup" + backup_file="${backup_path}/$(basename "${2}")" # Create database file if not present if [ ! 
-e "${gravityDBfile}" ]; then From 983117d788bd32ef1521a5c2da3088a2039e23bf Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:31:34 +0200 Subject: [PATCH 063/366] Add id (primary key, autoincrement) field to lists tables Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 28 ++++++++++++++++++++++++---- gravity.sh | 6 +++++- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 0960018b..65bd7448 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,7 +1,27 @@ -CREATE TABLE whitelist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE blacklist (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE regex (domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE adlists (address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); +CREATE TABLE whitelist (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT); +CREATE TABLE blacklist (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT); +CREATE TABLE regex (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT); +CREATE TABLE adlists (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + address TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT); CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); diff --git a/gravity.sh b/gravity.sh index 9f4065e7..c1eb0720 100755 --- a/gravity.sh +++ b/gravity.sh @@ -118,12 +118,16 @@ database_table_from_file() { inputfile="${source}" else # Apply format for white-, blacklist, regex, and adlists tables + local rowid + declare -i rowid + rowid=1 # Read file line by line grep -v '^ *#' < "${source}" | while IFS= read -r domain do # Only add non-empty lines if [[ ! 
-z "${domain}" ]]; then - echo "\"${domain}\",1,${timestamp},${timestamp}" >> "${tmpFile}" + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp}" >> "${tmpFile}" + rowid+=1 fi done inputfile="${tmpFile}" From 5e26e8245ba4f849577bb1ba232389ba00e59247 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:50:36 +0200 Subject: [PATCH 064/366] Order results of queries to list views by id key Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 65bd7448..d1651619 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -33,7 +33,9 @@ CREATE VIEW vw_gravity AS SELECT a.domain CREATE VIEW vw_whitelist AS SELECT a.domain FROM whitelist a - WHERE a.enabled == 1; + WHERE a.enabled == 1 + ORDER BY a.id; + CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist BEGIN UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; @@ -41,7 +43,9 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist CREATE VIEW vw_blacklist AS SELECT a.domain FROM blacklist a - WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist; + WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist + ORDER BY a.id; + CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist BEGIN UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; @@ -49,7 +53,9 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist CREATE VIEW vw_regex AS SELECT a.domain FROM regex a - WHERE a.enabled == 1; + WHERE a.enabled == 1 + ORDER BY a.id; + CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex BEGIN UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; @@ -57,7 +63,9 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex CREATE VIEW vw_adlists AS SELECT a.address FROM adlists a - WHERE a.enabled == 1; + WHERE a.enabled == 1 + ORDER BY a.id; + CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists BEGIN UPDATE adlists SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; From 540c57f755c0e6255c2416738478491494f692b4 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:51:25 +0200 Subject: [PATCH 065/366] Add comment for automigrated list entries Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index c1eb0720..afd01aad 100755 --- a/gravity.sh +++ b/gravity.sh @@ -126,7 +126,7 @@ database_table_from_file() { do # Only add non-empty lines if [[ ! 
-z "${domain}" ]]; then - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp}" >> "${tmpFile}" + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" rowid+=1 fi done From 6c1e3a17f7534bb1ed7ea448401d11adc4b66406 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:54:29 +0200 Subject: [PATCH 066/366] Remove NOT NULL constraint on list ids as this is implied by PRIMARY KEY (even though most, if not all, examples on the web still use both attributes at the same time) Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index d1651619..4e78e026 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,22 +1,22 @@ -CREATE TABLE whitelist (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, +CREATE TABLE whitelist (id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE blacklist (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, +CREATE TABLE blacklist (id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE regex (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, +CREATE TABLE regex (id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT); -CREATE TABLE adlists (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, +CREATE TABLE adlists (id INTEGER PRIMARY KEY AUTOINCREMENT, address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), From 106f079afc24dd4f43699ca0a9f494f5e72dc850 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 17:57:23 +0200 Subject: [PATCH 067/366] Improve indentation (single tab) in SQLite template Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 71 ++++++++++++++++++++----------- 1 file changed, 45 insertions(+), 26 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 4e78e026..a8d48608 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,29 +1,48 @@ -CREATE TABLE whitelist (id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT); -CREATE TABLE blacklist (id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT); -CREATE TABLE regex (id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL 
DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT); -CREATE TABLE adlists (id INTEGER PRIMARY KEY AUTOINCREMENT, - address TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT); -CREATE TABLE gravity (domain TEXT UNIQUE NOT NULL); -CREATE TABLE info (property TEXT NOT NULL, value TEXT NOT NULL); +CREATE TABLE whitelist +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); +CREATE TABLE blacklist +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); +CREATE TABLE regex +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); +CREATE TABLE adlists +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + address TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); +CREATE TABLE gravity +( + domain TEXT UNIQUE NOT NULL +); +CREATE TABLE info +( + property TEXT NOT NULL, + value TEXT NOT NULL +); INSERT INTO info VALUES("version","1"); From 2f698904e406fc978cb00bf4759aebbc22b14479 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 18:03:35 +0200 Subject: [PATCH 068/366] Use PRIMARY KEY instead of NOT NULL also in gravity and info tables Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index a8d48608..fd6d2b28 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -36,11 +36,11 @@ CREATE TABLE adlists ); CREATE TABLE gravity ( - domain TEXT UNIQUE NOT NULL + domain TEXT UNIQUE PRIMARY KEY ); CREATE TABLE info ( - property TEXT NOT NULL, + property TEXT PRIMARY KEY, value TEXT NOT NULL ); From 008e88b84be1f818fe6518d954ddc8b4a7aca63c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 18:04:31 +0200 Subject: [PATCH 069/366] Only migrate legacy list files when we create gravity.db in the same gravity run Signed-off-by: DL6ER --- gravity.sh | 51 ++++++++++++++++++++++++++------------------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/gravity.sh b/gravity.sh index afd01aad..02f41f86 100755 --- a/gravity.sh +++ b/gravity.sh @@ -93,12 +93,6 @@ database_table_from_file() { backup_path="${piholeDir}/migration_backup" backup_file="${backup_path}/$(basename "${2}")" - # Create database file if not present - if [ ! 
-e "${gravityDBfile}" ]; then - echo -e " ${INFO} Creating new gravity database" - generate_gravity_database - fi - # Truncate table output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) status="$?" @@ -155,25 +149,32 @@ database_table_from_file() { # Migrate pre-v5.0 list files to database-based Pi-hole versions migrate_to_database() { - if [[ -e "${adListFile}" ]]; then - # Store adlists domains in database - echo -e " ${INFO} Pi-hole upgrade: Moving content of ${adListFile} into database" - database_table_from_file "adlists" "${adListFile}" - fi - if [[ -e "${blacklistFile}" ]]; then - # Store blacklisted domains in database - echo -e " ${INFO} Pi-hole upgrade: Moving content of ${blacklistFile} into database" - database_table_from_file "blacklist" "${blacklistFile}" - fi - if [[ -e "${whitelistFile}" ]]; then - # Store whitelisted domains in database - echo -e " ${INFO} Pi-hole upgrade: Moving content of ${whitelistFile} into database" - database_table_from_file "whitelist" "${whitelistFile}" - fi - if [[ -e "${regexFile}" ]]; then - # Store regex domains in database - echo -e " ${INFO} Pi-hole upgrade: Moving content of ${regexFile} into database" - database_table_from_file "regex" "${regexFile}" + # Create database file if not present + if [ ! -e "${gravityDBfile}" ]; then + echo -e " ${INFO} Creating new gravity database" + generate_gravity_database + + # Migrate list files to new database + if [[ -e "${adListFile}" ]]; then + # Store adlists domains in database + echo -e " ${INFO} Migrating content of ${adListFile} into new database" + database_table_from_file "adlists" "${adListFile}" + fi + if [[ -e "${blacklistFile}" ]]; then + # Store blacklisted domains in database + echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" + database_table_from_file "blacklist" "${blacklistFile}" + fi + if [[ -e "${whitelistFile}" ]]; then + # Store whitelisted domains in database + echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" + database_table_from_file "whitelist" "${whitelistFile}" + fi + if [[ -e "${regexFile}" ]]; then + # Store regex domains in database + echo -e " ${INFO} Migrating content of ${regexFile} into new database" + database_table_from_file "regex" "${regexFile}" + fi fi } From 0cfecd31d3501c5a760207bd1780c855a90be2f9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 21:12:22 +0200 Subject: [PATCH 070/366] Invert logic to reduce nesting Signed-off-by: DL6ER --- gravity.sh | 54 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 28 insertions(+), 26 deletions(-) diff --git a/gravity.sh b/gravity.sh index 02f41f86..76d42f32 100755 --- a/gravity.sh +++ b/gravity.sh @@ -149,32 +149,34 @@ database_table_from_file() { # Migrate pre-v5.0 list files to database-based Pi-hole versions migrate_to_database() { - # Create database file if not present - if [ ! 
-e "${gravityDBfile}" ]; then - echo -e " ${INFO} Creating new gravity database" - generate_gravity_database - - # Migrate list files to new database - if [[ -e "${adListFile}" ]]; then - # Store adlists domains in database - echo -e " ${INFO} Migrating content of ${adListFile} into new database" - database_table_from_file "adlists" "${adListFile}" - fi - if [[ -e "${blacklistFile}" ]]; then - # Store blacklisted domains in database - echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" - database_table_from_file "blacklist" "${blacklistFile}" - fi - if [[ -e "${whitelistFile}" ]]; then - # Store whitelisted domains in database - echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" - database_table_from_file "whitelist" "${whitelistFile}" - fi - if [[ -e "${regexFile}" ]]; then - # Store regex domains in database - echo -e " ${INFO} Migrating content of ${regexFile} into new database" - database_table_from_file "regex" "${regexFile}" - fi + # Create database file only if not present + if [ -e "${gravityDBfile}" ]; then + return 0 + fi + + echo -e " ${INFO} Creating new gravity database" + generate_gravity_database + + # Migrate list files to new database + if [[ -e "${adListFile}" ]]; then + # Store adlists domains in database + echo -e " ${INFO} Migrating content of ${adListFile} into new database" + database_table_from_file "adlists" "${adListFile}" + fi + if [[ -e "${blacklistFile}" ]]; then + # Store blacklisted domains in database + echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" + database_table_from_file "blacklist" "${blacklistFile}" + fi + if [[ -e "${whitelistFile}" ]]; then + # Store whitelisted domains in database + echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" + database_table_from_file "whitelist" "${whitelistFile}" + fi + if [[ -e "${regexFile}" ]]; then + # Store regex domains in database + echo -e " ${INFO} Migrating content of ${regexFile} into new database" + database_table_from_file "regex" "${regexFile}" fi } From d46ff1d2d79dbd2cb6f2ee2f919e793a6371757b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 May 2019 22:08:30 +0200 Subject: [PATCH 071/366] Remove UNIQUE on gravity table as the only column is already PRIMARY KEY. Primary keys must contain UNIQUE values, and cannot contain NULL values Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index fd6d2b28..372a4a29 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -36,7 +36,7 @@ CREATE TABLE adlists ); CREATE TABLE gravity ( - domain TEXT UNIQUE PRIMARY KEY + domain TEXT PRIMARY KEY ); CREATE TABLE info ( From 0bc112ce52e717467835f052353c6c8283e8db20 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 2 May 2019 17:11:39 +0200 Subject: [PATCH 072/366] Query only those columns we actually use when showing domains on the CLI. Show date of last modification instead of addition date for domains. 
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 73cb2878..fa81348b 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -170,7 +170,7 @@ Displaylist() { local list listname count num_pipes domain enabled status nicedate listname="${listType}" - data="$(sqlite3 "${gravityDBfile}" "SELECT * FROM ${listType};" 2> /dev/null)" + data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM ${listType};" 2> /dev/null)" if [[ -z $data ]]; then echo -e "Not showing empty ${listname}" @@ -186,9 +186,9 @@ Displaylist() { num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")" # Extract domain and enabled status based on the obtained number of pipe characters - domain="$(cut -d'|' -f"-$((num_pipes-3))" <<< "${line}")" - enabled="$(cut -d'|' -f"$((num_pipes-2))" <<< "${line}")" - dateadded="$(cut -d'|' -f"$((num_pipes-1))" <<< "${line}")" + domain="$(cut -d'|' -f"-$((num_pipes-1))" <<< "${line}")" + enabled="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")" + datemod="$(cut -d'|' -f"$((num_pipes+1))" <<< "${line}")" # Translate boolean status into human readable string if [[ "${enabled}" -eq 1 ]]; then @@ -198,9 +198,9 @@ Displaylist() { fi # Get nice representation of numerical date stored in database - nicedate=$(date --rfc-2822 -d "@${dateadded}") + nicedate=$(date --rfc-2822 -d "@${datemod}") - echo " ${count}: ${domain} (${status}, added ${nicedate})" + echo " ${count}: ${domain} (${status}, last modified ${nicedate})" count=$((count+1)) done <<< "${data}" fi From 2c3a27e9c7e03ac0464808ae0faf346568f135bc Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Fri, 3 May 2019 12:27:56 +0200 Subject: [PATCH 073/366] Replaces cp chmod with install and add read permission to repository files after checkout and pull Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 56 +++++++++++++----------------- 1 file changed, 24 insertions(+), 32 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c1f69efb..0d574f20 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -414,6 +414,8 @@ make_repo() { fi # Clone the repo and return the return code from this command git clone -q --depth 1 "${remoteRepo}" "${directory}" &> /dev/null || return $? + # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) + chmod -R a+r "${directory}" # Show a colored message showing it's status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Always return 0? Not sure this is correct @@ -447,6 +449,8 @@ update_repo() { git pull --quiet &> /dev/null || return $? # Show a completion message printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" + # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) + chmod -R a+r "${directory}" # Move back into the original directory cd "${curdir}" &> /dev/null || return 1 return 0 @@ -494,6 +498,8 @@ resetRepo() { printf " %b %s..." "${INFO}" "${str}" # Use git to remove the local changes git reset --hard &> /dev/null || return $? 
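
The replacement pattern named in this commit message can be sketched in isolation; the paths below are placeholders, not the files the installer actually touches:

    src=/tmp/example.src; dst=/tmp/target/example.conf
    printf 'demo\n' > "$src"
    # Old pattern: make sure the directory exists, copy, then fix the mode.
    mkdir -p "$(dirname "$dst")"
    cp "$src" "$dst"
    chmod 644 "$dst"
    # Single-step equivalent with GNU install(1): -D creates missing parent
    # directories, -m sets the mode at creation time, -T treats the
    # destination as a file rather than a directory.
    install -D -m 644 -T "$src" "$dst"
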
+ # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) + chmod -R a+r "${directory}" # And show the status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Returning success anyway? @@ -1241,8 +1247,7 @@ version_check_dnsmasq() { printf "%b %b Backing up dnsmasq.conf to dnsmasq.conf.orig...\\n" "${OVER}" "${TICK}" printf " %b Restoring default dnsmasq.conf..." "${INFO}" # and replace it with the default - cp -p ${dnsmasq_original_config} ${dnsmasq_conf} - chmod 644 ${dnsmasq_conf} + install -D -m 644 -T ${dnsmasq_original_config} ${dnsmasq_conf} printf "%b %b Restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" # Otherwise, else @@ -1253,19 +1258,17 @@ version_check_dnsmasq() { # If a file cannot be found, printf " %b No dnsmasq.conf found... restoring default dnsmasq.conf..." "${INFO}" # restore the default one - cp -p ${dnsmasq_original_config} ${dnsmasq_conf} + install -D -m 644 -T ${dnsmasq_original_config} ${dnsmasq_conf} printf "%b %b No dnsmasq.conf found... restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" fi printf " %b Copying 01-pihole.conf to /etc/dnsmasq.d/01-pihole.conf..." "${INFO}" # Check to see if dnsmasq directory exists (it may not due to being a fresh install and dnsmasq no longer being a dependency) if [[ ! -d "/etc/dnsmasq.d" ]];then - mkdir "/etc/dnsmasq.d" - chmod 755 "/etc/dnsmasq.d" + install -d -m 755 "/etc/dnsmasq.d" fi # Copy the new Pi-hole DNS config file into the dnsmasq.d directory - cp ${dnsmasq_pihole_01_snippet} ${dnsmasq_pihole_01_location} - chmod 644 ${dnsmasq_pihole_01_location} + install -D -m 644 -T ${dnsmasq_pihole_01_snippet} ${dnsmasq_pihole_01_location} printf "%b %b Copying 01-pihole.conf to /etc/dnsmasq.d/01-pihole.conf\\n" "${OVER}" "${TICK}" # Replace our placeholder values with the GLOBAL DNS variables that we populated earlier # First, swap in the interface to listen on @@ -1381,19 +1384,15 @@ installConfigs() { if [[ "${INSTALL_WEB_SERVER}" == true ]]; then # and if the Web server conf directory does not exist, if [[ ! -d "/etc/lighttpd" ]]; then - # make it - mkdir /etc/lighttpd - # and set the owners - chown "${USER}":root /etc/lighttpd - chmod 755 /etc/lighttpd + # make it and set the owners + install -d -m 755 -o "${USER}" -g root /etc/lighttpd # Otherwise, if the config file already exists elif [[ -f "/etc/lighttpd/lighttpd.conf" ]]; then # back up the original mv /etc/lighttpd/lighttpd.conf /etc/lighttpd/lighttpd.conf.orig fi # and copy in the config file Pi-hole needs - cp ${PI_HOLE_LOCAL_REPO}/advanced/${LIGHTTPD_CFG} /etc/lighttpd/lighttpd.conf - chmod 644 /etc/lighttpd/lighttpd.conf + install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/advanced/${LIGHTTPD_CFG} /etc/lighttpd/lighttpd.conf # Make sure the external.conf file exists, as lighttpd v1.4.50 crashes without it touch /etc/lighttpd/external.conf chmod 644 /etc/lighttpd/external.conf @@ -1427,21 +1426,16 @@ install_manpage() { fi if [[ ! -d "/usr/local/share/man/man8" ]]; then # if not present, create man8 directory - mkdir /usr/local/share/man/man8 - chmod 755 /usr/local/share/man/man8 + install -d -m 755 /usr/local/share/man/man8 fi if [[ ! 
-d "/usr/local/share/man/man5" ]]; then # if not present, create man5 directory - mkdir /usr/local/share/man/man5 - chmod 755 /usr/local/share/man/man5 + install -d -m 755 /usr/local/share/man/man5 fi # Testing complete, copy the files & update the man db - cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 - chmod 644 /usr/local/share/man/man8/pihole.8 - cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 - chmod 644 /usr/local/share/man/man8/pihole-FTL.8 - cp ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5 - chmod 644 /usr/local/share/man/man5/pihole-FTL.conf.5 + install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole.8 /usr/local/share/man/man8/pihole.8 + install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.8 /usr/local/share/man/man8/pihole-FTL.8 + install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/manpages/pihole-FTL.conf.5 /usr/local/share/man/man5/pihole-FTL.conf.5 if mandb -q &>/dev/null; then # Updated successfully printf "%b %b man pages installed and database updated\\n" "${OVER}" "${TICK}" @@ -1671,7 +1665,7 @@ installPiholeWeb() { # Install the directory install -d -m 0755 ${PI_HOLE_BLOCKPAGE_DIR} # and the blockpage - install -D ${PI_HOLE_LOCAL_REPO}/advanced/{index,blockingpage}.* ${PI_HOLE_BLOCKPAGE_DIR}/ + install -D -m 644 ${PI_HOLE_LOCAL_REPO}/advanced/{index,blockingpage}.* ${PI_HOLE_BLOCKPAGE_DIR}/ # Remove superseded file if [[ -e "${PI_HOLE_BLOCKPAGE_DIR}/index.js" ]]; then @@ -1721,10 +1715,8 @@ installCron() { local str="Installing latest Cron script" printf "\\n %b %s..." "${INFO}" "${str}" # Copy the cron file over from the local repo - cp ${PI_HOLE_LOCAL_REPO}/advanced/Templates/pihole.cron /etc/cron.d/pihole # File must not be world or group writeable and must be owned by root - chmod 644 /etc/cron.d/pihole - chown root:root /etc/cron.d/pihole + install -D -m 644 -T -o root -g root ${PI_HOLE_LOCAL_REPO}/advanced/Templates/pihole.cron /etc/cron.d/pihole # Randomize gravity update time sed -i "s/59 1 /$((1 + RANDOM % 58)) $((3 + RANDOM % 2))/" /etc/cron.d/pihole # Randomize update checker time @@ -1855,8 +1847,7 @@ installLogrotate() { local str="Installing latest logrotate script" printf "\\n %b %s..." "${INFO}" "${str}" # Copy the file over from the local repo - cp ${PI_HOLE_LOCAL_REPO}/advanced/Templates/logrotate /etc/pihole/logrotate - chmod 644 /etc/pihole/logrotate + install -D -m 644 -T ${PI_HOLE_LOCAL_REPO}/advanced/Templates/logrotate /etc/pihole/logrotate # Different operating systems have different user / group # settings for logrotate that makes it impossible to create # a static logrotate file that will work with e.g. 
@@ -2120,6 +2111,8 @@ checkout_pull_branch() { printf " %b %s" "${INFO}" "$str" git checkout "${branch}" --quiet || return 1 printf "%b %b %s\\n" "${OVER}" "${TICK}" "$str" + # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) + chmod -R a+r "${directory}" git_pull=$(git pull || return 1) @@ -2532,8 +2525,7 @@ main() { # Display welcome dialogs welcomeDialogs # Create directory for Pi-hole storage - mkdir -p /etc/pihole/ - chmod 755 /ect/pihole/ + install -d -m 755 /etc/pihole/ # Determine available interfaces get_available_interfaces # Find interfaces and let the user choose one From 3aa838bbe47aee2f4ec804873fc2b267422d8f3c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 4 May 2019 12:47:25 +0200 Subject: [PATCH 074/366] Implement black- and whitelist searching with SQL statements. We use the ESCAPE clause in the LIKE query as the underscore "_" wildcard matches any single character but we want to suppress this behavior (underscores can be legitimate part of domains) Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 70 +++++++++++++++++++++++++++------------ 1 file changed, 49 insertions(+), 21 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a4ac895b..9b7a0fab 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -102,29 +102,57 @@ if [[ -n "${str:-}" ]]; then exit 1 fi -# Scan Whitelist and Blacklist -lists="whitelist.txt blacklist.txt" -mapfile -t results <<< "$(scanList "${domainQuery}" "${lists}" "${exact}")" -if [[ -n "${results[*]}" ]]; then - wbMatch=true - # Loop through each result in order to print unique file title once - for result in "${results[@]}"; do - fileName="${result%%.*}" - if [[ -n "${blockpage}" ]]; then - echo "π ${result}" - exit 0 - elif [[ -n "${exact}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" - else - # Only print filename title once per file - if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then +scanDatabaseTable() { + local domain table type querystr result + domain="${1}" + table="${2}" + type="${3:-}" + + # As underscores are legitimate parts of domains, we escape possible them when using the LIKE operator. + # Underscores are a SQLite wildcard matching exactly one character. We obviously want to suppress this + # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched + # as a literal underscore character. 
+ case "${type}" in + "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; + * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\'";; + esac + + # Send prepared query to gravity database + result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null + if [[ -n "${result}" ]]; then + # Prepend listname (separated by a colon) if we found at least one result + # and output result + results="$(sed "s/^/${table}:/g;" <<< "${result}")" + else + # Output empty string as the database query didn't return any result + return + fi + mapfile -t results <<< "${results}" + if [[ -n "${results[*]}" ]]; then + wbMatch=true + # Loop through each result in order to print unique file title once + for result in "${results[@]}"; do + fileName="${result%%:*}" + if [[ -n "${blockpage}" ]]; then + echo "π ${result}" + exit 0 + elif [[ -n "${exact}" ]]; then echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" - fileName_prev="${fileName}" + else + # Only print filename title once per file + if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" + fileName_prev="${fileName}" + fi + echo " ${result#*:}" fi - echo " ${result#*:}" - fi - done -fi + done + fi +} + +# Scan Whitelist and Blacklist +scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" +scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" # Scan Wildcards if [[ -e "${wildcardlist}" ]]; then From 6ba58896d23c5ac7bf497413192a1fdd225e03b1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 4 May 2019 13:15:30 +0200 Subject: [PATCH 075/366] Simplify code Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 48 +++++++++++++++++---------------------- 1 file changed, 21 insertions(+), 27 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 9b7a0fab..66b55e87 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -103,7 +103,7 @@ if [[ -n "${str:-}" ]]; then fi scanDatabaseTable() { - local domain table type querystr result + local domain table type querystr result table_prev domain="${1}" table="${2}" type="${3:-}" @@ -119,35 +119,29 @@ scanDatabaseTable() { # Send prepared query to gravity database result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null - if [[ -n "${result}" ]]; then - # Prepend listname (separated by a colon) if we found at least one result - # and output result - results="$(sed "s/^/${table}:/g;" <<< "${result}")" - else - # Output empty string as the database query didn't return any result + if [[ -z "${result}" ]]; then + # Return early when we have no results return fi - mapfile -t results <<< "${results}" - if [[ -n "${results[*]}" ]]; then - wbMatch=true - # Loop through each result in order to print unique file title once - for result in "${results[@]}"; do - fileName="${result%%:*}" - if [[ -n "${blockpage}" ]]; then - echo "π ${result}" - exit 0 - elif [[ -n "${exact}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" - else - # Only print filename title once per file - if [[ ! 
"${fileName}" == "${fileName_prev:-}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" - fileName_prev="${fileName}" - fi - echo " ${result#*:}" + + wbMatch=true + mapfile -t results <<< "${result}" + # Loop through each result + for result in "${results[@]}"; do + if [[ -n "${blockpage}" ]]; then + echo "π ${result}" + exit 0 + elif [[ -n "${exact}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + else + # Only print table name once + if [[ ! "${table}" == "${table_prev:-}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + table_prev="${table}" fi - done - fi + echo " ${result}" + fi + done } # Scan Whitelist and Blacklist From f80fdd7e83b4bb23edb1671316f986b9ed791b68 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 4 May 2019 13:19:50 +0200 Subject: [PATCH 076/366] Improve comments Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 66b55e87..a7cc9dfb 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -108,10 +108,10 @@ scanDatabaseTable() { table="${2}" type="${3:-}" - # As underscores are legitimate parts of domains, we escape possible them when using the LIKE operator. - # Underscores are a SQLite wildcard matching exactly one character. We obviously want to suppress this + # As underscores are legitimate parts of domains, we escape them when using the LIKE operator. + # Underscores are SQLite wildcards matching exactly one character. We obviously want to suppress this # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched - # as a literal underscore character. + # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. case "${type}" in "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\'";; @@ -120,13 +120,14 @@ scanDatabaseTable() { # Send prepared query to gravity database result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null if [[ -z "${result}" ]]; then - # Return early when we have no results + # Return early when there are no matches in this table return fi + # Mark domain as having been white-/blacklist matched (global variable) wbMatch=true - mapfile -t results <<< "${result}" # Loop through each result + mapfile -t results <<< "${result}" for result in "${results[@]}"; do if [[ -n "${blockpage}" ]]; then echo "π ${result}" From 5246b3e49672edb7e397418160ca2c327cef0ccc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 4 May 2019 13:24:36 +0200 Subject: [PATCH 077/366] Explicitly escape backslash in ESCAPE clause. This has been suggested by Stickler bot. Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a7cc9dfb..d4de380c 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -114,7 +114,7 @@ scanDatabaseTable() { # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. 
case "${type}" in "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; - * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\'";; + * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac # Send prepared query to gravity database From a904c183dfc2de7a503466f5ff39d1af3a4d1aca Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 4 May 2019 18:25:11 +0200 Subject: [PATCH 078/366] Use printf to escape domain content. This prevents possible SQL injection issues Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index d4de380c..d2e16e79 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -104,7 +104,7 @@ fi scanDatabaseTable() { local domain table type querystr result table_prev - domain="${1}" + domain="$(printf "%q" "${1}")" table="${2}" type="${3:-}" From 69c06ba6fe18e07f3b5611e8d849a996ff1931f1 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sun, 5 May 2019 00:32:10 +0200 Subject: [PATCH 079/366] Sets permissions for dnsmaq.conf Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 0d574f20..208bfe7c 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1366,6 +1366,7 @@ installConfigs() { # Format: Name;Primary IPv4;Secondary IPv4;Primary IPv6;Secondary IPv6 # Some values may be empty (for example: DNS servers without IPv6 support) echo "${DNS_SERVERS}" > "${PI_HOLE_CONFIG_DIR}/dns-servers.conf" + chmod 644 "${PI_HOLE_CONFIG_DIR}/dns-servers.conf" # Install empty file if it does not exist if [[ ! -r "${PI_HOLE_CONFIG_DIR}/pihole-FTL.conf" ]]; then @@ -1692,7 +1693,7 @@ installPiholeWeb() { local str="Installing sudoer file" printf "\\n %b %s..." 
"${INFO}" "${str}" # Make the .d directory if it doesn't exist - mkdir -p /etc/sudoers.d/ + install -d -m 755 /etc/sudoers.d/ # and copy in the pihole sudoers file install -m 0640 ${PI_HOLE_LOCAL_REPO}/advanced/Templates/pihole.sudo /etc/sudoers.d/pihole # Add lighttpd user (OS dependent) to sudoers file @@ -2259,6 +2260,7 @@ disable_dnsmasq() { fi # Create /etc/dnsmasq.conf echo "conf-dir=/etc/dnsmasq.d" > "${conffile}" + chmod 644 "${conffile}" } get_binary_name() { From a25f331e83c94320b634b7954d071a0688ff4730 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sun, 5 May 2019 09:20:49 +0200 Subject: [PATCH 080/366] Adds chmod 644 for /var/www and /var/www/html Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 208bfe7c..79a6c94a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -867,6 +867,8 @@ setIFCFG() { echo "DNS2=$PIHOLE_DNS_2" echo "USERCTL=no" }> "${IFCFG_FILE}" + chmod 644 "${IFCFG_FILE}" + chown root:root "${IFCFG_FILE}" # Use ip to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # If NetworkMangler command line interface exists and ready to mangle, @@ -1896,6 +1898,8 @@ installPihole() { # make the Web directory if necessary install -d -m 0755 ${webroot} fi + chmod a+rx /var/www + chmod a+rx /var/www/html if [[ "${INSTALL_WEB_SERVER}" == true ]]; then # Set the owner and permissions From e19adccd9c623eeba20e46a6347108e917af133e Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sun, 5 May 2019 22:34:13 +0200 Subject: [PATCH 081/366] Moves chmod for /var/www/html into INSTALL_WEB_SERVER block Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 79a6c94a..da1a6f9b 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1898,13 +1898,14 @@ installPihole() { # make the Web directory if necessary install -d -m 0755 ${webroot} fi - chmod a+rx /var/www - chmod a+rx /var/www/html if [[ "${INSTALL_WEB_SERVER}" == true ]]; then # Set the owner and permissions chown ${LIGHTTPD_USER}:${LIGHTTPD_GROUP} ${webroot} chmod 0775 ${webroot} + # Repair permissions if /var/www/html is not world readable + chmod a+rx /var/www + chmod a+rx /var/www/html # Give pihole access to the Web server group usermod -a -G ${LIGHTTPD_GROUP} pihole # If the lighttpd command is executable, From 4d23b3267f1ff16a4392c481f7e84473dd42649c Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Tue, 7 May 2019 10:46:16 +0200 Subject: [PATCH 082/366] Adds X to chmod for git operations Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index da1a6f9b..585896c1 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -415,7 +415,7 @@ make_repo() { # Clone the repo and return the return code from this command git clone -q --depth 1 "${remoteRepo}" "${directory}" &> /dev/null || 
return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+r "${directory}" + chmod -R a+rX "${directory}" # Show a colored message showing it's status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Always return 0? Not sure this is correct @@ -450,7 +450,7 @@ update_repo() { # Show a completion message printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+r "${directory}" + chmod -R a+rX "${directory}" # Move back into the original directory cd "${curdir}" &> /dev/null || return 1 return 0 @@ -499,7 +499,7 @@ resetRepo() { # Use git to remove the local changes git reset --hard &> /dev/null || return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+r "${directory}" + chmod -R a+rX "${directory}" # And show the status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Returning success anyway? @@ -2118,7 +2118,7 @@ checkout_pull_branch() { git checkout "${branch}" --quiet || return 1 printf "%b %b %s\\n" "${OVER}" "${TICK}" "$str" # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+r "${directory}" + chmod -R a+rX "${directory}" git_pull=$(git pull || return 1) From 2e6198077d8f505415b8f494357eb40cdec12277 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Thu, 9 May 2019 12:23:44 +0200 Subject: [PATCH 083/366] chmod 644 adlists.list Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 585896c1..00c5c95a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1193,6 +1193,7 @@ chooseBlocklists() { do appendToListsFile "${choice}" done + chmod 644 "${adlistFile}" } # Accept a string parameter, it must be one of the default lists From a3d2a1062fe12f4da89c21274b8228c133dd5bfa Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Fri, 10 May 2019 11:13:19 +0200 Subject: [PATCH 084/366] Makes blocklists world readable Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- gravity.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/gravity.sh b/gravity.sh index a2f2c782..445274ce 100755 --- a/gravity.sh +++ b/gravity.sh @@ -328,6 +328,7 @@ gravity_DownloadBlocklistFromUrl() { # Parse source files into domains format gravity_ParseFileIntoDomains() { local source="${1}" destination="${2}" firstLine abpFilter + chmod 644 "${source}" # Determine if we are parsing a consolidated list if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then @@ -345,6 +346,7 @@ gravity_ParseFileIntoDomains() { sed -r '/(\/|#).*$/d' | \ sed -r 's/^.*\s+//g' | \ sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}" + chmod 644 "${destination}" return 0 fi @@ -375,6 +377,7 @@ gravity_ParseFileIntoDomains() { if($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" } if($0) { print $0 } }' "${source}" > "${destination}" + chmod 644 "${destination}" # Determine if there are Adblock exception rules # https://adblockplus.org/filters @@ -391,6 +394,7 @@ 
gravity_ParseFileIntoDomains() { # Remove exceptions comm -23 "${destination}" <(sort "${destination}.exceptionsFile.tmp") > "${source}" + chmod 644 "${source}" mv "${source}" "${destination}" fi @@ -427,6 +431,7 @@ gravity_ParseFileIntoDomains() { gravity_Cleanup "error" fi fi + chmod 644 "${destination}" } # Create (unfiltered) "Matter and Light" consolidated list @@ -440,6 +445,7 @@ gravity_ConsolidateDownloadedBlocklists() { # Empty $matterAndLight if it already exists, otherwise, create it : > "${piholeDir}/${matterAndLight}" + chmod 644 "${piholeDir}/${matterAndLight}" # Loop through each *.domains file for i in "${activeDomains[@]}"; do From e076db5a4e1e15cb9d8e8707bf2a5da7be125489 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Fri, 10 May 2019 11:19:28 +0200 Subject: [PATCH 085/366] Changes ownership of macvendor.db to pihole Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 00c5c95a..a00aee16 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2216,6 +2216,8 @@ FTLinstall() { # Before stopping FTL, we download the macvendor database curl -sSL "https://ftl.pi-hole.net/macvendor.db" -o "${PI_HOLE_CONFIG_DIR}/macvendor.db" || true + chmod 644 "${PI_HOLE_CONFIG_DIR}/macvendor.db" + chown pihole:pihole "${PI_HOLE_CONFIG_DIR}/macvendor.db" # Stop pihole-FTL service if available stop_service pihole-FTL &> /dev/null From 225285cb977ced4047de542978749c35a3ee43bf Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sat, 11 May 2019 11:14:07 +0200 Subject: [PATCH 086/366] Removes chmod for ${source} and superfluous chmod for ${destination} Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- gravity.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index 445274ce..8fdfa5d7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -328,7 +328,6 @@ gravity_DownloadBlocklistFromUrl() { # Parse source files into domains format gravity_ParseFileIntoDomains() { local source="${1}" destination="${2}" firstLine abpFilter - chmod 644 "${source}" # Determine if we are parsing a consolidated list if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then @@ -377,7 +376,6 @@ gravity_ParseFileIntoDomains() { if($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" } if($0) { print $0 } }' "${source}" > "${destination}" - chmod 644 "${destination}" # Determine if there are Adblock exception rules # https://adblockplus.org/filters @@ -394,7 +392,6 @@ gravity_ParseFileIntoDomains() { # Remove exceptions comm -23 "${destination}" <(sort "${destination}.exceptionsFile.tmp") > "${source}" - chmod 644 "${source}" mv "${source}" "${destination}" fi From 3f90261520aae689e213637bc27d0754ab0ce97b Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sun, 12 May 2019 11:25:48 +0200 Subject: [PATCH 087/366] Moves chmod after every creation of ${destination} instead of end of function Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- gravity.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 8fdfa5d7..2a22b792 100755 --- a/gravity.sh +++ b/gravity.sh @@ -376,6 +376,7 @@ gravity_ParseFileIntoDomains() { if($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" } if($0) { print $0 } }' 
"${source}" > "${destination}" + chmod 644 "${destination}" # Determine if there are Adblock exception rules # https://adblockplus.org/filters @@ -393,6 +394,7 @@ gravity_ParseFileIntoDomains() { # Remove exceptions comm -23 "${destination}" <(sort "${destination}.exceptionsFile.tmp") > "${source}" mv "${source}" "${destination}" + chmod 644 "${destination}" fi echo -e "${OVER} ${TICK} Format: Adblock" @@ -416,11 +418,13 @@ gravity_ParseFileIntoDomains() { # Print if nonempty length { print } ' "${source}" 2> /dev/null > "${destination}" + chmod 644 "${destination}" echo -e "${OVER} ${TICK} Format: URL" else # Default: Keep hosts/domains file in same format as it was downloaded output=$( { mv "${source}" "${destination}"; } 2>&1 ) + chmod 644 "${destination}" if [[ ! -e "${destination}" ]]; then echo -e "\\n ${CROSS} Unable to move tmp file to ${piholeDir} @@ -428,7 +432,6 @@ gravity_ParseFileIntoDomains() { gravity_Cleanup "error" fi fi - chmod 644 "${destination}" } # Create (unfiltered) "Matter and Light" consolidated list From efbd42f7b5e7a9532aeefa13ff1965cb62d67981 Mon Sep 17 00:00:00 2001 From: pvogt09 <50047961+pvogt09@users.noreply.github.com> Date: Sun, 12 May 2019 11:27:25 +0200 Subject: [PATCH 088/366] Moves creation of pihole user before FTL installation Signed-off-by: pvogt09 <50047961+pvogt09@users.noreply.github.com> --- automated install/basic-install.sh | 5 ++--- test/test_automated_install.py | 9 +++++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index a00aee16..9520c44c 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1890,9 +1890,6 @@ accountForRefactor() { # Install base files and web interface installPihole() { - # Create the pihole user - create_pihole_user - # If the user wants to install the Web interface, if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then if [[ ! -d "${webroot}" ]]; then @@ -2591,6 +2588,8 @@ main() { else LIGHTTPD_ENABLED=false fi + # Create the pihole user + create_pihole_user # Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole if ! FTLdetect; then printf " %b FTL Engine not installed\\n" "${CROSS}" diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 853048d1..282c627d 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -398,6 +398,7 @@ def test_FTL_detect_aarch64_no_errors(Pihole): ) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect ''') expected_stdout = info_box + ' FTL Checks...' @@ -418,6 +419,7 @@ def test_FTL_detect_armv6l_no_errors(Pihole): mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect ''') expected_stdout = info_box + ' FTL Checks...' @@ -439,6 +441,7 @@ def test_FTL_detect_armv7l_no_errors(Pihole): mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect ''') expected_stdout = info_box + ' FTL Checks...' @@ -455,6 +458,7 @@ def test_FTL_detect_x86_64_no_errors(Pihole): ''' detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect ''') expected_stdout = info_box + ' FTL Checks...' 
@@ -471,6 +475,7 @@ def test_FTL_detect_unknown_no_errors(Pihole): mock_command('uname', {'-m': ('mips', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect ''') expected_stdout = 'Not able to detect architecture (unknown: mips)' @@ -484,6 +489,7 @@ def test_FTL_download_aarch64_no_errors(Pihole): download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh binary="pihole-FTL-aarch64-linux-gnu" + create_pihole_user FTLinstall ''') expected_stdout = tick_box + ' Downloading and Installing FTL' @@ -498,6 +504,7 @@ def test_FTL_download_unknown_fails_no_errors(Pihole): download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh binary="pihole-FTL-mips" + create_pihole_user FTLinstall ''') expected_stdout = cross_box + ' Downloading and Installing FTL' @@ -514,6 +521,7 @@ def test_FTL_download_binary_unset_no_errors(Pihole): ''' download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLinstall ''') expected_stdout = cross_box + ' Downloading and Installing FTL' @@ -530,6 +538,7 @@ def test_FTL_binary_installed_and_responsive_no_errors(Pihole): ''' installed_binary = Pihole.run(''' source /opt/pihole/basic-install.sh + create_pihole_user FTLdetect pihole-FTL version ''') From ab2f8a0c09ecdb1ce1499f48267c05021e2bb29b Mon Sep 17 00:00:00 2001 From: David Haguenauer Date: Fri, 10 May 2019 10:13:23 -0400 Subject: [PATCH 089/366] Quote variables in basic-install.sh This greatly reduces the number of warnings emitted by ShellCheck, and in turn should make it more likely that errors are caught in the future. Signed-off-by: David Haguenauer --- automated install/basic-install.sh | 116 ++++++++++++++--------------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 18c47485..35d3305c 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -118,7 +118,7 @@ done # If the color table file exists, if [[ -f "${coltable}" ]]; then # source it - source ${coltable} + source "${coltable}" # Otherwise, else # Set these values so the installer can still run in color @@ -182,14 +182,14 @@ if is_command apt-get ; then # A variable to store the command used to update the package cache UPDATE_PKG_CACHE="${PKG_MANAGER} update" # An array for something... - PKG_INSTALL=(${PKG_MANAGER} --yes --no-install-recommends install) + PKG_INSTALL=("${PKG_MANAGER}" --yes --no-install-recommends install) # grep -c will return 1 retVal on 0 matches, block this throwing the set -e with an OR TRUE PKG_COUNT="${PKG_MANAGER} -s -o Debug::NoLocking=true upgrade | grep -c ^Inst || true" # Some distros vary slightly so these fixes for dependencies may apply # on Ubuntu 18.04.1 LTS we need to add the universe repository to gain access to dialog and dhcpcd5 APT_SOURCES="/etc/apt/sources.list" if awk 'BEGIN{a=1;b=0}/bionic main/{a=0}/bionic.*universe/{b=1}END{exit a + b}' ${APT_SOURCES}; then - if ! whiptail --defaultno --title "Dependencies Require Update to Allowed Repositories" --yesno "Would you like to enable 'universe' repository?\\n\\nThis repository is required by the following packages:\\n\\n- dhcpcd5\\n- dialog" ${r} ${c}; then + if ! 
whiptail --defaultno --title "Dependencies Require Update to Allowed Repositories" --yesno "Would you like to enable 'universe' repository?\\n\\nThis repository is required by the following packages:\\n\\n- dhcpcd5\\n- dialog" "${r}" "${c}"; then printf " %b Aborting installation: dependencies could not be installed.\\n" "${CROSS}" exit # exit the installer else @@ -201,7 +201,7 @@ if is_command apt-get ; then fi fi # Debian 7 doesn't have iproute2 so if the dry run install is successful, - if ${PKG_MANAGER} install --dry-run iproute2 > /dev/null 2>&1; then + if "${PKG_MANAGER}" install --dry-run iproute2 > /dev/null 2>&1; then # we can install it iproute_pkg="iproute2" # Otherwise, @@ -222,7 +222,7 @@ if is_command apt-get ; then # Check if installed php is v 7.0, or newer to determine packages to install if [[ "$phpInsNewer" != true ]]; then # Prefer the php metapackage if it's there - if ${PKG_MANAGER} install --dry-run php > /dev/null 2>&1; then + if "${PKG_MANAGER}" install --dry-run php > /dev/null 2>&1; then phpVer="php" # fall back on the php5 packages else @@ -233,19 +233,19 @@ if is_command apt-get ; then phpVer="php$phpInsMajor.$phpInsMinor" fi # We also need the correct version for `php-sqlite` (which differs across distros) - if ${PKG_MANAGER} install --dry-run ${phpVer}-sqlite3 > /dev/null 2>&1; then + if "${PKG_MANAGER}" install --dry-run "${phpVer}"-sqlite3 > /dev/null 2>&1; then phpSqlite="sqlite3" else phpSqlite="sqlite" fi # Since our install script is so large, we need several other programs to successfully get a machine provisioned # These programs are stored in an array so they can be looped through later - INSTALLER_DEPS=(apt-utils dialog debconf dhcpcd5 git ${iproute_pkg} whiptail) + INSTALLER_DEPS=(apt-utils dialog debconf dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd ${phpVer}-common ${phpVer}-cgi ${phpVer}-${phpSqlite}) + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}"-common "${phpVer}"-cgi "${phpVer}-${phpSqlite}") # The Web server user, LIGHTTPD_USER="www-data" # group, @@ -281,7 +281,7 @@ elif is_command rpm ; then # Fedora and family update cache on every PKG_INSTALL call, no need for a separate update. 
UPDATE_PKG_CACHE=":" - PKG_INSTALL=(${PKG_MANAGER} install -y) + PKG_INSTALL=("${PKG_MANAGER}" install -y) PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(dialog git iproute newt procps-ng which) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) @@ -319,7 +319,7 @@ elif is_command rpm ; then # The default php on CentOS 7.x is 5.4 which is EOL # Check if the version of PHP available via installed repositories is >= to PHP 7 - AVAILABLE_PHP_VERSION=$(${PKG_MANAGER} info php | grep -i version | grep -o '[0-9]\+' | head -1) + AVAILABLE_PHP_VERSION=$("${PKG_MANAGER}" info php | grep -i version | grep -o '[0-9]\+' | head -1) if [[ $AVAILABLE_PHP_VERSION -ge $SUPPORTED_CENTOS_PHP_VERSION ]]; then # Since PHP 7 is available by default, install via default PHP package names : # do nothing as PHP is current @@ -329,7 +329,7 @@ elif is_command rpm ; then rpm -q ${REMI_PKG} &> /dev/null || rc=$? if [[ $rc -ne 0 ]]; then # The PHP version available via default repositories is older than version 7 - if ! whiptail --defaultno --title "PHP 7 Update (recommended)" --yesno "PHP 7.x is recommended for both security and language features.\\nWould you like to install PHP7 via Remi's RPM repository?\\n\\nSee: https://rpms.remirepo.net for more information" ${r} ${c}; then + if ! whiptail --defaultno --title "PHP 7 Update (recommended)" --yesno "PHP 7.x is recommended for both security and language features.\\nWould you like to install PHP7 via Remi's RPM repository?\\n\\nSee: https://rpms.remirepo.net for more information" "${r}" "${c}"; then # User decided to NOT update PHP from REMI, attempt to install the default available PHP version printf " %b User opt-out of PHP 7 upgrade on CentOS. Deprecated PHP may be in use.\\n" "${INFO}" : # continue with unsupported php version @@ -352,7 +352,7 @@ elif is_command rpm ; then fi else # Warn user of unsupported version of Fedora or CentOS - if ! whiptail --defaultno --title "Unsupported RPM based distribution" --yesno "Would you like to continue installation on an unsupported RPM based distribution?\\n\\nPlease ensure the following packages have been installed manually:\\n\\n- lighttpd\\n- lighttpd-fastcgi\\n- PHP version 7+" ${r} ${c}; then + if ! whiptail --defaultno --title "Unsupported RPM based distribution" --yesno "Would you like to continue installation on an unsupported RPM based distribution?\\n\\nPlease ensure the following packages have been installed manually:\\n\\n- lighttpd\\n- lighttpd-fastcgi\\n- PHP version 7+" "${r}" "${c}"; then printf " %b Aborting installation due to unsupported RPM based distribution\\n" "${CROSS}" exit # exit the installer else @@ -543,15 +543,15 @@ get_available_interfaces() { # A function for displaying the dialogs the user sees when first running the installer welcomeDialogs() { # Display the welcome dialog using an appropriately sized window via the calculation conducted earlier in the script - whiptail --msgbox --backtitle "Welcome" --title "Pi-hole automated installer" "\\n\\nThis installer will transform your device into a network-wide ad blocker!" ${r} ${c} + whiptail --msgbox --backtitle "Welcome" --title "Pi-hole automated installer" "\\n\\nThis installer will transform your device into a network-wide ad blocker!" 
"${r}" "${c}" # Request that users donate if they enjoy the software since we all work on it in our free time - whiptail --msgbox --backtitle "Plea" --title "Free and open source" "\\n\\nThe Pi-hole is free, but powered by your donations: http://pi-hole.net/donate" ${r} ${c} + whiptail --msgbox --backtitle "Plea" --title "Free and open source" "\\n\\nThe Pi-hole is free, but powered by your donations: http://pi-hole.net/donate" "${r}" "${c}" # Explain the need for a static address whiptail --msgbox --backtitle "Initiating network interface" --title "Static IP Needed" "\\n\\nThe Pi-hole is a SERVER so it needs a STATIC IP ADDRESS to function properly. -In the next section, you can choose to use your current network settings (DHCP) or to manually edit them." ${r} ${c} +In the next section, you can choose to use your current network settings (DHCP) or to manually edit them." "${r}" "${c}" } # We need to make sure there is enough space before installing, so there is a function to check this @@ -638,7 +638,7 @@ chooseInterface() { # Feed the available interfaces into this while loop done <<< "${availableInterfaces}" # The whiptail command that will be run, stored in a variable - chooseInterfaceCmd=(whiptail --separate-output --radiolist "Choose An Interface (press space to select)" ${r} ${c} ${interfaceCount}) + chooseInterfaceCmd=(whiptail --separate-output --radiolist "Choose An Interface (press space to select)" "${r}" "${c}" "${interfaceCount}") # Now run the command using the interfaces saved into the array chooseInterfaceOptions=$("${chooseInterfaceCmd[@]}" "${interfacesArray[@]}" 2>&1 >/dev/tty) || \ # If the user chooses Cancel, exit @@ -719,7 +719,7 @@ useIPv6dialog() { # If the IPV6_ADDRESS contains a value if [[ ! -z "${IPV6_ADDRESS}" ]]; then # Display that IPv6 is supported and will be used - whiptail --msgbox --backtitle "IPv6..." --title "IPv6 Supported" "$IPV6_ADDRESS will be used to block ads." ${r} ${c} + whiptail --msgbox --backtitle "IPv6..." --title "IPv6 Supported" "$IPV6_ADDRESS will be used to block ads." "${r}" "${c}" fi } @@ -729,7 +729,7 @@ use4andor6() { local useIPv4 local useIPv6 # Let use select IPv4 and/or IPv6 via a checklist - cmd=(whiptail --separate-output --checklist "Select Protocols (press space to select)" ${r} ${c} 2) + cmd=(whiptail --separate-output --checklist "Select Protocols (press space to select)" "${r}" "${c}" 2) # In an array, show the options available: # IPv4 (on by default) options=(IPv4 "Block ads over IPv4" on @@ -778,11 +778,11 @@ getStaticIPv4Settings() { # This is useful for users that are using DHCP reservations; then we can just use the information gathered via our functions if whiptail --backtitle "Calibrating network interface" --title "Static IP Address" --yesno "Do you want to use your current network settings as a static address? IP address: ${IPV4_ADDRESS} - Gateway: ${IPv4gw}" ${r} ${c}; then + Gateway: ${IPv4gw}" "${r}" "${c}"; then # If they choose yes, let the user know that the IP address will not be available via DHCP and may cause a conflict. whiptail --msgbox --backtitle "IP information" --title "FYI: IP Conflict" "It is possible your router could still try to assign this IP to a device, which would cause a conflict. But in most cases the router is smart enough to not do that. If you are worried, either manually set the address, or modify the DHCP reservation pool so it does not include the IP you want. 
-It is also possible to use a DHCP reservation, but if you are going to do that, you might as well set a static address." ${r} ${c} +It is also possible to use a DHCP reservation, but if you are going to do that, you might as well set a static address." "${r}" "${c}" # Nothing else to do since the variables are already set above else # Otherwise, we need to ask the user to input their desired settings. @@ -791,13 +791,13 @@ It is also possible to use a DHCP reservation, but if you are going to do that, until [[ "${ipSettingsCorrect}" = True ]]; do # Ask for the IPv4 address - IPV4_ADDRESS=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 address" --inputbox "Enter your desired IPv4 address" ${r} ${c} "${IPV4_ADDRESS}" 3>&1 1>&2 2>&3) || \ + IPV4_ADDRESS=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 address" --inputbox "Enter your desired IPv4 address" "${r}" "${c}" "${IPV4_ADDRESS}" 3>&1 1>&2 2>&3) || \ # Cancelling IPv4 settings window { ipSettingsCorrect=False; echo -e " ${COL_LIGHT_RED}Cancel was selected, exiting installer${COL_NC}"; exit 1; } printf " %b Your static IPv4 address: %s\\n" "${INFO}" "${IPV4_ADDRESS}" # Ask for the gateway - IPv4gw=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 gateway (router)" --inputbox "Enter your desired IPv4 default gateway" ${r} ${c} "${IPv4gw}" 3>&1 1>&2 2>&3) || \ + IPv4gw=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 gateway (router)" --inputbox "Enter your desired IPv4 default gateway" "${r}" "${c}" "${IPv4gw}" 3>&1 1>&2 2>&3) || \ # Cancelling gateway settings window { ipSettingsCorrect=False; echo -e " ${COL_LIGHT_RED}Cancel was selected, exiting installer${COL_NC}"; exit 1; } printf " %b Your static IPv4 gateway: %s\\n" "${INFO}" "${IPv4gw}" @@ -805,7 +805,7 @@ It is also possible to use a DHCP reservation, but if you are going to do that, # Give the user a chance to review their settings before moving on if whiptail --backtitle "Calibrating network interface" --title "Static IP Address" --yesno "Are these settings correct? IP address: ${IPV4_ADDRESS} - Gateway: ${IPv4gw}" ${r} ${c}; then + Gateway: ${IPv4gw}" "${r}" "${c}"; then # After that's done, the loop ends and we move on ipSettingsCorrect=True else @@ -933,7 +933,7 @@ valid_ip() { # and set the new one to a dot (period) IFS='.' # Put the IP into an array - ip=(${ip}) + ip=("${ip}") # Restore the IFS to what it was IFS=${OIFS} ## Evaluate each octet by checking if it's less than or equal to 255 (the max for each octet) @@ -943,7 +943,7 @@ valid_ip() { stat=$? fi # Return the exit code - return ${stat} + return "${stat}" } # A function to choose the upstream DNS provider(s) @@ -973,7 +973,7 @@ setDNS() { # Restore the IFS to what it was IFS=${OIFS} # In a whiptail dialog, show the options - DNSchoices=$(whiptail --separate-output --menu "Select Upstream DNS Provider. To use your own, select Custom." ${r} ${c} 7 \ + DNSchoices=$(whiptail --separate-output --menu "Select Upstream DNS Provider. To use your own, select Custom." 
"${r}" "${c}" 7 \ "${DNSChooseOptions[@]}" 2>&1 >/dev/tty) || \ # exit if Cancel is selected { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } @@ -1003,7 +1003,7 @@ setDNS() { fi # Dialog for the user to enter custom upstream servers - piholeDNS=$(whiptail --backtitle "Specify Upstream DNS Provider(s)" --inputbox "Enter your desired upstream DNS provider(s), separated by a comma.\\n\\nFor example '8.8.8.8, 8.8.4.4'" ${r} ${c} "${prePopulate}" 3>&1 1>&2 2>&3) || \ + piholeDNS=$(whiptail --backtitle "Specify Upstream DNS Provider(s)" --inputbox "Enter your desired upstream DNS provider(s), separated by a comma.\\n\\nFor example '8.8.8.8, 8.8.4.4'" "${r}" "${c}" "${prePopulate}" 3>&1 1>&2 2>&3) || \ { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } # Clean user input and replace whitespace with comma. piholeDNS=$(sed 's/[, \t]\+/,/g' <<< "${piholeDNS}") @@ -1036,7 +1036,7 @@ setDNS() { # Otherwise, else # Show the settings - if (whiptail --backtitle "Specify Upstream DNS Provider(s)" --title "Upstream DNS Provider(s)" --yesno "Are these settings correct?\\n DNS Server 1: $PIHOLE_DNS_1\\n DNS Server 2: ${PIHOLE_DNS_2}" ${r} ${c}); then + if (whiptail --backtitle "Specify Upstream DNS Provider(s)" --title "Upstream DNS Provider(s)" --yesno "Are these settings correct?\\n DNS Server 1: $PIHOLE_DNS_1\\n DNS Server 2: ${PIHOLE_DNS_2}" "${r}" "${c}"); then # and break from the loop since the servers are valid DNSSettingsCorrect=True # Otherwise, @@ -1127,7 +1127,7 @@ setAdminFlag() { local WebChoices # Similar to the logging function, ask what the user wants - WebToggleCommand=(whiptail --separate-output --radiolist "Do you wish to install the web admin interface?" ${r} ${c} 6) + WebToggleCommand=(whiptail --separate-output --radiolist "Do you wish to install the web admin interface?" "${r}" "${c}" 6) # with the default being enabled WebChooseOptions=("On (Recommended)" "" on Off "" off) @@ -1248,11 +1248,11 @@ version_check_dnsmasq() { printf " it is from a previous Pi-hole install.\\n" printf " %b Backing up dnsmasq.conf to dnsmasq.conf.orig..." "${INFO}" # so backup the original file - mv -f ${dnsmasq_conf} ${dnsmasq_conf_orig} + mv -f "${dnsmasq_conf}" "${dnsmasq_conf_orig}" printf "%b %b Backing up dnsmasq.conf to dnsmasq.conf.orig...\\n" "${OVER}" "${TICK}" printf " %b Restoring default dnsmasq.conf..." "${INFO}" # and replace it with the default - install -D -m 644 -T ${dnsmasq_original_config} ${dnsmasq_conf} + install -D -m 644 -T "${dnsmasq_original_config}" "${dnsmasq_conf}" printf "%b %b Restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" # Otherwise, else @@ -1263,7 +1263,7 @@ version_check_dnsmasq() { # If a file cannot be found, printf " %b No dnsmasq.conf found... restoring default dnsmasq.conf..." "${INFO}" # restore the default one - install -D -m 644 -T ${dnsmasq_original_config} ${dnsmasq_conf} + install -D -m 644 -T "${dnsmasq_original_config}" "${dnsmasq_conf}" printf "%b %b No dnsmasq.conf found... 
restoring default dnsmasq.conf...\\n" "${OVER}" "${TICK}" fi @@ -1273,37 +1273,37 @@ version_check_dnsmasq() { install -d -m 755 "/etc/dnsmasq.d" fi # Copy the new Pi-hole DNS config file into the dnsmasq.d directory - install -D -m 644 -T ${dnsmasq_pihole_01_snippet} ${dnsmasq_pihole_01_location} + install -D -m 644 -T "${dnsmasq_pihole_01_snippet}" "${dnsmasq_pihole_01_location}" printf "%b %b Copying 01-pihole.conf to /etc/dnsmasq.d/01-pihole.conf\\n" "${OVER}" "${TICK}" # Replace our placeholder values with the GLOBAL DNS variables that we populated earlier # First, swap in the interface to listen on - sed -i "s/@INT@/$PIHOLE_INTERFACE/" ${dnsmasq_pihole_01_location} + sed -i "s/@INT@/$PIHOLE_INTERFACE/" "${dnsmasq_pihole_01_location}" if [[ "${PIHOLE_DNS_1}" != "" ]]; then # Then swap in the primary DNS server - sed -i "s/@DNS1@/$PIHOLE_DNS_1/" ${dnsmasq_pihole_01_location} + sed -i "s/@DNS1@/$PIHOLE_DNS_1/" "${dnsmasq_pihole_01_location}" else # - sed -i '/^server=@DNS1@/d' ${dnsmasq_pihole_01_location} + sed -i '/^server=@DNS1@/d' "${dnsmasq_pihole_01_location}" fi if [[ "${PIHOLE_DNS_2}" != "" ]]; then # Then swap in the primary DNS server - sed -i "s/@DNS2@/$PIHOLE_DNS_2/" ${dnsmasq_pihole_01_location} + sed -i "s/@DNS2@/$PIHOLE_DNS_2/" "${dnsmasq_pihole_01_location}" else # - sed -i '/^server=@DNS2@/d' ${dnsmasq_pihole_01_location} + sed -i '/^server=@DNS2@/d' "${dnsmasq_pihole_01_location}" fi # - sed -i 's/^#conf-dir=\/etc\/dnsmasq.d$/conf-dir=\/etc\/dnsmasq.d/' ${dnsmasq_conf} + sed -i 's/^#conf-dir=\/etc\/dnsmasq.d$/conf-dir=\/etc\/dnsmasq.d/' "${dnsmasq_conf}" # If the user does not want to enable logging, if [[ "${QUERY_LOGGING}" == false ]] ; then # Disable it by commenting out the directive in the DNS config file - sed -i 's/^log-queries/#log-queries/' ${dnsmasq_pihole_01_location} + sed -i 's/^log-queries/#log-queries/' "${dnsmasq_pihole_01_location}" # Otherwise, else # enable it by uncommenting the directive in the DNS config file - sed -i 's/^#log-queries/log-queries/' ${dnsmasq_pihole_01_location} + sed -i 's/^#log-queries/log-queries/' "${dnsmasq_pihole_01_location}" fi } @@ -1647,7 +1647,7 @@ install_dependent_packages() { # Install Fedora/CentOS packages for i in "${argArray1[@]}"; do printf " %b Checking for %s..." "${INFO}" "${i}" - if ${PKG_MANAGER} -q list installed "${i}" &> /dev/null; then + if "${PKG_MANAGER}" -q list installed "${i}" &> /dev/null; then printf "%b %b Checking for %s" "${OVER}" "${TICK}" "${i}" else printf "%b %b Checking for %s (will be installed)" "${OVER}" "${INFO}" "${i}" @@ -1765,7 +1765,7 @@ configureFirewall() { # If a firewall is running, if firewall-cmd --state &> /dev/null; then # ask if the user wants to install Pi-hole's default firewall rules - whiptail --title "Firewall in use" --yesno "We have detected a running firewall\\n\\nPi-hole currently requires HTTP and DNS port access.\\n\\n\\n\\nInstall Pi-hole default firewall rules?" ${r} ${c} || \ + whiptail --title "Firewall in use" --yesno "We have detected a running firewall\\n\\nPi-hole currently requires HTTP and DNS port access.\\n\\n\\n\\nInstall Pi-hole default firewall rules?" "${r}" "${c}" || \ { printf " %b Not installing firewall rulesets.\\n" "${INFO}"; return 0; } printf " %b Configuring FirewallD for httpd and pihole-FTL\\n" "${TICK}" # Allow HTTP and DNS traffic @@ -1778,7 +1778,7 @@ configureFirewall() { # If chain Policy is not ACCEPT or last Rule is not ACCEPT # then check and insert our Rules above the DROP/REJECT Rule. 
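        # For orientation (example output only): "iptables -S INPUT" prints the chain policy
        # first and any appended rules last, e.g.
        #   -P INPUT ACCEPT
        #   -A INPUT -i lo -j ACCEPT
        # so the check below only prompts when the policy line or the final rule does not end
        # in ACCEPT, i.e. when incoming DNS/HTTP traffic could actually be dropped.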
if iptables -S INPUT | head -n1 | grep -qv '^-P.*ACCEPT$' || iptables -S INPUT | tail -n1 | grep -qv '^-\(A\|P\).*ACCEPT$'; then - whiptail --title "Firewall in use" --yesno "We have detected a running firewall\\n\\nPi-hole currently requires HTTP and DNS port access.\\n\\n\\n\\nInstall Pi-hole default firewall rules?" ${r} ${c} || \ + whiptail --title "Firewall in use" --yesno "We have detected a running firewall\\n\\nPi-hole currently requires HTTP and DNS port access.\\n\\n\\n\\nInstall Pi-hole default firewall rules?" "${r}" "${c}" || \ { printf " %b Not installing firewall rulesets.\\n" "${INFO}"; return 0; } printf " %b Installing new IPTables firewall rulesets\\n" "${TICK}" # Check chain first, otherwise a new rule will duplicate old ones @@ -1872,21 +1872,21 @@ installLogrotate() { # At some point in the future this list can be pruned, for now we'll need it to ensure updates don't break. # Refactoring of install script has changed the name of a couple of variables. Sort them out here. accountForRefactor() { - sed -i 's/piholeInterface/PIHOLE_INTERFACE/g' ${setupVars} - sed -i 's/IPv4_address/IPV4_ADDRESS/g' ${setupVars} - sed -i 's/IPv4addr/IPV4_ADDRESS/g' ${setupVars} - sed -i 's/IPv6_address/IPV6_ADDRESS/g' ${setupVars} - sed -i 's/piholeIPv6/IPV6_ADDRESS/g' ${setupVars} - sed -i 's/piholeDNS1/PIHOLE_DNS_1/g' ${setupVars} - sed -i 's/piholeDNS2/PIHOLE_DNS_2/g' ${setupVars} - sed -i 's/^INSTALL_WEB=/INSTALL_WEB_INTERFACE=/' ${setupVars} + sed -i 's/piholeInterface/PIHOLE_INTERFACE/g' "${setupVars}" + sed -i 's/IPv4_address/IPV4_ADDRESS/g' "${setupVars}" + sed -i 's/IPv4addr/IPV4_ADDRESS/g' "${setupVars}" + sed -i 's/IPv6_address/IPV6_ADDRESS/g' "${setupVars}" + sed -i 's/piholeIPv6/IPV6_ADDRESS/g' "${setupVars}" + sed -i 's/piholeDNS1/PIHOLE_DNS_1/g' "${setupVars}" + sed -i 's/piholeDNS2/PIHOLE_DNS_2/g' "${setupVars}" + sed -i 's/^INSTALL_WEB=/INSTALL_WEB_INTERFACE=/' "${setupVars}" # Add 'INSTALL_WEB_SERVER', if its not been applied already: https://github.com/pi-hole/pi-hole/pull/2115 if ! grep -q '^INSTALL_WEB_SERVER=' ${setupVars}; then local webserver_installed=false if grep -q '^INSTALL_WEB_INTERFACE=true' ${setupVars}; then webserver_installed=true fi - echo -e "INSTALL_WEB_SERVER=$webserver_installed" >> ${setupVars} + echo -e "INSTALL_WEB_SERVER=$webserver_installed" >> "${setupVars}" fi } @@ -1968,7 +1968,7 @@ checkSelinux() { # If it's enforcing, if [[ "${enforceMode}" == "Enforcing" ]]; then # Explain Pi-hole does not support it yet - whiptail --defaultno --title "SELinux Enforcing Detected" --yesno "SELinux is being ENFORCED on your system! \\n\\nPi-hole currently does not support SELinux, but you may still continue with the installation.\\n\\nNote: Web Admin will not be fully functional unless you set your policies correctly\\n\\nContinue installing Pi-hole?" ${r} ${c} || \ + whiptail --defaultno --title "SELinux Enforcing Detected" --yesno "SELinux is being ENFORCED on your system! \\n\\nPi-hole currently does not support SELinux, but you may still continue with the installation.\\n\\nNote: Web Admin will not be fully functional unless you set your policies correctly\\n\\nContinue installing Pi-hole?" 
"${r}" "${c}" || \ { printf "\\n %bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } printf " %b Continuing installation with SELinux Enforcing\\n" "${INFO}" printf " %b Please refer to official SELinux documentation to create a custom policy\\n" "${INFO}" @@ -2007,7 +2007,7 @@ If you set a new IP address, you should restart the Pi. The install log is in /etc/pihole. -${additional}" ${r} ${c} +${additional}" "${r}" "${c}" } update_dialogs() { @@ -2028,7 +2028,7 @@ update_dialogs() { opt2b="This will reset your Pi-hole and allow you to enter new settings." # Display the information to the user - UpdateCmd=$(whiptail --title "Existing Install Detected!" --menu "\\n\\nWe have detected an existing install.\\n\\nPlease choose from the following options: \\n($strAdd)" ${r} ${c} 2 \ + UpdateCmd=$(whiptail --title "Existing Install Detected!" --menu "\\n\\nWe have detected an existing install.\\n\\nPlease choose from the following options: \\n($strAdd)" "${r}" "${c}" 2 \ "${opt1a}" "${opt1b}" \ "${opt2a}" "${opt2b}" 3>&2 2>&1 1>&3) || \ { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } @@ -2556,7 +2556,7 @@ main() { installDefaultBlocklists # Source ${setupVars} to use predefined user variables in the functions - source ${setupVars} + source "${setupVars}" # Get the privacy level if it exists (default is 0) if [[ -f "${PI_HOLE_CONFIG_DIR}/pihole-FTL.conf" ]]; then @@ -2613,7 +2613,7 @@ main() { pw=$(tr -dc _A-Z-a-z-0-9 < /dev/urandom | head -c 8) # shellcheck disable=SC1091 . /opt/pihole/webpage.sh - echo "WEBPASSWORD=$(HashPassword ${pw})" >> ${setupVars} + echo "WEBPASSWORD=$(HashPassword "${pw}")" >> "${setupVars}" fi fi From 69081a91757590f6a7d1ef157c786be418d2a7e8 Mon Sep 17 00:00:00 2001 From: David Haguenauer Date: Fri, 10 May 2019 10:37:38 -0400 Subject: [PATCH 090/366] Drop indirection from install_dependent_packages Previously, install_dependent_packages would receive an array variable name as its single parameter, and would use variable indirection to access it; this change simplifies that function so that it instead receives the expanded array. Signed-off-by: David Haguenauer --- automated install/basic-install.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 35d3305c..a5a78102 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1616,7 +1616,6 @@ install_dependent_packages() { # Install packages passed in via argument array # No spinner - conflicts with set -e - declare -a argArray1=("${!1}") declare -a installArray # Debian based package install - debconf will download the entire package list @@ -1626,7 +1625,7 @@ install_dependent_packages() { # installed by us, and remove only the installed packages, and not the entire list. if is_command debconf-apt-progress ; then # For each package, - for i in "${argArray1[@]}"; do + for i in "$@"; do printf " %b Checking for %s..." "${INFO}" "${i}" if dpkg-query -W -f='${Status}' "${i}" 2>/dev/null | grep "ok installed" &> /dev/null; then printf "%b %b Checking for %s\\n" "${OVER}" "${TICK}" "${i}" @@ -1645,7 +1644,7 @@ install_dependent_packages() { fi # Install Fedora/CentOS packages - for i in "${argArray1[@]}"; do + for i in "$@"; do printf " %b Checking for %s..." 
"${INFO}" "${i}" if "${PKG_MANAGER}" -q list installed "${i}" &> /dev/null; then printf "%b %b Checking for %s" "${OVER}" "${TICK}" "${i}" @@ -2525,7 +2524,7 @@ main() { notify_package_updates_available # Install packages used by this installation script - install_dependent_packages INSTALLER_DEPS[@] + install_dependent_packages "${INSTALLER_DEPS[@]}" # Check if SELinux is Enforcing checkSelinux @@ -2576,7 +2575,7 @@ main() { dep_install_list+=("${PIHOLE_WEB_DEPS[@]}") fi - install_dependent_packages dep_install_list[@] + install_dependent_packages "${dep_install_list[@]}" unset dep_install_list # On some systems, lighttpd is not enabled on first install. We need to enable it here if the user From be3a21ae0770371c0614f1923f02ac504c53df0a Mon Sep 17 00:00:00 2001 From: David Haguenauer Date: Mon, 13 May 2019 09:13:23 -0400 Subject: [PATCH 091/366] Put quotes at ends of strings Signed-off-by: David Haguenauer --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index a5a78102..bb000808 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -233,7 +233,7 @@ if is_command apt-get ; then phpVer="php$phpInsMajor.$phpInsMinor" fi # We also need the correct version for `php-sqlite` (which differs across distros) - if "${PKG_MANAGER}" install --dry-run "${phpVer}"-sqlite3 > /dev/null 2>&1; then + if "${PKG_MANAGER}" install --dry-run "${phpVer}-sqlite3" > /dev/null 2>&1; then phpSqlite="sqlite3" else phpSqlite="sqlite" @@ -245,7 +245,7 @@ if is_command apt-get ; then PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd "${phpVer}"-common "${phpVer}"-cgi "${phpVer}-${phpSqlite}") + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}") # The Web server user, LIGHTTPD_USER="www-data" # group, From 03d93aa19a61cbe3e23dea47c3bdd9cd2c66287a Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 20:58:57 -0700 Subject: [PATCH 092/366] Update debug script with gravity DB changes Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 67 +++++++++++++++++---------------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 1010f26c..3a5c482f 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -89,16 +89,12 @@ PIHOLE_WILDCARD_CONFIG_FILE="${DNSMASQ_D_DIRECTORY}/03-wildcard.conf" WEB_SERVER_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/lighttpd.conf" #WEB_SERVER_CUSTOM_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/external.conf" -PIHOLE_DEFAULT_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.default" -PIHOLE_USER_DEFINED_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.list" -PIHOLE_BLACKLIST_FILE="${PIHOLE_DIRECTORY}/blacklist.txt" -PIHOLE_BLOCKLIST_FILE="${PIHOLE_DIRECTORY}/gravity.list" PIHOLE_INSTALL_LOG_FILE="${PIHOLE_DIRECTORY}/install.log" PIHOLE_RAW_BLOCKLIST_FILES="${PIHOLE_DIRECTORY}/list.*" PIHOLE_LOCAL_HOSTS_FILE="${PIHOLE_DIRECTORY}/local.list" PIHOLE_LOGROTATE_FILE="${PIHOLE_DIRECTORY}/logrotate" PIHOLE_SETUP_VARS_FILE="${PIHOLE_DIRECTORY}/setupVars.conf" -PIHOLE_WHITELIST_FILE="${PIHOLE_DIRECTORY}/whitelist.txt" 
+PIHOLE_GRAVITY_DB_FILE="${PIHOLE_DIRECTORY}/gravity.db" PIHOLE_COMMAND="${BIN_DIRECTORY}/pihole" PIHOLE_COLTABLE_FILE="${BIN_DIRECTORY}/COL_TABLE" @@ -142,16 +138,11 @@ REQUIRED_FILES=("${PIHOLE_CRON_FILE}" "${PIHOLE_DHCP_CONFIG_FILE}" "${PIHOLE_WILDCARD_CONFIG_FILE}" "${WEB_SERVER_CONFIG_FILE}" -"${PIHOLE_DEFAULT_AD_LISTS}" -"${PIHOLE_USER_DEFINED_AD_LISTS}" -"${PIHOLE_BLACKLIST_FILE}" -"${PIHOLE_BLOCKLIST_FILE}" "${PIHOLE_INSTALL_LOG_FILE}" "${PIHOLE_RAW_BLOCKLIST_FILES}" "${PIHOLE_LOCAL_HOSTS_FILE}" "${PIHOLE_LOGROTATE_FILE}" "${PIHOLE_SETUP_VARS_FILE}" -"${PIHOLE_WHITELIST_FILE}" "${PIHOLE_COMMAND}" "${PIHOLE_COLTABLE_FILE}" "${FTL_PID}" @@ -793,7 +784,7 @@ dig_at() { # This helps emulate queries to different domains that a user might query # It will also give extra assurance that Pi-hole is correctly resolving and blocking domains local random_url - random_url=$(shuf -n 1 "${PIHOLE_BLOCKLIST_FILE}") + random_url=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity ORDER BY RANDOM() LIMIT 1") # First, do a dig on localhost to see if Pi-hole can use itself to block a domain if local_dig=$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @${local_address} +short "${record_type}"); then @@ -975,8 +966,7 @@ list_files_in_dir() { if [[ -d "${dir_to_parse}/${each_file}" ]]; then # If it's a directoy, do nothing : - elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_BLOCKLIST_FILE}" ]] || \ - [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \ + elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \ [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \ [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \ [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_SETUP_VARS_FILE}" ]] || \ @@ -1061,31 +1051,43 @@ head_tail_log() { IFS="$OLD_IFS" } -analyze_gravity_list() { - echo_current_diagnostic "Gravity list" - local head_line - local tail_line - # Put the current Internal Field Separator into another variable so it can be restored later +show_adlists() { + echo_current_diagnostic "Adlists" + OLD_IFS="$IFS" - # Get the lines that are in the file(s) and store them in an array for parsing later IFS=$'\r\n' + local adlists=() + mapfile -t adlists < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT address FROM vw_adlists") + + for line in "${adlists[@]}"; do + log_write " ${line}" + done + + IFS="$OLD_IFS" +} + +analyze_gravity_list() { + echo_current_diagnostic "Gravity List and Database" + local gravity_permissions - gravity_permissions=$(ls -ld "${PIHOLE_BLOCKLIST_FILE}") + gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}") log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" - local gravity_head=() - mapfile -t gravity_head < <(head -n 4 ${PIHOLE_BLOCKLIST_FILE}) - log_write " ${COL_CYAN}-----head of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}" - for head_line in "${gravity_head[@]}"; do - log_write " ${head_line}" + + local gravity_size + gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") + log_write " Size: ${COL_CYAN}${gravity_size}${COL_NC} entries" + + OLD_IFS="$IFS" + IFS=$'\r\n' + local gravity_sample=() + mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") + log_write " ${COL_CYAN}----- First 10 Domains -----${COL_NC}" + + for line in "${gravity_sample[@]}"; do + log_write " ${line}" done + log_write "" - local gravity_tail=() - mapfile -t gravity_tail < <(tail -n 4 ${PIHOLE_BLOCKLIST_FILE}) - 
log_write " ${COL_CYAN}-----tail of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}" - for tail_line in "${gravity_tail[@]}"; do - log_write " ${tail_line}" - done - # Set the IFS back to what it was IFS="$OLD_IFS" } @@ -1236,6 +1238,7 @@ process_status parse_setup_vars check_x_headers analyze_gravity_list +show_adlists show_content_of_pihole_files parse_locale analyze_pihole_log From 3f05efd60f4e4ff5630621d3a1d50fd81e1e1807 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 21:02:31 -0700 Subject: [PATCH 093/366] Add extra newline Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 3a5c482f..82660d61 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1076,6 +1076,7 @@ analyze_gravity_list() { local gravity_size gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") log_write " Size: ${COL_CYAN}${gravity_size}${COL_NC} entries" + log_write "" OLD_IFS="$IFS" IFS=$'\r\n' From 807ce0af4ed7d59830c1ae26eaffb6757c320699 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 21:15:22 -0700 Subject: [PATCH 094/366] Show whitelist, blacklist, and regexlist details Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 32 +++++++++++++++++++++++++++----- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 82660d61..c491b1c6 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1051,21 +1051,40 @@ head_tail_log() { IFS="$OLD_IFS" } -show_adlists() { - echo_current_diagnostic "Adlists" +show_db_entries() { + local title="${1}" + local query="${2}" + + echo_current_diagnostic "${title}" OLD_IFS="$IFS" IFS=$'\r\n' - local adlists=() - mapfile -t adlists < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT address FROM vw_adlists") + local entries=() + mapfile -t entries < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" -cmd ".headers on" "${query}") - for line in "${adlists[@]}"; do + for line in "${entries[@]}"; do log_write " ${line}" done IFS="$OLD_IFS" } +show_adlists() { + show_db_entries "Adlists" "SELECT * FROM adlists" +} + +show_whitelist() { + show_db_entries "Whitelist" "SELECT * FROM whitelist" +} + +show_blacklist() { + show_db_entries "Blacklist" "SELECT * FROM blacklist" +} + +show_regexlist() { + show_db_entries "Regexlist" "SELECT * FROM regex" +} + analyze_gravity_list() { echo_current_diagnostic "Gravity List and Database" @@ -1240,6 +1259,9 @@ parse_setup_vars check_x_headers analyze_gravity_list show_adlists +show_whitelist +show_blacklist +show_regexlist show_content_of_pihole_files parse_locale analyze_pihole_log From 7b5fc60e003263b58fc7f1ebf8ce3bc3f6b74796 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 21:20:38 -0700 Subject: [PATCH 095/366] Improve table formatting Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c491b1c6..e56d1f94 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1060,7 +1060,7 @@ show_db_entries() { OLD_IFS="$IFS" IFS=$'\r\n' local entries=() - mapfile -t entries < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" -cmd ".headers on" "${query}") + mapfile -t entries < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" -cmd ".headers on" -cmd ".mode column" "${query}") for line 
in "${entries[@]}"; do log_write " ${line}" From a3e1473ac10411e2fc72952bb2fa7983393b3b17 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 21:33:09 -0700 Subject: [PATCH 096/366] Set explicit column widths to prevent text from getting cut off Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index e56d1f94..816652e0 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1054,13 +1054,20 @@ head_tail_log() { show_db_entries() { local title="${1}" local query="${2}" + local widths="${3}" echo_current_diagnostic "${title}" OLD_IFS="$IFS" IFS=$'\r\n' local entries=() - mapfile -t entries < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" -cmd ".headers on" -cmd ".mode column" "${query}") + mapfile -t entries < <(\ + sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \ + -cmd ".headers on" \ + -cmd ".mode column" \ + -cmd ".width ${widths}" \ + "${query}"\ + ) for line in "${entries[@]}"; do log_write " ${line}" @@ -1070,19 +1077,19 @@ show_db_entries() { } show_adlists() { - show_db_entries "Adlists" "SELECT * FROM adlists" + show_db_entries "Adlists" "SELECT * FROM adlists" "2 100 7 10 13 50" } show_whitelist() { - show_db_entries "Whitelist" "SELECT * FROM whitelist" + show_db_entries "Whitelist" "SELECT * FROM whitelist" "2 100 7 10 13 50" } show_blacklist() { - show_db_entries "Blacklist" "SELECT * FROM blacklist" + show_db_entries "Blacklist" "SELECT * FROM blacklist" "2 100 7 10 13 50" } show_regexlist() { - show_db_entries "Regexlist" "SELECT * FROM regex" + show_db_entries "Regexlist" "SELECT * FROM regex" "2 100 7 10 13 50" } analyze_gravity_list() { From 5796054305e4b6842d95f8f1259df35862a34ba4 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 20 May 2019 21:59:18 -0700 Subject: [PATCH 097/366] Increase ID column width to 4 Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 816652e0..07a11ff2 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1077,19 +1077,19 @@ show_db_entries() { } show_adlists() { - show_db_entries "Adlists" "SELECT * FROM adlists" "2 100 7 10 13 50" + show_db_entries "Adlists" "SELECT * FROM adlists" "4 100 7 10 13 50" } show_whitelist() { - show_db_entries "Whitelist" "SELECT * FROM whitelist" "2 100 7 10 13 50" + show_db_entries "Whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" } show_blacklist() { - show_db_entries "Blacklist" "SELECT * FROM blacklist" "2 100 7 10 13 50" + show_db_entries "Blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" } show_regexlist() { - show_db_entries "Regexlist" "SELECT * FROM regex" "2 100 7 10 13 50" + show_db_entries "Regexlist" "SELECT * FROM regex" "4 100 7 10 13 50" } analyze_gravity_list() { From bfb99c361c6db92a728760ba34edb63ffca6006c Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Tue, 21 May 2019 17:12:47 -0700 Subject: [PATCH 098/366] Note that the gravity size does not include the blacklist entries Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 07a11ff2..b31bbdc5 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1101,7 +1101,7 @@ analyze_gravity_list() { local 
gravity_size gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") - log_write " Size: ${COL_CYAN}${gravity_size}${COL_NC} entries" + log_write " Size (excluding blacklist): ${COL_CYAN}${gravity_size}${COL_NC} entries" log_write "" OLD_IFS="$IFS" From 69dba022c4cc9872fe3ea64b4d6ddcbbf438712f Mon Sep 17 00:00:00 2001 From: Chris Crocker-White Date: Mon, 27 May 2019 10:27:28 -0700 Subject: [PATCH 099/366] Handle an empty local.list Handle the case of an empty local.list file which would otherwise prevent the system from starting Change-type: patch Signed-off-by: Chris Crocker-White --- gravity.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index b41e8893..1fe2b4e3 100755 --- a/gravity.sh +++ b/gravity.sh @@ -184,8 +184,8 @@ migrate_to_database() { gravity_CheckDNSResolutionAvailable() { local lookupDomain="pi.hole" - # Determine if $localList does not exist - if [[ ! -e "${localList}" ]]; then + # Determine if $localList does not exist, and ensure it is not empty + if [[ ! -e "${localList}" ]] || [[ -s "${localList}" ]]; then lookupDomain="raw.githubusercontent.com" fi From f6213d4f4dfb2aa32dabf07619d9739116308bbd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 30 May 2019 15:26:27 +0200 Subject: [PATCH 100/366] Use last PID in case pidof returns multiple PIDs for pihole-FTL Signed-off-by: DL6ER --- advanced/Templates/pihole-FTL.service | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service index 9eb183ed..7c7e533e 100644 --- a/advanced/Templates/pihole-FTL.service +++ b/advanced/Templates/pihole-FTL.service @@ -13,7 +13,7 @@ FTLUSER=pihole PIDFILE=/var/run/pihole-FTL.pid get_pid() { - pidof "pihole-FTL" + pidof "pihole-FTL" | awk '{print $(NF)}' } is_running() { From c5df104a6653ae08b5d226de345a9f2cd53cf26b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 30 May 2019 16:41:37 +0200 Subject: [PATCH 101/366] Add dhcp-ignore-names option when enabling DHCP service. We currently remove anything that starts with "dhcp-" to have a clean configuration and removed these lines without noticing when enabling the DHCP server. Signed-off-by: DL6ER --- advanced/01-pihole.conf | 5 ----- advanced/Scripts/webpage.sh | 8 ++++++++ 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index 40a117fe..cd74e186 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -41,8 +41,3 @@ log-facility=/var/log/pihole.log local-ttl=2 log-async - -# If a DHCP client claims that its name is "wpad", ignore that. -# This fixes a security hole. see CERT Vulnerability VU#598349 -dhcp-name-match=set:wpad-ignore,wpad -dhcp-ignore-names=tag:wpad-ignore diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index ea699efa..583579b6 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -366,6 +366,14 @@ EnableDHCP() { delete_dnsmasq_setting "dhcp-" delete_dnsmasq_setting "quiet-dhcp" + # If a DHCP client claims that its name is "wpad", ignore that. + # This fixes a security hole. 
see CERT Vulnerability VU#598349 + # We also ignore "localhost" as Windows behaves strangely if a + # device claims this host name + add_dnsmasq_setting "dhcp-name-match=set:hostname-ignore,wpad +dhcp-name-match=set:hostname-ignore,localhost +dhcp-ignore-names=tag:hostname-ignore" + ProcessDHCPSettings RestartDNS From c3023fe68182c7c9adf2c39e86576cedec1bf942 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 30 May 2019 21:23:15 +0200 Subject: [PATCH 102/366] Add new "pihole arpflush" command to flush both the ARP cache as well as the network table in pihole-FTL.db Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 65 ++++++++++++++++++++++++++++++ pihole | 10 ++++- 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100755 advanced/Scripts/piholeARPTable.sh diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh new file mode 100755 index 00000000..6af96d91 --- /dev/null +++ b/advanced/Scripts/piholeARPTable.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash +# shellcheck disable=SC1090 + +# Pi-hole: A black hole for Internet advertisements +# (c) 2019 Pi-hole, LLC (https://pi-hole.net) +# Network-wide ad blocking via your own hardware. +# +# ARP table interaction +# +# This file is copyright under the latest version of the EUPL. +# Please see LICENSE file for your rights under this license. + +coltable="/opt/pihole/COL_TABLE" +if [[ -f ${coltable} ]]; then + source ${coltable} +fi + +# Determine database location +# Obtain DBFILE=... setting from pihole-FTL.db +# Constructed to return nothing when +# a) the setting is not present in the config file, or +# b) the setting is commented out (e.g. "#DBFILE=...") +FTLconf="/etc/pihole/pihole-FTL.conf" +if [ -e "$FTLconf" ]; then + DBFILE="$(sed -n -e 's/^\s*DBFILE\s*=\s*//p' ${FTLconf})" +fi +# Test for empty string. Use standard path in this case. +if [ -z "$DBFILE" ]; then + DBFILE="/etc/pihole/pihole-FTL.db" +fi + + +flushARP(){ + local output + if [[ "$@" != *"quiet"* ]]; then + echo -ne " ${INFO} Flushing network table ..." + fi + + # Flush ARP cache to avoid re-adding of dead entries + if ! output=$(ip neigh flush all 2>&1); then + echo -e "${OVER} ${CROSS} Failed to clear ARP cache" + echo " Output: ${output}" + return 1 + fi + + if ! 
output=$(sqlite3 "${DBFILE}" "DELETE FROM network;" 2>&1); then + echo -e "${OVER} ${CROSS} Failed to truncate network table" + echo " Database location: ${DBFILE}" + echo " Output: ${output}" + return 1 + fi + + if [[ "$@" != *"quiet"* ]]; then + echo -e "${OVER} ${TICK} Flushed network table" + fi +} + +args=("$@") + +case "${args[0]}" in + "arpflush" ) flushARP;; +esac + +shift + diff --git a/pihole b/pihole index 71c286c6..d837f873 100755 --- a/pihole +++ b/pihole @@ -54,6 +54,11 @@ flushFunc() { exit 0 } +arpFunc() { + "${PI_HOLE_SCRIPT_DIR}"/piholeARPTable.sh "$@" + exit 0 +} + updatePiholeFunc() { shift "${PI_HOLE_SCRIPT_DIR}"/update.sh "$@" @@ -430,8 +435,8 @@ fi case "${1}" in "-w" | "whitelist" ) listFunc "$@";; "-b" | "blacklist" ) listFunc "$@";; - "--wild" | "wildcard" ) listFunc "$@";; - "--regex" | "regex" ) listFunc "$@";; + "--wild" | "wildcard" ) listFunc "$@";; + "--regex" | "regex" ) listFunc "$@";; "-d" | "debug" ) debugFunc "$@";; "-f" | "flush" ) flushFunc "$@";; "-up" | "updatePihole" ) updatePiholeFunc "$@";; @@ -452,5 +457,6 @@ case "${1}" in "checkout" ) piholeCheckoutFunc "$@";; "tricorder" ) tricorderFunc;; "updatechecker" ) updateCheckFunc "$@";; + "arpflush" ) arpFunc "$@";; * ) helpFunc;; esac From 285e6fe090069ffd38e6679b38256a9632e2f9e8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 30 May 2019 21:32:35 +0200 Subject: [PATCH 103/366] Address lint complaints Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index 6af96d91..dab730f4 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -32,7 +32,7 @@ fi flushARP(){ local output - if [[ "$@" != *"quiet"* ]]; then + if [[ "${args[1]}" != *"quiet"* ]]; then echo -ne " ${INFO} Flushing network table ..." fi @@ -43,6 +43,7 @@ flushARP(){ return 1 fi + # Truncate network table in pihole-FTL.db if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network;" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network table" echo " Database location: ${DBFILE}" @@ -50,7 +51,7 @@ flushARP(){ return 1 fi - if [[ "$@" != *"quiet"* ]]; then + if [[ "${args[1]}" != *"quiet"* ]]; then echo -e "${OVER} ${TICK} Flushed network table" fi } From 5060605626950007aa0fc35153356ea49c96a187 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 30 May 2019 21:44:47 +0200 Subject: [PATCH 104/366] Print table name before entering the loop for the sake of simplicity Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index d2e16e79..88ec2173 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -103,7 +103,7 @@ if [[ -n "${str:-}" ]]; then fi scanDatabaseTable() { - local domain table type querystr result table_prev + local domain table type querystr result domain="$(printf "%q" "${1}")" table="${2}" type="${3:-}" @@ -126,22 +126,20 @@ scanDatabaseTable() { # Mark domain as having been white-/blacklist matched (global variable) wbMatch=true - # Loop through each result + + # Print table name + if [[ ! 
-z "${result}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + fi + + # Loop over results and print them mapfile -t results <<< "${result}" for result in "${results[@]}"; do if [[ -n "${blockpage}" ]]; then echo "π ${result}" exit 0 - elif [[ -n "${exact}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" - else - # Only print table name once - if [[ ! "${table}" == "${table_prev:-}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" - table_prev="${table}" - fi - echo " ${result}" fi + echo " ${result}" done } From 44f8fcb600cc78f7b4dd8c6c9c8bba73c64c718c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 31 May 2019 08:39:18 +0200 Subject: [PATCH 105/366] We can print the table name without any extra check as we already returned early in case there are no results. Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 88ec2173..9cb298df 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -128,9 +128,7 @@ scanDatabaseTable() { wbMatch=true # Print table name - if [[ ! -z "${result}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" - fi + echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" # Loop over results and print them mapfile -t results <<< "${result}" From 9ddce880920752cece01b48ad25caaeffe9275b8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 31 May 2019 08:42:22 +0200 Subject: [PATCH 106/366] Review comments Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index dab730f4..10bee0df 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -32,7 +32,7 @@ fi flushARP(){ local output - if [[ "${args[1]}" != *"quiet"* ]]; then + if [[ "${args[1]}" != "quiet" ]]; then echo -ne " ${INFO} Flushing network table ..." fi @@ -44,14 +44,14 @@ flushARP(){ fi # Truncate network table in pihole-FTL.db - if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network;" 2>&1); then + if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network table" echo " Database location: ${DBFILE}" echo " Output: ${output}" return 1 fi - if [[ "${args[1]}" != *"quiet"* ]]; then + if [[ "${args[1]}" != "quiet" ]]; then echo -e "${OVER} ${TICK} Flushed network table" fi } @@ -61,6 +61,3 @@ args=("$@") case "${args[0]}" in "arpflush" ) flushARP;; esac - -shift - From ae98fde32141154e7384bf38a3893ecf162d94ca Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 31 May 2019 09:18:12 +0200 Subject: [PATCH 107/366] Try to obtain PID from PIDFILE. 
If this fails (file does not exist or is empty), fall back to using pidof + awk Signed-off-by: DL6ER --- advanced/Templates/pihole-FTL.service | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service index 7c7e533e..8a4c7ce6 100644 --- a/advanced/Templates/pihole-FTL.service +++ b/advanced/Templates/pihole-FTL.service @@ -13,6 +13,13 @@ FTLUSER=pihole PIDFILE=/var/run/pihole-FTL.pid get_pid() { + # First, try to obtain PID from PIDFILE + if [ -s "${PIDFILE}" ]; then + cat "${PIDFILE}" + return + fi + + # If the PIDFILE is empty or not available, obtain the PID using pidof pidof "pihole-FTL" | awk '{print $(NF)}' } From a09f92f9cc664cc21381ab2d651c7803e7e361a0 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Fri, 31 May 2019 22:12:54 -0700 Subject: [PATCH 108/366] Create FUNDING.yml Signed-off-by: Dan Schaper --- .github/FUNDING.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000..3a75dc12 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,4 @@ +# These are supported funding model platforms + +patreon: pihole +custom: https://pi-hole.net/donate From 54bfaa28c1ac41fc8839c935003a1900320195c2 Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 3 Jun 2019 19:23:27 +0100 Subject: [PATCH 109/366] Fix for regexp queries through pihole -q Taking inspiration from: https://github.com/pi-hole/pi-hole/pull/2346 We will use awk to iterate through each regexp and print any that match the domain query. Signed-off-by: MMotti --- advanced/Scripts/query.sh | 64 +++++++++++++++------------------------ 1 file changed, 25 insertions(+), 39 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 9cb298df..840c03da 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -12,7 +12,7 @@ # Globals piholeDir="/etc/pihole" gravityDBfile="${piholeDir}/gravity.db" -wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf" +regexlist="/etc/pihole/regex.list" options="$*" adlist="" all="" @@ -23,27 +23,11 @@ matchType="match" colfile="/opt/pihole/COL_TABLE" source "${colfile}" -# Print each subdomain -# e.g: foo.bar.baz.com = "foo.bar.baz.com bar.baz.com baz.com com" -processWildcards() { - IFS="." 
read -r -a array <<< "${1}" - for (( i=${#array[@]}-1; i>=0; i-- )); do - ar="" - for (( j=${#array[@]}-1; j>${#array[@]}-i-2; j-- )); do - if [[ $j == $((${#array[@]}-1)) ]]; then - ar="${array[$j]}" - else - ar="${array[$j]}.${ar}" - fi - done - echo "${ar}" - done -} - +# Scan an array of files for matching strings # Scan an array of files for matching strings scanList(){ # Escape full stops - local domain="${1//./\\.}" lists="${2}" type="${3:-}" + local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" type="${3:-}" # Prevent grep from printing file path cd "$piholeDir" || exit 1 @@ -54,9 +38,9 @@ scanList(){ # /dev/null forces filename to be printed when only one list has been generated # shellcheck disable=SC2086 case "${type}" in - "exact" ) grep -i -E -l "(^|(?/dev/null;; - "wc" ) grep -i -o -m 1 "/${domain}/" ${lists} 2>/dev/null;; - * ) grep -i "${domain}" ${lists} /dev/null 2>/dev/null;; + "exact" ) grep -i -E -l "(^|(?/dev/null;; + "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' ${lists} <(echo "$domain") 2>/dev/null;; + * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } @@ -145,24 +129,26 @@ scanDatabaseTable() { scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" -# Scan Wildcards -if [[ -e "${wildcardlist}" ]]; then - # Determine all subdomains, domain and TLDs - mapfile -t wildcards <<< "$(processWildcards "${domainQuery}")" - for match in "${wildcards[@]}"; do - # Search wildcard list for matches - mapfile -t results <<< "$(scanList "${match}" "${wildcardlist}" "wc")" - if [[ -n "${results[*]}" ]]; then - if [[ -z "${wcMatch:-}" ]] && [[ -z "${blockpage}" ]]; then - wcMatch=true - echo " ${matchType^} found in ${COL_BOLD}Wildcards${COL_NC}:" - fi - case "${blockpage}" in - true ) echo "π ${wildcardlist##*/}"; exit 0;; - * ) echo " *.${match}";; - esac +# Scan Regex +if [[ -e "${regexlist}" ]]; then + # Return portion(s) of string that is found in the regex list + mapfile -t results <<< "$(scanList "${domainQuery}" "${regexlist}" "rx")" + + if [[ -n "${results[*]}" ]]; then + # A result is found + str="Phrase ${matchType}ed within ${COL_BOLD}regex list${COL_NC}" + result="${COL_BOLD}$(printf '%s\n' ${results[*]})${COL_NC}" + + if [[ -z "${blockpage}" ]]; then + wcMatch=true + echo " $str" fi - done + + case "${blockpage}" in + true ) echo "π ${regexlist##*/}"; exit 0;; + * ) awk '{print " "$0}' <<< "${result}";; + esac + fi fi # Get version sorted *.domains filenames (without dir path) From 97df6d7415bfc2daf10b401a8c8712a47af0ab2d Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 3 Jun 2019 19:55:29 +0100 Subject: [PATCH 110/366] Stickler fix Signed-off-by: MMotti --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 840c03da..827f97a3 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -137,7 +137,7 @@ if [[ -e "${regexlist}" ]]; then if [[ -n "${results[*]}" ]]; then # A result is found str="Phrase ${matchType}ed within ${COL_BOLD}regex list${COL_NC}" - result="${COL_BOLD}$(printf '%s\n' ${results[*]})${COL_NC}" + result="${COL_BOLD}$(IFS=$'\n'; echo "${results[*]}")${COL_NC}" if [[ -z "${blockpage}" ]]; then wcMatch=true From 09532638d5d6cbbf82aa44ec0e04a67e64402905 Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 3 Jun 2019 23:59:58 +0100 Subject: [PATCH 111/366] Read from DB instead of regex.list Signed-off-by: MMotti --- 
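A minimal sketch of the pattern this commit introduces, i.e. loading the regexps straight from
the gravity database instead of regex.list (assumes the database and its vw_regex view exist at
the standard /etc/pihole/gravity.db location; the committed code uses a here-string rather than
process substitution):

  mapfile -t regexps < <(sqlite3 /etc/pihole/gravity.db "SELECT domain FROM vw_regex" 2> /dev/null)
  printf 'Loaded %d regexp(s) from vw_regex\n' "${#regexps[@]}"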
advanced/Scripts/query.sh | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 827f97a3..a4cf02b1 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -39,7 +39,7 @@ scanList(){ # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' ${lists} <(echo "$domain") 2>/dev/null;; + "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' <(echo "$lists") <(echo "$domain") 2>/dev/null;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } @@ -97,8 +97,9 @@ scanDatabaseTable() { # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. case "${type}" in - "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; - * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; + "retrievetable" ) querystr="SELECT domain FROM vw_${table}";; + * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac # Send prepared query to gravity database @@ -108,6 +109,13 @@ scanDatabaseTable() { return fi + # If we are only retrieving the table + # Just output and return + if [[ "${type}" == "retrievetable" ]]; then + echo "${result[*]}" + return + fi + # Mark domain as having been white-/blacklist matched (global variable) wbMatch=true @@ -129,14 +137,21 @@ scanDatabaseTable() { scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" -# Scan Regex -if [[ -e "${regexlist}" ]]; then +# Scan Regex table +regexlist=$(scanDatabaseTable "" "regex" "retrievetable") + +if [[ -n "${regexlist}" ]]; then # Return portion(s) of string that is found in the regex list mapfile -t results <<< "$(scanList "${domainQuery}" "${regexlist}" "rx")" - if [[ -n "${results[*]}" ]]; then - # A result is found - str="Phrase ${matchType}ed within ${COL_BOLD}regex list${COL_NC}" + # If a result is found + if [[ -n "${results[*]}" ]]; then + # Count the matches + regexCount=${#results[@]} + # Determine plural string + [[ $regexCount -gt 1 ]] && plu="es" + # Form output strings + str="${COL_BOLD}${regexCount}${COL_NC} ${matchType}${plu:-} found in ${COL_BOLD}regex${COL_NC} table" result="${COL_BOLD}$(IFS=$'\n'; echo "${results[*]}")${COL_NC}" if [[ -z "${blockpage}" ]]; then From b49c702f331c5bfecc2b4622039740f8b32aa247 Mon Sep 17 00:00:00 2001 From: MMotti Date: Tue, 4 Jun 2019 00:03:37 +0100 Subject: [PATCH 112/366] Consistency tweak Signed-off-by: MMotti --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a4cf02b1..f4aed05b 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -39,7 +39,7 @@ scanList(){ # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' <(echo "$lists") <(echo "$domain") 2>/dev/null;; + "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; * ) grep -i "${esc_domain}" ${lists} 
/dev/null 2>/dev/null;; esac } From 7613e94ef6f987d12203dc13e26b5637a5e31d48 Mon Sep 17 00:00:00 2001 From: MMotti Date: Tue, 4 Jun 2019 06:06:17 +0100 Subject: [PATCH 113/366] Minor tweaks Mainly for consistency Signed-off-by: MMotti --- advanced/Scripts/query.sh | 46 +++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 26 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index f4aed05b..5b8baa63 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -12,7 +12,6 @@ # Globals piholeDir="/etc/pihole" gravityDBfile="${piholeDir}/gravity.db" -regexlist="/etc/pihole/regex.list" options="$*" adlist="" all="" @@ -23,7 +22,6 @@ matchType="match" colfile="/opt/pihole/COL_TABLE" source "${colfile}" -# Scan an array of files for matching strings # Scan an array of files for matching strings scanList(){ # Escape full stops @@ -39,7 +37,12 @@ scanList(){ # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - "rx" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; + # Create array of regexps + # Iterate through each regexp and check whether it matches the domainQuery + # If it does, print the matching regexp and continue looping + # Input 1 - regexps | Input 2 - domainQuery + "regex" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' \ + <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } @@ -97,9 +100,8 @@ scanDatabaseTable() { # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. 
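    # As an illustration, for a hypothetical lookup of "ad_server.example" against the blacklist
    # table the prepared statement becomes:
    #   SELECT domain FROM vw_blacklist WHERE domain LIKE '%ad\_server.example%' ESCAPE '\'
    # Without the escaping, the underscore would act as the single-character wildcard and the
    # pattern would also match e.g. "adxserver.example".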
case "${type}" in - "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; - "retrievetable" ) querystr="SELECT domain FROM vw_${table}";; - * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; + * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac # Send prepared query to gravity database @@ -109,13 +111,6 @@ scanDatabaseTable() { return fi - # If we are only retrieving the table - # Just output and return - if [[ "${type}" == "retrievetable" ]]; then - echo "${result[*]}" - return - fi - # Mark domain as having been white-/blacklist matched (global variable) wbMatch=true @@ -138,20 +133,19 @@ scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" # Scan Regex table -regexlist=$(scanDatabaseTable "" "regex" "retrievetable") - -if [[ -n "${regexlist}" ]]; then - # Return portion(s) of string that is found in the regex list - mapfile -t results <<< "$(scanList "${domainQuery}" "${regexlist}" "rx")" - - # If a result is found +mapfile -t regexlist <<< "$(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null)" +# Split results over new line and store in a string +# ready for processing +str_regexlist=$(IFS=$'\n'; echo "${regexlist[*]}") +# If there are regexps in the DB +if [[ -n "${str_regexlist}" ]]; then + # Return any regexps that match the domainQuery + mapfile -t results <<< "$(scanList "${domainQuery}" "${str_regexlist}" "regex")" + + # If there are matches to the domain query if [[ -n "${results[*]}" ]]; then - # Count the matches - regexCount=${#results[@]} - # Determine plural string - [[ $regexCount -gt 1 ]] && plu="es" # Form output strings - str="${COL_BOLD}${regexCount}${COL_NC} ${matchType}${plu:-} found in ${COL_BOLD}regex${COL_NC} table" + str="${matchType^} found in ${COL_BOLD}regex list${COL_NC}" result="${COL_BOLD}$(IFS=$'\n'; echo "${results[*]}")${COL_NC}" if [[ -z "${blockpage}" ]]; then @@ -160,7 +154,7 @@ if [[ -n "${regexlist}" ]]; then fi case "${blockpage}" in - true ) echo "π ${regexlist##*/}"; exit 0;; + true ) echo "π regex list"; exit 0;; * ) awk '{print " "$0}' <<< "${result}";; esac fi From cf21efa10350cfbb257d0428c6be08594e8139ae Mon Sep 17 00:00:00 2001 From: MMotti Date: Wed, 5 Jun 2019 14:36:43 +0100 Subject: [PATCH 114/366] Minor grammar changes Signed-off-by: MMotti --- advanced/Scripts/query.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 5b8baa63..9134dc0f 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -145,7 +145,7 @@ if [[ -n "${str_regexlist}" ]]; then # If there are matches to the domain query if [[ -n "${results[*]}" ]]; then # Form output strings - str="${matchType^} found in ${COL_BOLD}regex list${COL_NC}" + str="${matchType^} found in ${COL_BOLD}Regex list${COL_NC}" result="${COL_BOLD}$(IFS=$'\n'; echo "${results[*]}")${COL_NC}" if [[ -z "${blockpage}" ]]; then @@ -154,7 +154,7 @@ if [[ -n "${str_regexlist}" ]]; then fi case "${blockpage}" in - true ) echo "π regex list"; exit 0;; + true ) echo "π Regex list"; exit 0;; * ) awk '{print " "$0}' <<< "${result}";; esac fi From 785f1fedd9ade421aa923b997fe873dff171f942 Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 10 Jun 2019 17:48:52 +0100 Subject: [PATCH 115/366] Tidy regexp queries Signed-off-by: MMotti 
--- advanced/Scripts/query.sh | 55 +++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 9134dc0f..e418eecf 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -133,31 +133,36 @@ scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" # Scan Regex table -mapfile -t regexlist <<< "$(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null)" -# Split results over new line and store in a string -# ready for processing -str_regexlist=$(IFS=$'\n'; echo "${regexlist[*]}") -# If there are regexps in the DB -if [[ -n "${str_regexlist}" ]]; then - # Return any regexps that match the domainQuery - mapfile -t results <<< "$(scanList "${domainQuery}" "${str_regexlist}" "regex")" - - # If there are matches to the domain query - if [[ -n "${results[*]}" ]]; then - # Form output strings - str="${matchType^} found in ${COL_BOLD}Regex list${COL_NC}" - result="${COL_BOLD}$(IFS=$'\n'; echo "${results[*]}")${COL_NC}" - - if [[ -z "${blockpage}" ]]; then - wcMatch=true - echo " $str" - fi - - case "${blockpage}" in - true ) echo "π Regex list"; exit 0;; - * ) awk '{print " "$0}' <<< "${result}";; - esac - fi +mapfile -t regexlist < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null) + +# If we have regexps to process +if [[ "${#regexlist[@]}" -ne 0 ]]; then + # Split regexps over a new line + str_regexlist=$(printf '%s\n' "${regexlist[@]}") + # Check domainQuery against regexps + mapfile -t regexMatches < <(scanList "${domainQuery}" "${str_regexlist}" "regex") + # If there were regex matches + if [[ "${#regexMatches[@]}" -ne 0 ]]; then + # Split matching regexps over a new line + str_regexMatches=$(printf '%s\n' "${regexMatches[@]}") + # Form a "matched" message + str_message="${matchType^} found in ${COL_BOLD}Regex list${COL_NC}" + # Form a "results" message + str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" + + if [[ -z "${blockpage}" ]]; then + # Set the wildcard match flag + wcMatch=true + # Echo the "matched" message, indented by one space + echo " ${str_message}" + # Echo the "results" message, each line indented by three spaces + echo "${str_result}" | sed 's/^/ /' + else + echo "π Regex list" + exit 0 + fi + + fi fi # Get version sorted *.domains filenames (without dir path) From 10fbed50f3e3213a5a1e836cecd144cc59562107 Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 10 Jun 2019 18:10:25 +0100 Subject: [PATCH 116/366] Shellcheck Suggesting to use parameter expansion but we need to be able to identify the start of each line Signed-off-by: MMotti --- advanced/Scripts/query.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index e418eecf..93d8baee 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -156,6 +156,7 @@ if [[ "${#regexlist[@]}" -ne 0 ]]; then # Echo the "matched" message, indented by one space echo " ${str_message}" # Echo the "results" message, each line indented by three spaces + # shellcheck disable=SC2001 echo "${str_result}" | sed 's/^/ /' else echo "π Regex list" From a9d0690f4dff7b081f22146cad1440eff04082a1 Mon Sep 17 00:00:00 2001 From: MMotti Date: Mon, 10 Jun 2019 18:50:52 +0100 Subject: [PATCH 117/366] Change the case of some variables Just for consistency Signed-off-by: MMotti --- advanced/Scripts/query.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 93d8baee..aa23ab41 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -133,14 +133,14 @@ scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" # Scan Regex table -mapfile -t regexlist < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null) +mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null) # If we have regexps to process -if [[ "${#regexlist[@]}" -ne 0 ]]; then +if [[ "${#regexList[@]}" -ne 0 ]]; then # Split regexps over a new line - str_regexlist=$(printf '%s\n' "${regexlist[@]}") + str_regexList=$(printf '%s\n' "${regexList[@]}") # Check domainQuery against regexps - mapfile -t regexMatches < <(scanList "${domainQuery}" "${str_regexlist}" "regex") + mapfile -t regexMatches < <(scanList "${domainQuery}" "${str_regexList}" "regex") # If there were regex matches if [[ "${#regexMatches[@]}" -ne 0 ]]; then # Split matching regexps over a new line From bcf03647efd8b1685bf077bfab968c327381de2a Mon Sep 17 00:00:00 2001 From: MMotti Date: Wed, 12 Jun 2019 16:02:59 +0100 Subject: [PATCH 118/366] Add comment and remove blankspace Signed-off-by: MMotti --- advanced/Scripts/query.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index aa23ab41..42ea4395 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -149,7 +149,7 @@ if [[ "${#regexList[@]}" -ne 0 ]]; then str_message="${matchType^} found in ${COL_BOLD}Regex list${COL_NC}" # Form a "results" message str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" - + # If we are displaying more than just the source of the block if [[ -z "${blockpage}" ]]; then # Set the wildcard match flag wcMatch=true @@ -162,7 +162,6 @@ if [[ "${#regexList[@]}" -ne 0 ]]; then echo "π Regex list" exit 0 fi - fi fi From c8987e20c239a59653ae00f01348722440e7b564 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Wed, 12 Jun 2019 19:13:29 -0700 Subject: [PATCH 119/366] Use the filtered IPv6 OpenDNS servers The ones we were using previously were not filtered. 
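A quick spot check that the filtered resolvers answer (assumes a host with working IPv6
connectivity and dig installed; the queried name is arbitrary):

  dig +short AAAA example.com @2620:119:35::35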
See https://support.opendns.com/hc/en-us/articles/227986667-Does-OpenDNS-Support-IPv6- Signed-off-by: Mcat12 --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 29e565ed..1e87b943 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -31,7 +31,7 @@ set -e # List of supported DNS servers DNS_SERVERS=$(cat << EOM Google (ECS);8.8.8.8;8.8.4.4;2001:4860:4860:0:0:0:0:8888;2001:4860:4860:0:0:0:0:8844 -OpenDNS (ECS);208.67.222.222;208.67.220.220;2620:0:ccc::2;2620:0:ccd::2 +OpenDNS (ECS);208.67.222.222;208.67.220.220;2620:119:35::35;2620:119:53::53 Level3;4.2.2.1;4.2.2.2;; Comodo;8.26.56.26;8.20.247.20;; DNS.WATCH;84.200.69.80;84.200.70.40;2001:1608:10:25:0:0:1c04:b12f;2001:1608:10:25:0:0:9249:d69b From 4947350ca5d946a66fdcb7952b4ef200d833e779 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 15 Jun 2019 09:06:10 +0200 Subject: [PATCH 120/366] Add arpflush to help and bash autocompletion Signed-off-by: DL6ER --- advanced/bash-completion/pihole | 2 +- pihole | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/advanced/bash-completion/pihole b/advanced/bash-completion/pihole index 7ba0dad8..cea36060 100644 --- a/advanced/bash-completion/pihole +++ b/advanced/bash-completion/pihole @@ -7,7 +7,7 @@ _pihole() { case "${prev}" in "pihole") - opts="admin blacklist checkout chronometer debug disable enable flush help logging query reconfigure regex restartdns status tail uninstall updateGravity updatePihole version wildcard whitelist" + opts="admin blacklist checkout chronometer debug disable enable flush help logging query reconfigure regex restartdns status tail uninstall updateGravity updatePihole version wildcard whitelist arpflush" COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) ;; "whitelist"|"blacklist"|"wildcard"|"regex") diff --git a/pihole b/pihole index d837f873..9fa65a8f 100755 --- a/pihole +++ b/pihole @@ -408,7 +408,8 @@ Options: Add '-h' for more info on disable usage restartdns Restart Pi-hole subsystems checkout Switch Pi-hole subsystems to a different Github branch - Add '-h' for more info on checkout usage"; + Add '-h' for more info on checkout usage + arpflush Flush information stored in Pi-hole's network tables"; exit 0 } From 435a5fb3adb8e38f38772f89260da9cf40441ba0 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 15 Jun 2019 09:12:44 +0200 Subject: [PATCH 121/366] Add pihole arpflush to man page. 
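A rough way to eyeball the rendered entry without installing the page (assumes man-db, whose -l
flag formats a local manual file):

  man -l manpages/pihole.8 | grep -A 2 "arpflush"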
Signed-off-by: DL6ER --- manpages/pihole.8 | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/manpages/pihole.8 b/manpages/pihole.8 index bd7d0933..7a750f4c 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -351,6 +351,12 @@ Switching Pi-hole subsystem branches .br Switch to core development branch .br + +\fBpihole arpflush\fR +.br + Flush information stored in Pi-hole's network tables +.br + .SH "SEE ALSO" \fBlighttpd\fR(8), \fBpihole-FTL\fR(8) From 6996ffa451d4b4911d7394653c840cd738c2a6d6 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 15 Jun 2019 09:15:12 +0200 Subject: [PATCH 122/366] Also flush network_addresses table Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index 10bee0df..aa45f9ad 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -43,6 +43,16 @@ flushARP(){ return 1 fi + # Truncate network_addresses table in pihole-FTL.db + # This needs to be done before we can truncate the network table due to + # foreign key contraints + if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then + echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table" + echo " Database location: ${DBFILE}" + echo " Output: ${output}" + return 1 + fi + # Truncate network table in pihole-FTL.db if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network table" From c53ee4202b4baf58283b965d3cb2d617379bf8a4 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Jun 2019 16:50:51 +0200 Subject: [PATCH 123/366] Add filtering by domain groups to gravity database. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 372a4a29..ee0caa8e 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,3 +1,13 @@ +PRAGMA FOREIGN_KEYS=ON; + +CREATE TABLE domain_groups +( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "enabled" BOOLEAN NOT NULL DEFAULT 1, + "description" TEXT +); +INSERT INTO domain_groups ("id","description") VALUES (0,'Standard group'); + CREATE TABLE whitelist ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -5,7 +15,9 @@ CREATE TABLE whitelist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + group_id INTEGER DEFAULT 0, + comment TEXT, + FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); CREATE TABLE blacklist ( @@ -14,7 +26,9 @@ CREATE TABLE blacklist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + group_id INTEGER DEFAULT 0, + comment TEXT, + FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); CREATE TABLE regex ( @@ -23,7 +37,9 @@ CREATE TABLE regex enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + group_id INTEGER DEFAULT 0, + comment TEXT, + FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); CREATE TABLE adlists ( @@ -52,7 +68,8 @@ CREATE VIEW 
vw_gravity AS SELECT a.domain CREATE VIEW vw_whitelist AS SELECT a.domain FROM whitelist a - WHERE a.enabled == 1 + INNER JOIN domain_groups b ON b.id = a.group_id + WHERE a.enabled = 1 AND b.enabled = 1 ORDER BY a.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -62,7 +79,8 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist CREATE VIEW vw_blacklist AS SELECT a.domain FROM blacklist a - WHERE a.enabled == 1 AND a.domain NOT IN vw_whitelist + INNER JOIN domain_groups b ON b.id = a.group_id + WHERE a.enabled = 1 AND a.domain NOT IN vw_whitelist AND b.enabled = 1 ORDER BY a.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -72,7 +90,8 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist CREATE VIEW vw_regex AS SELECT a.domain FROM regex a - WHERE a.enabled == 1 + INNER JOIN domain_groups b ON b.id = a.group_id + WHERE a.enabled = 1 AND b.enabled = 1 ORDER BY a.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex From cbc210b0145415a6aedfc28c6ae266fd0a85c3ec Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Jun 2019 17:05:33 +0200 Subject: [PATCH 124/366] Add adlist_groups and mark group_id columns as NOT NULL. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 22 +++++++++++++++++----- gravity.sh | 2 +- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index ee0caa8e..e22cc44b 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -15,7 +15,7 @@ CREATE TABLE whitelist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER DEFAULT 0, + group_id INTEGER NOT NULL DEFAULT 0, comment TEXT, FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); @@ -26,7 +26,7 @@ CREATE TABLE blacklist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER DEFAULT 0, + group_id INTEGER NOT NULL DEFAULT 0, comment TEXT, FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); @@ -37,10 +37,19 @@ CREATE TABLE regex enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER DEFAULT 0, + group_id INTEGER NOT NULL DEFAULT 0, comment TEXT, FOREIGN KEY (group_id) REFERENCES domain_groups(id) ); + +CREATE TABLE adlist_groups +( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "enabled" BOOLEAN NOT NULL DEFAULT 1, + "description" TEXT +); +INSERT INTO adlist_groups ("id","description") VALUES (0,'Standard group'); + CREATE TABLE adlists ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -48,7 +57,9 @@ CREATE TABLE adlists enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + group_id INTEGER NOT NULL DEFAULT 0, + comment TEXT, + FOREIGN KEY (group_id) REFERENCES adlist_groups(id) ); CREATE TABLE gravity ( @@ -101,7 +112,8 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex CREATE VIEW vw_adlists AS SELECT a.address FROM adlists a - WHERE a.enabled == 1 + INNER JOIN adlist_groups b ON b.id = a.group_id + WHERE a.enabled = 1 AND b.enabled = 1 ORDER BY a.id; 
CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists diff --git a/gravity.sh b/gravity.sh index 1fe2b4e3..ce7aa6ba 100755 --- a/gravity.sh +++ b/gravity.sh @@ -120,7 +120,7 @@ database_table_from_file() { do # Only add non-empty lines if [[ ! -z "${domain}" ]]; then - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},0,\"Migrated from ${source}\"" >> "${tmpFile}" rowid+=1 fi done From 01850c2128f2f9889db352028d1c296247153ca5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Jun 2019 17:15:36 +0200 Subject: [PATCH 125/366] Automatically recreate standard group after deletion to avoid violating foreign key constraints even if foreign key enforcing is disabled (SQLite's default behavior) Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index e22cc44b..5fb9a5eb 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -7,6 +7,10 @@ CREATE TABLE domain_groups "description" TEXT ); INSERT INTO domain_groups ("id","description") VALUES (0,'Standard group'); +CREATE TRIGGER domain_groups_standard_group AFTER DELETE ON domain_groups WHEN OLD.id = 0 + BEGIN + INSERT INTO domain_groups ("id","description") VALUES (0,'Standard group'); + END; CREATE TABLE whitelist ( @@ -49,6 +53,10 @@ CREATE TABLE adlist_groups "description" TEXT ); INSERT INTO adlist_groups ("id","description") VALUES (0,'Standard group'); +CREATE TRIGGER adlist_groups_standard_group AFTER DELETE ON adlist_groups WHEN OLD.id = 0 + BEGIN + INSERT INTO adlist_groups ("id","description") VALUES (0,'Standard group'); + END; CREATE TABLE adlists ( From 0774c4e5cac3acd7955ef9a53d19f0a4ed69c13b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Jun 2019 18:52:23 +0200 Subject: [PATCH 126/366] vw_gravity should actually filter using vw_whitelist. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 5fb9a5eb..f4f9cf4c 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -83,7 +83,7 @@ INSERT INTO info VALUES("version","1"); CREATE VIEW vw_gravity AS SELECT a.domain FROM gravity a - WHERE a.domain NOT IN (SELECT domain from whitelist WHERE enabled == 1); + WHERE a.domain NOT IN (SELECT domain from vw_whitelist); CREATE VIEW vw_whitelist AS SELECT a.domain FROM whitelist a From d67122dffc82456040960abb62c02b8a7103ab22 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Sun, 16 Jun 2019 11:13:23 -0700 Subject: [PATCH 127/366] Pin the Fedora test docker container to 29 The tests are currently failing due to a change in Fedora 30, which we are not currently supporting. 
Signed-off-by: Mcat12 --- test/fedora.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fedora.Dockerfile b/test/fedora.Dockerfile index c4834388..8c27bbcc 100644 --- a/test/fedora.Dockerfile +++ b/test/fedora.Dockerfile @@ -1,4 +1,4 @@ -FROM fedora:latest +FROM fedora:29 ENV GITDIR /etc/.pihole ENV SCRIPTDIR /opt/pihole From b6700924b23a580a5286d8af343e205ae2d807d5 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 17 Jun 2019 16:13:31 -0700 Subject: [PATCH 128/366] Fix uninstall causing 403 errors and not removing packages The 403 lighttpd errors were caused by removing the lighttpd config directory and not removing lighttpd itself. This caused a subsequent Pi-hole reinstall to not have all of the required lighttpd config files. The error while removing packages was caused by combining arguments into a string instead of listing each argument. Signed-off-by: Mcat12 --- automated install/uninstall.sh | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index 732fc246..8e5c5455 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -80,7 +80,7 @@ removeAndPurge() { case ${yn} in [Yy]* ) echo -ne " ${INFO} Removing ${i}..."; - ${SUDO} "${PKG_REMOVE} ${i}" &> /dev/null; + ${SUDO} ${PKG_REMOVE} "${i}" &> /dev/null; echo -e "${OVER} ${INFO} Removed ${i}"; break;; [Nn]* ) echo -e " ${INFO} Skipped ${i}"; break;; @@ -132,12 +132,15 @@ removeNoPurge() { fi if package_check lighttpd > /dev/null; then - ${SUDO} rm -rf /etc/lighttpd/ &> /dev/null - echo -e " ${TICK} Removed lighttpd" - else - if [ -f /etc/lighttpd/lighttpd.conf.orig ]; then + if [[ -f /etc/lighttpd/lighttpd.conf.orig ]]; then ${SUDO} mv /etc/lighttpd/lighttpd.conf.orig /etc/lighttpd/lighttpd.conf fi + + if [[ -f /etc/lighttpd/external.conf ]]; then + ${SUDO} rm /etc/lighttpd/external.conf + fi + + echo -e " ${TICK} Removed lighttpd configs" fi ${SUDO} rm -f /etc/dnsmasq.d/adList.conf &> /dev/null From 951732fc1b07823e8f7c6b4cbfd28f753f1a8e50 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 17 Jun 2019 16:28:55 -0700 Subject: [PATCH 129/366] Use an array for PKG_REMOVE Fixes shellcheck warning. 
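The reasoning above is the usual shell rule that a command plus its arguments is safer kept in an array than in a string. A short illustration (not repository code) of why the quoted string form fails while the array form expands correctly, as the follow-up commit below does at the call site:

    PKG_REMOVE_STR="apt-get -y remove --purge"
    PKG_REMOVE_ARR=(apt-get -y remove --purge)

    "${PKG_REMOVE_STR}" some-package      # fails: no program named "apt-get -y remove --purge"
    ${PKG_REMOVE_STR} some-package        # relies on unquoted word splitting (ShellCheck SC2086)
    "${PKG_REMOVE_ARR[@]}" some-package   # expands to separate, individually quoted words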
Signed-off-by: Mcat12 --- automated install/uninstall.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index 8e5c5455..1f58b887 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -55,13 +55,13 @@ fi # Compatability if [ -x "$(command -v apt-get)" ]; then # Debian Family - PKG_REMOVE="${PKG_MANAGER} -y remove --purge" + PKG_REMOVE=("${PKG_MANAGER}" -y remove --purge) package_check() { dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -c "ok installed" } elif [ -x "$(command -v rpm)" ]; then # Fedora Family - PKG_REMOVE="${PKG_MANAGER} remove -y" + PKG_REMOVE=("${PKG_MANAGER}" remove -y) package_check() { rpm -qa | grep "^$1-" > /dev/null } @@ -80,7 +80,7 @@ removeAndPurge() { case ${yn} in [Yy]* ) echo -ne " ${INFO} Removing ${i}..."; - ${SUDO} ${PKG_REMOVE} "${i}" &> /dev/null; + ${SUDO} "${PKG_REMOVE}" "${i}" &> /dev/null; echo -e "${OVER} ${INFO} Removed ${i}"; break;; [Nn]* ) echo -e " ${INFO} Skipped ${i}"; break;; From 974bba4a45427e628165e6e229984b810dcec2de Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 17 Jun 2019 16:30:26 -0700 Subject: [PATCH 130/366] Fix PKG_REMOVE array usage Signed-off-by: Mcat12 --- automated install/uninstall.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index 1f58b887..2d6837b4 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -80,7 +80,7 @@ removeAndPurge() { case ${yn} in [Yy]* ) echo -ne " ${INFO} Removing ${i}..."; - ${SUDO} "${PKG_REMOVE}" "${i}" &> /dev/null; + ${SUDO} "${PKG_REMOVE[@]}" "${i}" &> /dev/null; echo -e "${OVER} ${INFO} Removed ${i}"; break;; [Nn]* ) echo -e " ${INFO} Skipped ${i}"; break;; From d5e8f1a781bd21caec692b40949fb3af436c5c07 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 26 Jun 2019 18:20:36 +0200 Subject: [PATCH 131/366] Add 10 seconds timeout to the sqlite3 command that writes tables to the gravity database. This prevents gravity from failing when the database file is locked for a short time. Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 1fe2b4e3..1c142ffd 100755 --- a/gravity.sh +++ b/gravity.sh @@ -129,7 +129,7 @@ database_table_from_file() { # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 10000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then From 91a2d052a7975d543e35d2d38aa65293e3b11e46 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Fri, 28 Jun 2019 20:49:56 -0700 Subject: [PATCH 132/366] Fix pihole -up showing FTL update when network is down Fixes #1877 Signed-off-by: Mcat12 --- automated install/basic-install.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 1e87b943..baeaaa9d 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2402,6 +2402,11 @@ FTLcheckUpdate() { local FTLlatesttag FTLlatesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep 'Location' | awk -F '/' '{print $NF}' | tr -d '\r\n') + if [[ $? != 0 ]]; then + # There was an issue while retrieving the latest version + return 3 + fi + if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then return 0 else From 37e7cd5211516e87fe84e5b908b93ae9aee6e2d5 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Fri, 28 Jun 2019 20:57:05 -0700 Subject: [PATCH 133/366] Fix ShellCheck issue by refactoring a bit Signed-off-by: Mcat12 --- automated install/basic-install.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index baeaaa9d..380b424f 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2399,14 +2399,16 @@ FTLcheckUpdate() { if [[ ${ftlLoc} ]]; then local FTLversion FTLversion=$(/usr/bin/pihole-FTL tag) + local FTLreleaseData local FTLlatesttag - FTLlatesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep 'Location' | awk -F '/' '{print $NF}' | tr -d '\r\n') - if [[ $? != 0 ]]; then + if ! FTLreleaseData=$(curl -sI https://github.com/pi-hole/FTL/releases/latest); then # There was an issue while retrieving the latest version return 3 fi + FTLlatesttag=$(grep 'Location' < "${FTLreleaseData}" | awk -F '/' '{print $NF}' | tr -d '\r\n') + if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then return 0 else From acee97916e3d9b22a8afb81230303c9a90a745a3 Mon Sep 17 00:00:00 2001 From: Andreas Kurth Date: Sun, 30 Jun 2019 08:02:51 +0200 Subject: [PATCH 134/366] Fix pihole manpage to match code. The dry-run argument to pihole -up is "--check-only", not "--checkonly". Signed-off-by: Andreas Kurth --- manpages/pihole.8 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manpages/pihole.8 b/manpages/pihole.8 index bd7d0933..b3008563 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -35,7 +35,7 @@ pihole -g\fR .br \fBpihole\fR \fB-l\fR (\fBon|off|off noflush\fR) .br -\fBpihole -up \fR[--checkonly] +\fBpihole -up \fR[--check-only] .br \fBpihole -v\fR [-p|-a|-f] [-c|-l|-hash] .br From 87c115fc86bd0365e82d5b46f911c0c52b228463 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 18:58:47 +0200 Subject: [PATCH 135/366] Change implementation to use linking tables (domain_groups, adlists_groups). This allows adding domains to multiple lists conveniently. 
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 121 +++++++++++++++++------------- gravity.sh | 2 +- 2 files changed, 70 insertions(+), 53 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index f4f9cf4c..77de31aa 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -2,15 +2,10 @@ PRAGMA FOREIGN_KEYS=ON; CREATE TABLE domain_groups ( - "id" INTEGER PRIMARY KEY AUTOINCREMENT, - "enabled" BOOLEAN NOT NULL DEFAULT 1, - "description" TEXT + id INTEGER PRIMARY KEY AUTOINCREMENT, + enabled BOOLEAN NOT NULL DEFAULT 1, + description TEXT ); -INSERT INTO domain_groups ("id","description") VALUES (0,'Standard group'); -CREATE TRIGGER domain_groups_standard_group AFTER DELETE ON domain_groups WHEN OLD.id = 0 - BEGIN - INSERT INTO domain_groups ("id","description") VALUES (0,'Standard group'); - END; CREATE TABLE whitelist ( @@ -19,10 +14,16 @@ CREATE TABLE whitelist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER NOT NULL DEFAULT 0, - comment TEXT, - FOREIGN KEY (group_id) REFERENCES domain_groups(id) + comment TEXT +); + +CREATE TABLE whitelist_by_group +( + whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), + group_id INTEGER NOT NULL REFERENCES domain_groups (id), + PRIMARY KEY (whitelist_id, group_id) ); + CREATE TABLE blacklist ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -30,10 +31,16 @@ CREATE TABLE blacklist enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER NOT NULL DEFAULT 0, - comment TEXT, - FOREIGN KEY (group_id) REFERENCES domain_groups(id) + comment TEXT +); + +CREATE TABLE blacklist_by_group +( + blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), + group_id INTEGER NOT NULL REFERENCES domain_groups (id), + PRIMARY KEY (blacklist_id, group_id) ); + CREATE TABLE regex ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -41,22 +48,22 @@ CREATE TABLE regex enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER NOT NULL DEFAULT 0, - comment TEXT, - FOREIGN KEY (group_id) REFERENCES domain_groups(id) + comment TEXT ); -CREATE TABLE adlist_groups +CREATE TABLE regex_by_group ( - "id" INTEGER PRIMARY KEY AUTOINCREMENT, - "enabled" BOOLEAN NOT NULL DEFAULT 1, - "description" TEXT + regex_id INTEGER NOT NULL REFERENCES regex (id), + group_id INTEGER NOT NULL REFERENCES domain_groups (id), + PRIMARY KEY (regex_id, group_id) +); + +CREATE TABLE adlists_groups +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + enabled BOOLEAN NOT NULL DEFAULT 1, + description TEXT ); -INSERT INTO adlist_groups ("id","description") VALUES (0,'Standard group'); -CREATE TRIGGER adlist_groups_standard_group AFTER DELETE ON adlist_groups WHEN OLD.id = 0 - BEGIN - INSERT INTO adlist_groups ("id","description") VALUES (0,'Standard group'); - END; CREATE TABLE adlists ( @@ -65,10 +72,16 @@ CREATE TABLE adlists enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - group_id INTEGER NOT NULL DEFAULT 0, - comment TEXT, - FOREIGN KEY (group_id) 
REFERENCES adlist_groups(id) + comment TEXT ); + +CREATE TABLE adlists_by_group +( + adlists_id INTEGER NOT NULL REFERENCES adlists (id), + group_id INTEGER NOT NULL REFERENCES adlists_groups (id), + PRIMARY KEY (adlists_id, group_id) +); + CREATE TABLE gravity ( domain TEXT PRIMARY KEY @@ -81,48 +94,52 @@ CREATE TABLE info INSERT INTO info VALUES("version","1"); -CREATE VIEW vw_gravity AS SELECT a.domain - FROM gravity a - WHERE a.domain NOT IN (SELECT domain from vw_whitelist); +CREATE VIEW vw_gravity AS SELECT domain + FROM gravity + WHERE domain NOT IN (SELECT domain from vw_whitelist); -CREATE VIEW vw_whitelist AS SELECT a.domain - FROM whitelist a - INNER JOIN domain_groups b ON b.id = a.group_id - WHERE a.enabled = 1 AND b.enabled = 1 - ORDER BY a.id; +CREATE VIEW vw_whitelist AS SELECT domain + FROM whitelist + LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id + LEFT JOIN domain_groups ON domain_groups.id = whitelist_by_group.group_id + WHERE whitelist.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + ORDER BY whitelist.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist BEGIN UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_blacklist AS SELECT a.domain - FROM blacklist a - INNER JOIN domain_groups b ON b.id = a.group_id - WHERE a.enabled = 1 AND a.domain NOT IN vw_whitelist AND b.enabled = 1 - ORDER BY a.id; +CREATE VIEW vw_blacklist AS SELECT domain + FROM blacklist + LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id + LEFT JOIN domain_groups ON domain_groups.id = blacklist_by_group.group_id + WHERE blacklist.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + ORDER BY blacklist.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist BEGIN UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_regex AS SELECT a.domain - FROM regex a - INNER JOIN domain_groups b ON b.id = a.group_id - WHERE a.enabled = 1 AND b.enabled = 1 - ORDER BY a.id; +CREATE VIEW vw_regex AS SELECT domain + FROM regex + LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id + LEFT JOIN domain_groups ON domain_groups.id = regex_by_group.group_id + WHERE regex.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + ORDER BY regex.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex BEGIN UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_adlists AS SELECT a.address - FROM adlists a - INNER JOIN adlist_groups b ON b.id = a.group_id - WHERE a.enabled = 1 AND b.enabled = 1 - ORDER BY a.id; +CREATE VIEW vw_adlists AS SELECT address + FROM adlists + LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id + LEFT JOIN adlists_groups ON adlists_groups.id = adlists_by_group.group_id + WHERE adlists.enabled = 1 AND adlists_groups.enabled IS NULL OR adlists_groups.enabled == 1 + ORDER BY adlists.id; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists BEGIN diff --git a/gravity.sh b/gravity.sh index ce7aa6ba..1fe2b4e3 100755 --- a/gravity.sh +++ b/gravity.sh @@ -120,7 +120,7 @@ database_table_from_file() { do # Only add non-empty lines if [[ ! 
-z "${domain}" ]]; then - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},0,\"Migrated from ${source}\"" >> "${tmpFile}" + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" rowid+=1 fi done From 94d83dbb2d410c0303ec09ab7e5365cb4772cb10 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 19:33:02 +0200 Subject: [PATCH 136/366] Unify the two group tables into a single "groups" table. This allows managing adlists and domains together. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 33 ++++++++++++------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 77de31aa..dfb486a8 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,6 @@ PRAGMA FOREIGN_KEYS=ON; -CREATE TABLE domain_groups +CREATE TABLE groups ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, @@ -20,7 +20,7 @@ CREATE TABLE whitelist CREATE TABLE whitelist_by_group ( whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), - group_id INTEGER NOT NULL REFERENCES domain_groups (id), + group_id INTEGER NOT NULL REFERENCES groups (id), PRIMARY KEY (whitelist_id, group_id) ); @@ -37,7 +37,7 @@ CREATE TABLE blacklist CREATE TABLE blacklist_by_group ( blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), - group_id INTEGER NOT NULL REFERENCES domain_groups (id), + group_id INTEGER NOT NULL REFERENCES groups (id), PRIMARY KEY (blacklist_id, group_id) ); @@ -54,17 +54,10 @@ CREATE TABLE regex CREATE TABLE regex_by_group ( regex_id INTEGER NOT NULL REFERENCES regex (id), - group_id INTEGER NOT NULL REFERENCES domain_groups (id), + group_id INTEGER NOT NULL REFERENCES groups (id), PRIMARY KEY (regex_id, group_id) ); -CREATE TABLE adlists_groups -( - id INTEGER PRIMARY KEY AUTOINCREMENT, - enabled BOOLEAN NOT NULL DEFAULT 1, - description TEXT -); - CREATE TABLE adlists ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -78,7 +71,7 @@ CREATE TABLE adlists CREATE TABLE adlists_by_group ( adlists_id INTEGER NOT NULL REFERENCES adlists (id), - group_id INTEGER NOT NULL REFERENCES adlists_groups (id), + group_id INTEGER NOT NULL REFERENCES groups (id), PRIMARY KEY (adlists_id, group_id) ); @@ -101,8 +94,8 @@ CREATE VIEW vw_gravity AS SELECT domain CREATE VIEW vw_whitelist AS SELECT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id - LEFT JOIN domain_groups ON domain_groups.id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + LEFT JOIN groups ON groups.id = whitelist_by_group.group_id + WHERE whitelist.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 ORDER BY whitelist.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -113,8 +106,8 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist CREATE VIEW vw_blacklist AS SELECT domain FROM blacklist LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id - LEFT JOIN domain_groups ON domain_groups.id = blacklist_by_group.group_id - WHERE blacklist.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + LEFT JOIN groups ON groups.id = blacklist_by_group.group_id + WHERE blacklist.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 ORDER BY blacklist.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -125,8 +118,8 @@ CREATE TRIGGER 
tr_blacklist_update AFTER UPDATE ON blacklist CREATE VIEW vw_regex AS SELECT domain FROM regex LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id - LEFT JOIN domain_groups ON domain_groups.id = regex_by_group.group_id - WHERE regex.enabled = 1 AND domain_groups.enabled IS NULL OR domain_groups.enabled == 1 + LEFT JOIN groups ON groups.id = regex_by_group.group_id + WHERE regex.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 ORDER BY regex.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex @@ -137,8 +130,8 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex CREATE VIEW vw_adlists AS SELECT address FROM adlists LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id - LEFT JOIN adlists_groups ON adlists_groups.id = adlists_by_group.group_id - WHERE adlists.enabled = 1 AND adlists_groups.enabled IS NULL OR adlists_groups.enabled == 1 + LEFT JOIN groups ON groups.id = adlists_by_group.group_id + WHERE adlists.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 ORDER BY adlists.id; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists From 6fe637b9ee18e657c2806ba2ba3d1c86c5e8b2bc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 19:43:18 +0200 Subject: [PATCH 137/366] Rename groups -> group Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index dfb486a8..3c2562b4 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,6 @@ PRAGMA FOREIGN_KEYS=ON; -CREATE TABLE groups +CREATE TABLE group ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, @@ -20,7 +20,7 @@ CREATE TABLE whitelist CREATE TABLE whitelist_by_group ( whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), - group_id INTEGER NOT NULL REFERENCES groups (id), + group_id INTEGER NOT NULL REFERENCES group (id), PRIMARY KEY (whitelist_id, group_id) ); @@ -37,7 +37,7 @@ CREATE TABLE blacklist CREATE TABLE blacklist_by_group ( blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), - group_id INTEGER NOT NULL REFERENCES groups (id), + group_id INTEGER NOT NULL REFERENCES group (id), PRIMARY KEY (blacklist_id, group_id) ); @@ -54,7 +54,7 @@ CREATE TABLE regex CREATE TABLE regex_by_group ( regex_id INTEGER NOT NULL REFERENCES regex (id), - group_id INTEGER NOT NULL REFERENCES groups (id), + group_id INTEGER NOT NULL REFERENCES group (id), PRIMARY KEY (regex_id, group_id) ); @@ -71,7 +71,7 @@ CREATE TABLE adlists CREATE TABLE adlists_by_group ( adlists_id INTEGER NOT NULL REFERENCES adlists (id), - group_id INTEGER NOT NULL REFERENCES groups (id), + group_id INTEGER NOT NULL REFERENCES group (id), PRIMARY KEY (adlists_id, group_id) ); @@ -94,8 +94,8 @@ CREATE VIEW vw_gravity AS SELECT domain CREATE VIEW vw_whitelist AS SELECT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id - LEFT JOIN groups ON groups.id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 + LEFT JOIN group ON group.id = whitelist_by_group.group_id + WHERE whitelist.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 ORDER BY whitelist.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -106,8 +106,8 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist CREATE VIEW vw_blacklist AS SELECT domain FROM blacklist LEFT JOIN 
blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id - LEFT JOIN groups ON groups.id = blacklist_by_group.group_id - WHERE blacklist.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 + LEFT JOIN group ON group.id = blacklist_by_group.group_id + WHERE blacklist.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 ORDER BY blacklist.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -118,8 +118,8 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist CREATE VIEW vw_regex AS SELECT domain FROM regex LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id - LEFT JOIN groups ON groups.id = regex_by_group.group_id - WHERE regex.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 + LEFT JOIN group ON group.id = regex_by_group.group_id + WHERE regex.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 ORDER BY regex.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex @@ -130,8 +130,8 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex CREATE VIEW vw_adlists AS SELECT address FROM adlists LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id - LEFT JOIN groups ON groups.id = adlists_by_group.group_id - WHERE adlists.enabled = 1 AND groups.enabled IS NULL OR groups.enabled == 1 + LEFT JOIN group ON group.id = adlists_by_group.group_id + WHERE adlists.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 ORDER BY adlists.id; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists From 5b01facd73f52ccb248a65109b45a62e327c597d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 20:03:22 +0200 Subject: [PATCH 138/366] Fix logix on the views: AND ( OR ) Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 3c2562b4..66df7111 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,6 +1,6 @@ PRAGMA FOREIGN_KEYS=ON; -CREATE TABLE group +CREATE TABLE "group" ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, @@ -20,7 +20,7 @@ CREATE TABLE whitelist CREATE TABLE whitelist_by_group ( whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), - group_id INTEGER NOT NULL REFERENCES group (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), PRIMARY KEY (whitelist_id, group_id) ); @@ -37,7 +37,7 @@ CREATE TABLE blacklist CREATE TABLE blacklist_by_group ( blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), - group_id INTEGER NOT NULL REFERENCES group (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), PRIMARY KEY (blacklist_id, group_id) ); @@ -54,7 +54,7 @@ CREATE TABLE regex CREATE TABLE regex_by_group ( regex_id INTEGER NOT NULL REFERENCES regex (id), - group_id INTEGER NOT NULL REFERENCES group (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), PRIMARY KEY (regex_id, group_id) ); @@ -71,7 +71,7 @@ CREATE TABLE adlists CREATE TABLE adlists_by_group ( adlists_id INTEGER NOT NULL REFERENCES adlists (id), - group_id INTEGER NOT NULL REFERENCES group (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), PRIMARY KEY (adlists_id, group_id) ); @@ -94,8 +94,8 @@ CREATE VIEW vw_gravity AS SELECT domain CREATE VIEW vw_whitelist AS SELECT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id - LEFT JOIN group ON group.id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND 
group.enabled IS NULL OR group.enabled = 1 + LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id + WHERE whitelist.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) ORDER BY whitelist.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -106,8 +106,8 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist CREATE VIEW vw_blacklist AS SELECT domain FROM blacklist LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id - LEFT JOIN group ON group.id = blacklist_by_group.group_id - WHERE blacklist.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 + LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id + WHERE blacklist.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) ORDER BY blacklist.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -118,8 +118,8 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist CREATE VIEW vw_regex AS SELECT domain FROM regex LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id - LEFT JOIN group ON group.id = regex_by_group.group_id - WHERE regex.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 + LEFT JOIN "group" ON "group".id = regex_by_group.group_id + WHERE regex.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) ORDER BY regex.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex @@ -130,8 +130,8 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex CREATE VIEW vw_adlists AS SELECT address FROM adlists LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id - LEFT JOIN group ON group.id = adlists_by_group.group_id - WHERE adlists.enabled = 1 AND group.enabled IS NULL OR group.enabled = 1 + LEFT JOIN "group" ON "group".id = adlists_by_group.group_id + WHERE adlists.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) ORDER BY adlists.id; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists From ad97e95f2fbf101011ac053793dc72da15568c99 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 23:06:15 +0200 Subject: [PATCH 139/366] Add required name column in group table. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 66df7111..9be4bbf9 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -4,6 +4,7 @@ CREATE TABLE "group" ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, + name TEXT NOT NULL, description TEXT ); From 3a14e8b013a0ebc5e8133d78153920d6326b7fd9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 23:07:03 +0200 Subject: [PATCH 140/366] Check whitelist_by_group.group_id instead of group.enabled against NULL. 
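A small sqlite3 sketch of the view semantics the change below clarifies: an entry with no group assignment stays visible, while an entry linked only to a disabled group is filtered out. Table names match the schema at this point in the series; the database path and the example rows are illustrative:

    sqlite3 /etc/pihole/gravity.db "INSERT INTO whitelist (domain) VALUES ('ungrouped.example'), ('grouped.example');"
    sqlite3 /etc/pihole/gravity.db "INSERT INTO \"group\" (enabled, name) VALUES (0, 'Disabled group');"
    sqlite3 /etc/pihole/gravity.db "INSERT INTO whitelist_by_group (whitelist_id, group_id) VALUES (2, 1);"
    sqlite3 /etc/pihole/gravity.db "SELECT domain FROM vw_whitelist;"   # prints only: ungrouped.example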
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 9be4bbf9..a5fd5bef 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -96,7 +96,7 @@ CREATE VIEW vw_whitelist AS SELECT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) + WHERE whitelist.enabled = 1 AND (whitelist_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY whitelist.id; CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist @@ -108,7 +108,7 @@ CREATE VIEW vw_blacklist AS SELECT domain FROM blacklist LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id - WHERE blacklist.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) + WHERE blacklist.enabled = 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY blacklist.id; CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist @@ -120,7 +120,7 @@ CREATE VIEW vw_regex AS SELECT domain FROM regex LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id LEFT JOIN "group" ON "group".id = regex_by_group.group_id - WHERE regex.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) + WHERE regex.enabled = 1 AND (regex_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY regex.id; CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex @@ -132,7 +132,7 @@ CREATE VIEW vw_adlists AS SELECT address FROM adlists LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id LEFT JOIN "group" ON "group".id = adlists_by_group.group_id - WHERE adlists.enabled = 1 AND ("group".enabled IS NULL OR "group".enabled = 1) + WHERE adlists.enabled = 1 AND (adlists_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY adlists.id; CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists From f91421418ad3817023b53585a351a2778b644034 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 30 Jun 2019 23:21:10 +0200 Subject: [PATCH 141/366] Rename adlists -> adlist table. 
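The patch below renames the table (together with its view and trigger) in the schema template and adjusts gravity.sh accordingly. For an already-created database the equivalent rename could be expressed with SQLite's ALTER TABLE; shown only as an illustration, this command is not part of the patch:

    sqlite3 /etc/pihole/gravity.db "ALTER TABLE adlists RENAME TO adlist;"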
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 24 ++++++++++++------------ gravity.sh | 10 +++++----- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index a5fd5bef..3cbc7b8f 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -59,7 +59,7 @@ CREATE TABLE regex_by_group PRIMARY KEY (regex_id, group_id) ); -CREATE TABLE adlists +CREATE TABLE adlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, address TEXT UNIQUE NOT NULL, @@ -69,11 +69,11 @@ CREATE TABLE adlists comment TEXT ); -CREATE TABLE adlists_by_group +CREATE TABLE adlist_by_group ( - adlists_id INTEGER NOT NULL REFERENCES adlists (id), + adlist_id INTEGER NOT NULL REFERENCES adlist (id), group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (adlists_id, group_id) + PRIMARY KEY (adlist_id, group_id) ); CREATE TABLE gravity @@ -128,15 +128,15 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_adlists AS SELECT address - FROM adlists - LEFT JOIN adlists_by_group ON adlists_by_group.adlists_id = adlists.id - LEFT JOIN "group" ON "group".id = adlists_by_group.group_id - WHERE adlists.enabled = 1 AND (adlists_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY adlists.id; +CREATE VIEW vw_adlist AS SELECT address + FROM adlist + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY adlist.id; -CREATE TRIGGER tr_adlists_update AFTER UPDATE ON adlists +CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist BEGIN - UPDATE adlists SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; + UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; END; diff --git a/gravity.sh b/gravity.sh index 1fe2b4e3..d2436002 100755 --- a/gravity.sh +++ b/gravity.sh @@ -111,7 +111,7 @@ database_table_from_file() { # No need to modify the input data for the gravity table inputfile="${source}" else - # Apply format for white-, blacklist, regex, and adlists tables + # Apply format for white-, blacklist, regex, and adlist tables local rowid declare -i rowid rowid=1 @@ -159,9 +159,9 @@ migrate_to_database() { # Migrate list files to new database if [[ -e "${adListFile}" ]]; then - # Store adlists domains in database + # Store adlist domains in database echo -e " ${INFO} Migrating content of ${adListFile} into new database" - database_table_from_file "adlists" "${adListFile}" + database_table_from_file "adlist" "${adListFile}" fi if [[ -e "${blacklistFile}" ]]; then # Store blacklisted domains in database @@ -236,13 +236,13 @@ gravity_CheckDNSResolutionAvailable() { gravity_CheckDNSResolutionAvailable } -# Retrieve blocklist URLs and parse domains from adlists.list +# Retrieve blocklist URLs and parse domains from adlist.list gravity_GetBlocklistUrls() { echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..." 
# Retrieve source URLs from gravity database # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true) - mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)" + mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( From 7b2ac25a52b71fae5aac79742ad41c263cb402f0 Mon Sep 17 00:00:00 2001 From: Jeroen Baert <3607063+Forceflow@users.noreply.github.com> Date: Mon, 1 Jul 2019 03:42:02 +0200 Subject: [PATCH 142/366] Fix for 404 error when browsing to pi.hole (without /admin) Signed-off-by: Jeroen Baert <3607063+Forceflow@users.noreply.github.com> --- advanced/lighttpd.conf.debian | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/lighttpd.conf.debian b/advanced/lighttpd.conf.debian index 47bdbee0..2215bbdb 100644 --- a/advanced/lighttpd.conf.debian +++ b/advanced/lighttpd.conf.debian @@ -27,7 +27,7 @@ server.modules = ( ) server.document-root = "/var/www/html" -server.error-handler-404 = "pihole/index.php" +server.error-handler-404 = "/pihole/index.php" server.upload-dirs = ( "/var/cache/lighttpd/uploads" ) server.errorlog = "/var/log/lighttpd/error.log" server.pid-file = "/var/run/lighttpd.pid" From 23b3a9a650d49afd8a1fd945f88415c659cce0d9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 1 Jul 2019 09:44:20 +0200 Subject: [PATCH 143/366] Add DISTINCT to the view\'s SELECT queries to avoid domain duplication in case they are enabled in multiple groups at the same time. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 3cbc7b8f..91a38ef7 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -92,7 +92,7 @@ CREATE VIEW vw_gravity AS SELECT domain FROM gravity WHERE domain NOT IN (SELECT domain from vw_whitelist); -CREATE VIEW vw_whitelist AS SELECT domain +CREATE VIEW vw_whitelist AS SELECT DISTINCT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id @@ -104,7 +104,7 @@ CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_blacklist AS SELECT domain +CREATE VIEW vw_blacklist AS SELECT DISTINCT domain FROM blacklist LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id @@ -116,7 +116,7 @@ CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_regex AS SELECT domain +CREATE VIEW vw_regex AS SELECT DISTINCT domain FROM regex LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id LEFT JOIN "group" ON "group".id = regex_by_group.group_id @@ -128,7 +128,7 @@ CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; -CREATE VIEW vw_adlist AS SELECT address +CREATE VIEW vw_adlist AS SELECT DISTINCT address FROM adlist LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id LEFT JOIN "group" ON "group".id = adlist_by_group.group_id From 
9fedafed1595a2cb5fd9250b1e00e8088912e0da Mon Sep 17 00:00:00 2001 From: Jeroen Baert <3607063+Forceflow@users.noreply.github.com> Date: Wed, 3 Jul 2019 04:28:23 +0200 Subject: [PATCH 144/366] Fix 404 error when browsing to pi.hole (without /admin) (for fedora) Signed-off-by: Jeroen Baert <3607063+Forceflow@users.noreply.github.com> --- advanced/lighttpd.conf.fedora | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/lighttpd.conf.fedora b/advanced/lighttpd.conf.fedora index 27bc33cc..4232c90f 100644 --- a/advanced/lighttpd.conf.fedora +++ b/advanced/lighttpd.conf.fedora @@ -28,7 +28,7 @@ server.modules = ( ) server.document-root = "/var/www/html" -server.error-handler-404 = "pihole/index.php" +server.error-handler-404 = "/pihole/index.php" server.upload-dirs = ( "/var/cache/lighttpd/uploads" ) server.errorlog = "/var/log/lighttpd/error.log" server.pid-file = "/var/run/lighttpd.pid" From 1fe3507bc5e5e76109eecd9bd025dd024abf858f Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 3 Jul 2019 07:56:10 +0200 Subject: [PATCH 145/366] Adlists table was renamed to Adlist. Adapt further places in the code to match the new name. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- advanced/Scripts/query.sh | 2 +- advanced/Scripts/webpage.sh | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index b31bbdc5..2bfeef2d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1077,7 +1077,7 @@ show_db_entries() { } show_adlists() { - show_db_entries "Adlists" "SELECT * FROM adlists" "4 100 7 10 13 50" + show_db_entries "Adlists" "SELECT * FROM adlist" "4 100 7 10 13 50" } show_whitelist() { diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 42ea4395..af5ae0a8 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -201,7 +201,7 @@ fi # Get adlist file content as array if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then # Retrieve source URLs from gravity database - mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)" + mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" fi # Print "Exact matches for" title diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 583579b6..f744d1b9 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -400,13 +400,13 @@ CustomizeAdLists() { address="${args[3]}" if [[ "${args[2]}" == "enable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlists SET enabled = 1 WHERE address = '${address}'" + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" elif [[ "${args[2]}" == "disable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlists SET enabled = 0 WHERE address = '${address}'" + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" elif [[ "${args[2]}" == "add" ]]; then - sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlists (address) VALUES ('${address}')" + sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address) VALUES ('${address}')" elif [[ "${args[2]}" == "del" ]]; then - sqlite3 "${gravityDBfile}" "DELETE FROM adlists WHERE address = '${address}'" + sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" else echo "Not permitted" return 1 From 04abcb71f62acb12c60f79de61db510e29fcb42c Mon Sep 17 00:00:00 2001 From: MMotti Date: Wed, 3 
Jul 2019 13:04:57 +0100 Subject: [PATCH 146/366] Fix potential invalid result for pihole -q Signed-off-by: MMotti --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 42ea4395..b7edaccf 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -41,7 +41,7 @@ scanList(){ # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) awk 'NR==FNR{regexps[$0]}{for (r in regexps)if($0 ~ r)print r}' \ + "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac From f1733f9c5d8a798b38e372abeba30c7ddc540a87 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Thu, 4 Jul 2019 13:11:46 -0700 Subject: [PATCH 147/366] Fetch adlists for the block page from gravity.db Signed-off-by: Mcat12 --- advanced/index.php | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index 78135e1a..b44a725d 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -111,11 +111,30 @@ if (is_file("/etc/pihole/adlists.list")) { die("[ERROR] File not found: /etc/pihole/adlists.list"); } -// Get all URLs starting with "http" or "www" from adlists and re-index array numerically -$adlistsUrls = array_values(preg_grep("/(^http)|(^www)/i", file($adLists, FILE_IGNORE_NEW_LINES))); +// Get possible non-standard location of FTL's database +$FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf"); +if(isset($FTLsettings["GRAVITYDB"])) { + $gravityDBFile = $FTLsettings["GRAVITYDB"]; +} else { + $gravityDBFile = "/etc/pihole/gravity.db"; +} + +// Connect to gravity.db +try { + $db = new SQLite3($gravityDBFile, SQLITE3_OPEN_READONLY); +} catch (Exception $exception) { + die("[ERROR]: Failed to connect to gravity.db"); +} + +// Get all adlist addresses +$adlistResults = $db->query("SELECT address FROM vw_adlist"); +$adlistsUrls = array(); +while($row = $adlistResults->fetchArray()) { + array_push($adlistsUrls, $row[0]); +} if (empty($adlistsUrls)) - die("[ERROR]: There are no adlist URL's found within $adLists"); + die("[ERROR]: There are no adlists configured"); // Get total number of blocklists (Including Whitelist, Blacklist & Wildcard lists) $adlistsCount = count($adlistsUrls) + 3; From 8d9ff550d469002b4aef323203f6ea7fd356f033 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Thu, 4 Jul 2019 13:44:14 -0700 Subject: [PATCH 148/366] Fix blockpage error if whitelisted, blacklisted, or regex filtered Signed-off-by: Mcat12 --- advanced/Scripts/query.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 4fc82744..6d061ba5 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -115,7 +115,9 @@ scanDatabaseTable() { wbMatch=true # Print table name - echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + if [[ -z "${blockpage}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + fi # Loop over results and print them mapfile -t results <<< "${result}" @@ -159,7 +161,7 @@ if [[ "${#regexList[@]}" -ne 0 ]]; then # shellcheck disable=SC2001 echo "${str_result}" | sed 's/^/ /' else - echo "π Regex list" + echo "π .wildcard" exit 0 fi fi From 
2b5033e732e93be8e03a8049f1e83caa28a6bd25 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Thu, 4 Jul 2019 13:49:39 -0700 Subject: [PATCH 149/366] Add missing spaces found by linter Signed-off-by: Mcat12 --- advanced/index.php | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index b44a725d..ff13ec60 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -113,7 +113,7 @@ if (is_file("/etc/pihole/adlists.list")) { // Get possible non-standard location of FTL's database $FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf"); -if(isset($FTLsettings["GRAVITYDB"])) { +if (isset($FTLsettings["GRAVITYDB"])) { $gravityDBFile = $FTLsettings["GRAVITYDB"]; } else { $gravityDBFile = "/etc/pihole/gravity.db"; @@ -129,7 +129,7 @@ try { // Get all adlist addresses $adlistResults = $db->query("SELECT address FROM vw_adlist"); $adlistsUrls = array(); -while($row = $adlistResults->fetchArray()) { +while ($row = $adlistResults->fetchArray()) { array_push($adlistsUrls, $row[0]); } From 1dbe6c83c38e64d2fb1026d65524a377ac990102 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 13:54:18 +0200 Subject: [PATCH 150/366] Add database upgrading mechanism for adding the audit table. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 21 +++++++++++++++++++ .../database_migration/gravity/1_to_2.sql | 9 ++++++++ advanced/Templates/gravity.db.sql | 11 +++++++++- 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 advanced/Scripts/database_migration/gravity-db.sh create mode 100644 advanced/Scripts/database_migration/gravity/1_to_2.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh new file mode 100644 index 00000000..2c5669f0 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# shellcheck disable=SC1090 + +# Pi-hole: A black hole for Internet advertisements +# (c) 2019 Pi-hole, LLC (https://pi-hole.net) +# Network-wide ad blocking via your own hardware. +# +# Updates gravity.db database +# +# This file is copyright under the latest version of the EUPL. +# Please see LICENSE file for your rights under this license. 
+ +upgrade_gravityDB(){ + version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") + echo $version + case "$version" in + 1) + sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" + ;; + esac +} diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql new file mode 100644 index 00000000..63b7f56f --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -0,0 +1,9 @@ +CREATE TABLE audit +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); + +UPDATE info SET value = 2 WHERE property = 'version'; diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 91a38ef7..eef9ac80 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -80,13 +80,22 @@ CREATE TABLE gravity ( domain TEXT PRIMARY KEY ); + +CREATE TABLE audit +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); + CREATE TABLE info ( property TEXT PRIMARY KEY, value TEXT NOT NULL ); -INSERT INTO info VALUES("version","1"); +INSERT INTO info VALUES("version","2"); CREATE VIEW vw_gravity AS SELECT domain FROM gravity From 4f4a12bb40ab6f09a8651ad5ad908edc8c9b59b1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 14:03:57 +0200 Subject: [PATCH 151/366] Upgrade database if necessary and store audit domains therein. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 4 +- gravity.sh | 54 +++++++++++++------ 2 files changed, 40 insertions(+), 18 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 2c5669f0..492546be 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -11,8 +11,8 @@ # Please see LICENSE file for your rights under this license. upgrade_gravityDB(){ - version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") - echo $version + local version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") + case "$version" in 1) sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" diff --git a/gravity.sh b/gravity.sh index 1a325ba7..724238fa 100755 --- a/gravity.sh +++ b/gravity.sh @@ -17,6 +17,8 @@ coltable="/opt/pihole/COL_TABLE" source "${coltable}" regexconverter="/opt/pihole/wildcard_regex_converter.sh" source "${regexconverter}" +readonly databaseMigrationScript="/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh" +source "${databaseMigrationScript}" basename="pihole" PIHOLE_COMMAND="/usr/local/bin/${basename}" @@ -28,6 +30,7 @@ whitelistFile="${piholeDir}/whitelist.txt" blacklistFile="${piholeDir}/blacklist.txt" regexFile="${piholeDir}/regex.list" adListFile="${piholeDir}/adlists.list" +auditFile="${piholeDir}/audit.list" localList="${piholeDir}/local.list" VPNList="/etc/openvpn/ipp.txt" @@ -116,14 +119,27 @@ database_table_from_file() { declare -i rowid rowid=1 # Read file line by line - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ ! 
-z "${domain}" ]]; then - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" - rowid+=1 - fi - done + if [[ "${table}" == "audit" ]]; then + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ ! -z "${domain}" ]]; then + # Audit table format + echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" + rowid+=1 + fi + done + else + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ ! -z "${domain}" ]]; then + # White-, black-, and regexlist format + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + rowid+=1 + fi + done + fi inputfile="${tmpFile}" fi # Store domains in database table specified by ${table} @@ -150,34 +166,40 @@ database_table_from_file() { # Migrate pre-v5.0 list files to database-based Pi-hole versions migrate_to_database() { # Create database file only if not present - if [ -e "${gravityDBfile}" ]; then - return 0 + if [ ! -e "${gravityDBfile}" ]; then + echo -e " ${INFO} Creating new gravity database" + generate_gravity_database fi - echo -e " ${INFO} Creating new gravity database" - generate_gravity_database + # Check if gravity database needs to be updated + upgrade_gravityDB "${gravityDBfile}" # Migrate list files to new database - if [[ -e "${adListFile}" ]]; then + if [ -e "${adListFile}" ]; then # Store adlist domains in database echo -e " ${INFO} Migrating content of ${adListFile} into new database" database_table_from_file "adlist" "${adListFile}" fi - if [[ -e "${blacklistFile}" ]]; then + if [ -e "${blacklistFile}" ]; then # Store blacklisted domains in database echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" database_table_from_file "blacklist" "${blacklistFile}" fi - if [[ -e "${whitelistFile}" ]]; then + if [ -e "${whitelistFile}" ]; then # Store whitelisted domains in database echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" database_table_from_file "whitelist" "${whitelistFile}" fi - if [[ -e "${regexFile}" ]]; then + if [ -e "${regexFile}" ]; then # Store regex domains in database echo -e " ${INFO} Migrating content of ${regexFile} into new database" database_table_from_file "regex" "${regexFile}" fi + if [ -e "${auditFile}" ]; then + # Store audit domains in database + echo -e " ${INFO} Migrating content of ${auditFile} into new database" + database_table_from_file "audit" "${auditFile}" + fi } # Determine if DNS resolution is available before proceeding From 0c8f5f12215eaceb370e5110d8383fe34a428b17 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 14:06:05 +0200 Subject: [PATCH 152/366] Remove comment field from audit table Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/1_to_2.sql | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index 63b7f56f..4e560df5 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -2,8 +2,7 @@ CREATE TABLE audit ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)) ); UPDATE info SET value = 2 WHERE property = 'version'; From 5293beeb77665fc60503f7566352e183c31e4d04 
Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 14:10:33 +0200 Subject: [PATCH 153/366] Update audit script to store domains in new database table. Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index f744d1b9..9a6ddfc9 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -546,17 +546,17 @@ addAudit() { shift # skip "-a" shift # skip "audit" - for var in "$@" + for domain in "$@" do - echo "${var}" >> /etc/pihole/auditlog.list + # Insert only the domain here. The date_added field will be + # filled with its default value (date_added = current timestamp) + sqlite3 "${gravityDBfile}" "INSERT INTO \"audit\" (domain) VALUES ('${domain}');" done - chmod 644 /etc/pihole/auditlog.list } clearAudit() { - echo -n "" > /etc/pihole/auditlog.list - chmod 644 /etc/pihole/auditlog.list + sqlite3 "${gravityDBfile}" "DELETE FROM \"audit\";" } SetPrivacyLevel() { From 82476138c151eba69c223943eb7ef1aa60cfed78 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 16:09:13 +0200 Subject: [PATCH 154/366] Instead of calling sqlite3 multiple times within a loop, we use the ability to add multiple rows within one INSERT clause. This is supported since sqlite3 3.7.11 (2012-03-20) and should be available on all systems. Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 9a6ddfc9..3996f7d0 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -546,12 +546,15 @@ addAudit() { shift # skip "-a" shift # skip "audit" + local domains="('${1}')" + shift # skip first domain, as it has already been added for domain in "$@" do # Insert only the domain here. 
The date_added field will be # filled with its default value (date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO \"audit\" (domain) VALUES ('${domain}');" + domains="${domains},('${domain}')" done + sqlite3 "${gravityDBfile}" "INSERT INTO \"audit\" (domain) VALUES ${domains};" } clearAudit() From 2fb4256f84b1de7b264c5bf62849c5b7c58399bd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 5 Jul 2019 16:28:36 +0200 Subject: [PATCH 155/366] Rename table to "auditlist" Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/1_to_2.sql | 2 +- advanced/Scripts/webpage.sh | 4 ++-- advanced/Templates/gravity.db.sql | 5 ++--- gravity.sh | 8 ++++---- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index 4e560df5..ba051e25 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -1,4 +1,4 @@ -CREATE TABLE audit +CREATE TABLE auditlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 3996f7d0..91d35d98 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -554,12 +554,12 @@ addAudit() # filled with its default value (date_added = current timestamp) domains="${domains},('${domain}')" done - sqlite3 "${gravityDBfile}" "INSERT INTO \"audit\" (domain) VALUES ${domains};" + sqlite3 "${gravityDBfile}" "INSERT INTO \"auditlist\" (domain) VALUES ${domains};" } clearAudit() { - sqlite3 "${gravityDBfile}" "DELETE FROM \"audit\";" + sqlite3 "${gravityDBfile}" "DELETE FROM \"auditlist\";" } SetPrivacyLevel() { diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index eef9ac80..78999e7c 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -81,12 +81,11 @@ CREATE TABLE gravity domain TEXT PRIMARY KEY ); -CREATE TABLE audit +CREATE TABLE auditlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)) ); CREATE TABLE info diff --git a/gravity.sh b/gravity.sh index 724238fa..09c66c4f 100755 --- a/gravity.sh +++ b/gravity.sh @@ -30,7 +30,7 @@ whitelistFile="${piholeDir}/whitelist.txt" blacklistFile="${piholeDir}/blacklist.txt" regexFile="${piholeDir}/regex.list" adListFile="${piholeDir}/adlists.list" -auditFile="${piholeDir}/audit.list" +auditFile="${piholeDir}/auditlog.list" localList="${piholeDir}/local.list" VPNList="/etc/openvpn/ipp.txt" @@ -119,12 +119,12 @@ database_table_from_file() { declare -i rowid rowid=1 # Read file line by line - if [[ "${table}" == "audit" ]]; then + if [[ "${table}" == "auditlist" ]]; then grep -v '^ *#' < "${source}" | while IFS= read -r domain do # Only add non-empty lines if [[ ! 
-z "${domain}" ]]; then - # Audit table format + # Auditlist table format echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" rowid+=1 fi @@ -198,7 +198,7 @@ migrate_to_database() { if [ -e "${auditFile}" ]; then # Store audit domains in database echo -e " ${INFO} Migrating content of ${auditFile} into new database" - database_table_from_file "audit" "${auditFile}" + database_table_from_file "auditlist" "${auditFile}" fi } From 0405aaa3dac94da58585ff9ba7b131d5fc7c490f Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 6 Jul 2019 09:32:41 +0200 Subject: [PATCH 156/366] Review comments and fixing stickler complaints. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 10 ++++----- .../database_migration/gravity/1_to_2.sql | 2 +- advanced/Scripts/webpage.sh | 22 +++++++++++++++---- advanced/Templates/gravity.db.sql | 2 +- gravity.sh | 15 ++++++++----- 5 files changed, 34 insertions(+), 17 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 492546be..55411214 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -11,11 +11,11 @@ # Please see LICENSE file for your rights under this license. upgrade_gravityDB(){ - local version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") + local version + version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") - case "$version" in - 1) + if [[ "$version" == "1" ]]; then sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" - ;; - esac + version=2 + fi } diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index ba051e25..073eced5 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -1,4 +1,4 @@ -CREATE TABLE auditlist +CREATE TABLE domain_auditlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 91d35d98..516ea4e4 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -542,24 +542,38 @@ Teleporter() { php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.tar.gz" } +checkDomain() +{ + local domain validDomain + # Convert to lowercase + domain="${1,,}" + validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check + validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label + echo "${validDomain}" +} + addAudit() { shift # skip "-a" shift # skip "audit" - local domains="('${1}')" + local domains validDomain + domains="('$(checkDomain "${1}")')" shift # skip first domain, as it has already been added for domain in "$@" do # Insert only the domain here. 
The date_added field will be # filled with its default value (date_added = current timestamp) - domains="${domains},('${domain}')" + validDomain="$(checkDomain "${domain}")" + if [[ -n "${validDomain}" ]]; then + domains="${domains},('${domain}')" + fi done - sqlite3 "${gravityDBfile}" "INSERT INTO \"auditlist\" (domain) VALUES ${domains};" + sqlite3 "${gravityDBfile}" "INSERT INTO \"domain_auditlist\" (domain) VALUES ${domains};" } clearAudit() { - sqlite3 "${gravityDBfile}" "DELETE FROM \"auditlist\";" + sqlite3 "${gravityDBfile}" "DELETE FROM \"domain_auditlist\";" } SetPrivacyLevel() { diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 78999e7c..113c035f 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -81,7 +81,7 @@ CREATE TABLE gravity domain TEXT PRIMARY KEY ); -CREATE TABLE auditlist +CREATE TABLE domain_auditlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, diff --git a/gravity.sh b/gravity.sh index 09c66c4f..3a8afc7a 100755 --- a/gravity.sh +++ b/gravity.sh @@ -115,25 +115,28 @@ database_table_from_file() { inputfile="${source}" else # Apply format for white-, blacklist, regex, and adlist tables - local rowid - declare -i rowid - rowid=1 # Read file line by line if [[ "${table}" == "auditlist" ]]; then + local rowid + declare -i rowid + rowid=1 grep -v '^ *#' < "${source}" | while IFS= read -r domain do # Only add non-empty lines - if [[ ! -z "${domain}" ]]; then + if [[ -n "${domain}" ]]; then # Auditlist table format echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" rowid+=1 fi done else + local rowid + declare -i rowid + rowid=1 grep -v '^ *#' < "${source}" | while IFS= read -r domain do # Only add non-empty lines - if [[ ! -z "${domain}" ]]; then + if [[ -n "${domain}" ]]; then # White-, black-, and regexlist format echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" rowid+=1 @@ -198,7 +201,7 @@ migrate_to_database() { if [ -e "${auditFile}" ]; then # Store audit domains in database echo -e " ${INFO} Migrating content of ${auditFile} into new database" - database_table_from_file "auditlist" "${auditFile}" + database_table_from_file "domain_auditlist" "${auditFile}" fi } From efe8216445fbb7345a381f91fed3a3fefd5f5117 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 6 Jul 2019 09:45:07 +0200 Subject: [PATCH 157/366] Fix further stickler complaint. 
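The warning concerns the nested double quotes inside the sqlite3 query string. A minimal before/after sketch of the quoting issue (illustration only; the database path is shortened here and is not the literal migration script):

```
# Illustration only - not the literal migration script.
db="/etc/pihole/gravity.db"

# Before: the unescaped inner quotes terminate the outer string, so the
# identifiers reach sqlite3 unquoted. SQLite happens to accept that, but
# shellcheck flags the fragile quoting.
version="$(sqlite3 "$db" "SELECT "value" FROM "info" WHERE "property" = 'version';")"

# After: escaping the inner quotes keeps the whole statement in one word
# and passes the intended quoted identifiers through to SQLite.
version="$(sqlite3 "$db" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"
```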
Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 55411214..46ff0a72 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -12,7 +12,7 @@ upgrade_gravityDB(){ local version - version=$(sqlite3 "$1" "SELECT "value" FROM "info" WHERE "property" = 'version';") + version="$(sqlite3 "$1" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" From c48b03584f315d20f8f05561709e59bd462ebb28 Mon Sep 17 00:00:00 2001 From: bcambl Date: Sat, 6 Jul 2019 10:57:57 -0600 Subject: [PATCH 158/366] update Fedora Dockerfile to 30 Signed-off-by: bcambl --- test/fedora.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fedora.Dockerfile b/test/fedora.Dockerfile index 8c27bbcc..a06fc007 100644 --- a/test/fedora.Dockerfile +++ b/test/fedora.Dockerfile @@ -1,4 +1,4 @@ -FROM fedora:29 +FROM fedora:30 ENV GITDIR /etc/.pihole ENV SCRIPTDIR /opt/pihole From ec850dc82090683fd041673f489585e31e99569f Mon Sep 17 00:00:00 2001 From: bcambl Date: Sat, 6 Jul 2019 10:58:19 -0600 Subject: [PATCH 159/366] add chkconfig to INSTALLER_DEPS (CentOS/Fedora) chkconfig is a dependency of spawn-fcgi which is a dependency of lighttpd which is installed via PIHOLE_WEB_DEPS in phase 2 adding chkconfig to INSTALLER_DEPS to ensure /etc/init.d is present during the installation prompts (phase 1) Signed-off-by: bcambl --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 1e87b943..a4adac94 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -283,7 +283,7 @@ elif is_command rpm ; then UPDATE_PKG_CACHE=":" PKG_INSTALL=("${PKG_MANAGER}" install -y) PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" - INSTALLER_DEPS=(dialog git iproute newt procps-ng which) + INSTALLER_DEPS=(dialog git iproute newt procps-ng which chkconfig) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo) LIGHTTPD_USER="lighttpd" From 218476fab087a3fbd07170249a4b1371fb5f92f6 Mon Sep 17 00:00:00 2001 From: bcambl Date: Sat, 6 Jul 2019 11:06:08 -0600 Subject: [PATCH 160/366] ensure installation dependencies for FTL tests which rely on /etc/init.d Signed-off-by: bcambl --- test/test_automated_install.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 282c627d..4bfb0f6a 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -486,6 +486,13 @@ def test_FTL_download_aarch64_no_errors(Pihole): ''' confirms only aarch64 package is downloaded for FTL engine ''' + # mock whiptail answers and ensure installer dependencies + mock_command('whiptail', {'*': ('', '0')}, Pihole) + Pihole.run(''' + source /opt/pihole/basic-install.sh + distro_check + install_dependent_packages ${INSTALLER_DEPS[@]} + ''') download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh binary="pihole-FTL-aarch64-linux-gnu" @@ -501,6 +508,13 @@ 
def test_FTL_download_unknown_fails_no_errors(Pihole): ''' confirms unknown binary is not downloaded for FTL engine ''' + # mock whiptail answers and ensure installer dependencies + mock_command('whiptail', {'*': ('', '0')}, Pihole) + Pihole.run(''' + source /opt/pihole/basic-install.sh + distro_check + install_dependent_packages ${INSTALLER_DEPS[@]} + ''') download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh binary="pihole-FTL-mips" @@ -519,6 +533,13 @@ def test_FTL_download_binary_unset_no_errors(Pihole): ''' confirms unset binary variable does not download FTL engine ''' + # mock whiptail answers and ensure installer dependencies + mock_command('whiptail', {'*': ('', '0')}, Pihole) + Pihole.run(''' + source /opt/pihole/basic-install.sh + distro_check + install_dependent_packages ${INSTALLER_DEPS[@]} + ''') download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user From acc50b709efab4a88e2c1f2e7940b5708a47b6aa Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 7 Jul 2019 10:33:08 +0200 Subject: [PATCH 161/366] Only migrate files once (domain and adlist lists druing initial creation of gravity.db auditlog.list on database upgrade from version 1 to 2. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 9 ++ advanced/Templates/gravity.db.sql | 9 +- gravity.sh | 87 ++++++++----------- 3 files changed, 46 insertions(+), 59 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 46ff0a72..714676f4 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -15,7 +15,16 @@ upgrade_gravityDB(){ version="$(sqlite3 "$1" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then + # This migration script upgrades the gravity.db file by + # adding the domain_auditlist table sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" version=2 + + # Store audit domains in database table + if [ -e "${auditFile}" ]; then + echo -e " ${INFO} Migrating content of ${auditFile} into new database" + # database_table_from_file is defined in gravity.sh + database_table_from_file "domain_auditlist" "${auditFile}" + fi fi } diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 113c035f..09d581f0 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -81,20 +81,13 @@ CREATE TABLE gravity domain TEXT PRIMARY KEY ); -CREATE TABLE domain_auditlist -( - id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)) -); - CREATE TABLE info ( property TEXT PRIMARY KEY, value TEXT NOT NULL ); -INSERT INTO info VALUES("version","2"); +INSERT INTO info VALUES("version","1"); CREATE VIEW vw_gravity AS SELECT domain FROM gravity diff --git a/gravity.sh b/gravity.sh index 3a8afc7a..e2f2a9a8 100755 --- a/gravity.sh +++ b/gravity.sh @@ -17,8 +17,7 @@ coltable="/opt/pihole/COL_TABLE" source "${coltable}" regexconverter="/opt/pihole/wildcard_regex_converter.sh" source "${regexconverter}" -readonly databaseMigrationScript="/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh" -source "${databaseMigrationScript}" +source "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh" basename="pihole" PIHOLE_COMMAND="/usr/local/bin/${basename}" @@ -116,33 +115,23 @@ database_table_from_file() { else # Apply 
format for white-, blacklist, regex, and adlist tables # Read file line by line - if [[ "${table}" == "auditlist" ]]; then - local rowid - declare -i rowid - rowid=1 - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then + local rowid + declare -i rowid + rowid=1 + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ -n "${domain}" ]]; then + if [[ "${table}" == "auditlist" ]]; then # Auditlist table format echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" - rowid+=1 - fi - done - else - local rowid - declare -i rowid - rowid=1 - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then + else # White-, black-, and regexlist format echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" - rowid+=1 fi - done - fi + rowid+=1 + fi + done inputfile="${tmpFile}" fi # Store domains in database table specified by ${table} @@ -170,39 +159,35 @@ database_table_from_file() { migrate_to_database() { # Create database file only if not present if [ ! -e "${gravityDBfile}" ]; then + # Create new database file - note that this will be created in version 1 echo -e " ${INFO} Creating new gravity database" generate_gravity_database + + # Migrate list files to new database + if [ -e "${adListFile}" ]; then + # Store adlist domains in database + echo -e " ${INFO} Migrating content of ${adListFile} into new database" + database_table_from_file "adlist" "${adListFile}" + fi + if [ -e "${blacklistFile}" ]; then + # Store blacklisted domains in database + echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" + database_table_from_file "blacklist" "${blacklistFile}" + fi + if [ -e "${whitelistFile}" ]; then + # Store whitelisted domains in database + echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" + database_table_from_file "whitelist" "${whitelistFile}" + fi + if [ -e "${regexFile}" ]; then + # Store regex domains in database + echo -e " ${INFO} Migrating content of ${regexFile} into new database" + database_table_from_file "regex" "${regexFile}" + fi fi # Check if gravity database needs to be updated upgrade_gravityDB "${gravityDBfile}" - - # Migrate list files to new database - if [ -e "${adListFile}" ]; then - # Store adlist domains in database - echo -e " ${INFO} Migrating content of ${adListFile} into new database" - database_table_from_file "adlist" "${adListFile}" - fi - if [ -e "${blacklistFile}" ]; then - # Store blacklisted domains in database - echo -e " ${INFO} Migrating content of ${blacklistFile} into new database" - database_table_from_file "blacklist" "${blacklistFile}" - fi - if [ -e "${whitelistFile}" ]; then - # Store whitelisted domains in database - echo -e " ${INFO} Migrating content of ${whitelistFile} into new database" - database_table_from_file "whitelist" "${whitelistFile}" - fi - if [ -e "${regexFile}" ]; then - # Store regex domains in database - echo -e " ${INFO} Migrating content of ${regexFile} into new database" - database_table_from_file "regex" "${regexFile}" - fi - if [ -e "${auditFile}" ]; then - # Store audit domains in database - echo -e " ${INFO} Migrating content of ${auditFile} into new database" - database_table_from_file "domain_auditlist" "${auditFile}" - fi } # Determine if DNS resolution is available before proceeding From be3e198f9a83989a6db71011a294cdce0ef7f56c Mon Sep 17 00:00:00 2001 
From: DL6ER Date: Sun, 7 Jul 2019 10:46:20 +0200 Subject: [PATCH 162/366] Address linting errors. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 8 +++++--- gravity.sh | 3 ++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 714676f4..294fd32a 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -11,13 +11,15 @@ # Please see LICENSE file for your rights under this license. upgrade_gravityDB(){ - local version - version="$(sqlite3 "$1" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" + local database auditFile version + database="${1}" + auditFile="${2}" + version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then # This migration script upgrades the gravity.db file by # adding the domain_auditlist table - sqlite3 "$1" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" + sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" version=2 # Store audit domains in database table diff --git a/gravity.sh b/gravity.sh index e2f2a9a8..89f77ce0 100755 --- a/gravity.sh +++ b/gravity.sh @@ -17,6 +17,7 @@ coltable="/opt/pihole/COL_TABLE" source "${coltable}" regexconverter="/opt/pihole/wildcard_regex_converter.sh" source "${regexconverter}" +# shellcheck disable=SC1091 source "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh" basename="pihole" @@ -187,7 +188,7 @@ migrate_to_database() { fi # Check if gravity database needs to be updated - upgrade_gravityDB "${gravityDBfile}" + upgrade_gravityDB "${gravityDBfile}" "${auditFile}" } # Determine if DNS resolution is available before proceeding From 8382f4d7274e893e19dbe09de5dd090daa590c24 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 7 Jul 2019 21:21:56 +0200 Subject: [PATCH 163/366] Rename table to domain_audit and simplify subroutine addAudit(). 
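The simplified routine still batches all domains into one statement; roughly as follows (a sketch with placeholder domains and the default database path, not the full webpage.sh function):

```
#!/usr/bin/env bash
# Sketch: build one multi-row INSERT instead of one sqlite3 call per domain.
# gravityDBfile and the domain list are placeholders for illustration.
gravityDBfile="/etc/pihole/gravity.db"

domains=""
for domain in example.com ads.example.net tracker.example.org; do
    # Separate the value tuples with commas: ('a'),('b'),('c')
    if [[ -n "${domains}" ]]; then
        domains="${domains},"
    fi
    domains="${domains}('${domain}')"
done

# Multi-row VALUES has been supported since SQLite 3.7.11 (2012-03-20),
# so a single call covers any number of audited domains.
sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};"
```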
Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 4 ++-- .../Scripts/database_migration/gravity/1_to_2.sql | 2 +- advanced/Scripts/webpage.sh | 14 +++++++++----- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 294fd32a..51a3480b 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -18,7 +18,7 @@ upgrade_gravityDB(){ if [[ "$version" == "1" ]]; then # This migration script upgrades the gravity.db file by - # adding the domain_auditlist table + # adding the domain_audit table sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" version=2 @@ -26,7 +26,7 @@ upgrade_gravityDB(){ if [ -e "${auditFile}" ]; then echo -e " ${INFO} Migrating content of ${auditFile} into new database" # database_table_from_file is defined in gravity.sh - database_table_from_file "domain_auditlist" "${auditFile}" + database_table_from_file "domain_audit" "${auditFile}" fi fi } diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index 073eced5..90a48418 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -1,4 +1,4 @@ -CREATE TABLE domain_auditlist +CREATE TABLE domain_audit ( id INTEGER PRIMARY KEY AUTOINCREMENT, domain TEXT UNIQUE NOT NULL, diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 516ea4e4..918fde27 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -557,23 +557,27 @@ addAudit() shift # skip "-a" shift # skip "audit" local domains validDomain - domains="('$(checkDomain "${1}")')" - shift # skip first domain, as it has already been added + domains="" for domain in "$@" do # Insert only the domain here. The date_added field will be # filled with its default value (date_added = current timestamp) validDomain="$(checkDomain "${domain}")" if [[ -n "${validDomain}" ]]; then - domains="${domains},('${domain}')" + # Put comma in between () when there is + # more than one domains to be added + if [[ -n "${domains}" ]]; then + domains="${domains}," + fi + domains="${domains}('${domain}')" fi done - sqlite3 "${gravityDBfile}" "INSERT INTO \"domain_auditlist\" (domain) VALUES ${domains};" + sqlite3 "${gravityDBfile}" "INSERT INTO \"domain_audit\" (domain) VALUES ${domains};" } clearAudit() { - sqlite3 "${gravityDBfile}" "DELETE FROM \"domain_auditlist\";" + sqlite3 "${gravityDBfile}" "DELETE FROM \"domain_audit\";" } SetPrivacyLevel() { From e8e5d4afda44f8d46d546cc12140b0dcc980aa39 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Sun, 7 Jul 2019 18:10:39 -0700 Subject: [PATCH 164/366] Get file locations of FTL files from the config Instead of hardcoding the location of certain FTL files (`gravity.db`, `pihole-FTL.log`), read the configured location from FTL's config. The default location is used if no custom location has been configured. 
Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 2bfeef2d..b9bd6e8c 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -94,7 +94,35 @@ PIHOLE_RAW_BLOCKLIST_FILES="${PIHOLE_DIRECTORY}/list.*" PIHOLE_LOCAL_HOSTS_FILE="${PIHOLE_DIRECTORY}/local.list" PIHOLE_LOGROTATE_FILE="${PIHOLE_DIRECTORY}/logrotate" PIHOLE_SETUP_VARS_FILE="${PIHOLE_DIRECTORY}/setupVars.conf" -PIHOLE_GRAVITY_DB_FILE="${PIHOLE_DIRECTORY}/gravity.db" +PIHOLE_FTL_CONF_FILE="${PIHOLE_DIRECTORY}/pihole-FTL.conf" + +# Read the value of an FTL config key. The value is printed to stdout. +# +# Args: +# 1. The key to read +# 2. The default if the setting or config does not exist +get_ftl_conf_value() { + local key=$1 + local default=$2 + local value + + # Obtain key=... setting from pihole-FTL.conf + if [[ -e "$PIHOLE_FTL_CONF_FILE" ]]; then + # Constructed to return nothing when + # a) the setting is not present in the config file, or + # b) the setting is commented out (e.g. "#DBFILE=...") + value="$(sed -n -e "s/^\\s*$key=\\s*//p" ${PIHOLE_FTL_CONF_FILE})" + fi + + # Test for missing value. Use default value in this case. + if [[ -z "$value" ]]; then + value="$default" + fi + + echo "$value" +} + +PIHOLE_GRAVITY_DB_FILE=$(get_ftl_conf_value "GRAVITYDB" "${PIHOLE_DIRECTORY}/gravity.db") PIHOLE_COMMAND="${BIN_DIRECTORY}/pihole" PIHOLE_COLTABLE_FILE="${BIN_DIRECTORY}/COL_TABLE" @@ -105,7 +133,7 @@ FTL_PORT="${RUN_DIRECTORY}/pihole-FTL.port" PIHOLE_LOG="${LOG_DIRECTORY}/pihole.log" PIHOLE_LOG_GZIPS="${LOG_DIRECTORY}/pihole.log.[0-9].*" PIHOLE_DEBUG_LOG="${LOG_DIRECTORY}/pihole_debug.log" -PIHOLE_FTL_LOG="${LOG_DIRECTORY}/pihole-FTL.log" +PIHOLE_FTL_LOG=$(get_ftl_conf_value "LOGFILE" "${LOG_DIRECTORY}/pihole-FTL.log") PIHOLE_WEB_SERVER_ACCESS_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/access.log" PIHOLE_WEB_SERVER_ERROR_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/error.log" From 3d3fc2947e5d848e7ea73bef7b6a34a9faa48091 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 8 Jul 2019 19:22:35 +0200 Subject: [PATCH 165/366] Review comments Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 918fde27..227363f0 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -564,20 +564,22 @@ addAudit() # filled with its default value (date_added = current timestamp) validDomain="$(checkDomain "${domain}")" if [[ -n "${validDomain}" ]]; then - # Put comma in between () when there is + # Put comma in between domains when there is # more than one domains to be added + # SQL INSERT allows adding multiple rows at once using the format + ## INSERT INTO table (domain) VALUES ('abc.de'),('fgh.ij'),('klm.no'),('pqr.st'); if [[ -n "${domains}" ]]; then domains="${domains}," fi domains="${domains}('${domain}')" fi done - sqlite3 "${gravityDBfile}" "INSERT INTO \"domain_audit\" (domain) VALUES ${domains};" + sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" } clearAudit() { - sqlite3 "${gravityDBfile}" "DELETE FROM \"domain_audit\";" + sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;" } SetPrivacyLevel() { From 054c7a2c050b416ac7de09e29351bdb69564430d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 8 Jul 2019 21:23:46 +0200 Subject: [PATCH 166/366] Create new table + 
view regex_whitelist + rename old regex table to regex_blacklist. This updates the gravity.db version to 3. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 7 +++++ .../database_migration/gravity/2_to_3.sql | 30 +++++++++++++++++++ advanced/Scripts/list.sh | 21 ++++++------- advanced/Scripts/piholeDebug.sh | 11 +++++-- advanced/Scripts/query.sh | 2 +- gravity.sh | 9 +++--- 6 files changed, 62 insertions(+), 18 deletions(-) create mode 100644 advanced/Scripts/database_migration/gravity/2_to_3.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 51a3480b..f37ce176 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -29,4 +29,11 @@ upgrade_gravityDB(){ database_table_from_file "domain_audit" "${auditFile}" fi fi + if [[ "$version" == "2" ]]; then + # This migration script upgrades the gravity.db file by + # renaming the regex table to regex_blacklist, and + # creating a new regex_whitelist table + corresponding linking table and views + sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" + version=3 + fi } diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql new file mode 100644 index 00000000..e368592a --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -0,0 +1,30 @@ +PRAGMA FOREIGN_KEYS=OFF; + +ALTER TABLE regex RENAME TO regex_blacklist; + +CREATE TABLE regex_blacklist_by_group +( + regex_blacklist_id INTEGER NOT NULL REFERENCES regex_blacklist (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (regex_blacklist_id, group_id) +); + +INSERT INTO regex_blacklist_by_group SELECT * FROM regex_by_group; +DROP TABLE regex_by_group; +DROP VIEW vw_regex; +DROP TRIGGER tr_regex_update; + +CREATE VIEW vw_regex_blacklist AS SELECT DISTINCT domain + FROM regex + LEFT JOIN regex_blacklist_by_group ON regex_blacklist_by_group.regex_blacklist_id = regex_blacklist.id + LEFT JOIN "group" ON "group".id = regex_blacklist_by_group.group_id + WHERE regex_blacklist.enabled = 1 AND (regex_blacklist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY regex_blacklist.id; + +CREATE TRIGGER tr_regex_blacklist_update AFTER UPDATE ON regex_blacklist + BEGIN + UPDATE regex_blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; + + +UPDATE info SET value = 3 WHERE property = 'version'; diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index fa81348b..84acf4fc 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -32,12 +32,12 @@ helpFunc() { if [[ "${listType}" == "whitelist" ]]; then param="w" type="whitelist" - elif [[ "${listType}" == "regex" && "${wildcard}" == true ]]; then + elif [[ "${listType}" == "regex_blacklist" && "${wildcard}" == true ]]; then param="-wild" type="wildcard blacklist" - elif [[ "${listType}" == "regex" ]]; then + elif [[ "${listType}" == "regex_blacklist" ]]; then param="-regex" - type="regex filter" + type="regex blacklist filter" else param="b" type="blacklist" @@ -58,7 +58,8 @@ Options: exit 0 } -EscapeRegexp() { +Escape +Regexp() { # This way we may safely insert an arbitrary # string in our regular expressions # This sed is intentionally executed in three steps to ease maintainability @@ -72,7 +73,7 @@ HandleOther() { # Check validity of domain (don't check for regex entries) if [[ 
"${#domain}" -le 253 ]]; then - if [[ "${listType}" == "regex" && "${wildcard}" == false ]]; then + if [[ "${listType}" == "regex_blacklist" && "${wildcard}" == false ]]; then validDomain="${domain}" else validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check @@ -88,9 +89,9 @@ HandleOther() { } ProcessDomainList() { - if [[ "${listType}" == "regex" ]]; then + if [[ "${listType}" == "regex_blacklist" ]]; then # Regex filter list - listname="regex filters" + listname="regex blacklist filters" else # Whitelist / Blacklist listname="${listType}" @@ -106,7 +107,7 @@ ProcessDomainList() { # if delmode then remove from desired list but do not add to the other if ${addmode}; then AddDomain "${dom}" "${listType}" - if [[ ! "${listType}" == "regex" ]]; then + if [[ ! "${listType}" == "regex_blacklist" ]]; then RemoveDomain "${dom}" "${listAlt}" fi else @@ -215,8 +216,8 @@ for var in "$@"; do case "${var}" in "-w" | "whitelist" ) listType="whitelist"; listAlt="blacklist";; "-b" | "blacklist" ) listType="blacklist"; listAlt="whitelist";; - "--wild" | "wildcard" ) listType="regex"; wildcard=true;; - "--regex" | "regex" ) listType="regex";; + "--wild" | "wildcard" ) listType="regex_blacklist"; wildcard=true;; + "--regex" | "regex" ) listType="regex_blacklist";; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 2bfeef2d..965250f0 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1088,8 +1088,12 @@ show_blacklist() { show_db_entries "Blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" } -show_regexlist() { - show_db_entries "Regexlist" "SELECT * FROM regex" "4 100 7 10 13 50" +show_regexblacklist() { + show_db_entries "Regexblacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" +} + +show_regexwhitelist() { + show_db_entries "Regexwhitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" } analyze_gravity_list() { @@ -1268,7 +1272,8 @@ analyze_gravity_list show_adlists show_whitelist show_blacklist -show_regexlist +show_regexblacklist +show_regexwhitelist show_content_of_pihole_files parse_locale analyze_pihole_log diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 4fc82744..a587f238 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -133,7 +133,7 @@ scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" # Scan Regex table -mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex" 2> /dev/null) +mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_blacklist" 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then diff --git a/gravity.sh b/gravity.sh index 89f77ce0..a379e248 100755 --- a/gravity.sh +++ b/gravity.sh @@ -183,7 +183,7 @@ migrate_to_database() { if [ -e "${regexFile}" ]; then # Store regex domains in database echo -e " ${INFO} Migrating content of ${regexFile} into new database" - database_table_from_file "regex" "${regexFile}" + database_table_from_file "regex_blacklist" "${regexFile}" fi fi @@ -591,9 +591,10 @@ gravity_Table_Count() { # Output count of blacklisted domains and regex filters gravity_ShowCount() { - gravity_Table_Count "blacklist" "blacklisted domains" - gravity_Table_Count "whitelist" "whitelisted domains" - 
gravity_Table_Count "regex" "regex filters" + gravity_Table_Count "blacklist" "exact blacklisted domains" + gravity_Table_Count "regex_blacklist" "regex blacklist filters" + gravity_Table_Count "whitelist" "exact whitelisted domains" + gravity_Table_Count "regex_whitelist" "regex whitelist filters" } # Parse list of domains into hosts format From f5121c64be312a0c4c900383d882f20cccdb8a6d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 8 Jul 2019 21:39:30 +0200 Subject: [PATCH 167/366] We should still add the regex lines (initially) to the regex table as the renaming will happen only after the importing. Signed-off-by: DL6ER --- gravity.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index a379e248..f9ecc1d1 100755 --- a/gravity.sh +++ b/gravity.sh @@ -182,8 +182,10 @@ migrate_to_database() { fi if [ -e "${regexFile}" ]; then # Store regex domains in database + # Important note: We need to add the domains to the "regex" table + # as it will only later be renamed to "regex_blacklist"! echo -e " ${INFO} Migrating content of ${regexFile} into new database" - database_table_from_file "regex_blacklist" "${regexFile}" + database_table_from_file "regex" "${regexFile}" fi fi From 0683842ec30081e57ce2e801ebcfec7c4f08f8ff Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 8 Jul 2019 21:43:49 +0200 Subject: [PATCH 168/366] Fix typo in 2->3 migration script. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/2_to_3.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql index e368592a..9b0bd412 100644 --- a/advanced/Scripts/database_migration/gravity/2_to_3.sql +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -15,7 +15,7 @@ DROP VIEW vw_regex; DROP TRIGGER tr_regex_update; CREATE VIEW vw_regex_blacklist AS SELECT DISTINCT domain - FROM regex + FROM regex_blacklist LEFT JOIN regex_blacklist_by_group ON regex_blacklist_by_group.regex_blacklist_id = regex_blacklist.id LEFT JOIN "group" ON "group".id = regex_blacklist_by_group.group_id WHERE regex_blacklist.enabled = 1 AND (regex_blacklist_by_group.group_id IS NULL OR "group".enabled = 1) From b154dd5f0792ff804ae5f1b5f8c704c00787a3ca Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Mon, 8 Jul 2019 19:48:50 -0700 Subject: [PATCH 169/366] Quote calls to read FTL config Signed-off-by: Mcat12 --- advanced/Scripts/piholeDebug.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index b9bd6e8c..d1acb950 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -122,7 +122,7 @@ get_ftl_conf_value() { echo "$value" } -PIHOLE_GRAVITY_DB_FILE=$(get_ftl_conf_value "GRAVITYDB" "${PIHOLE_DIRECTORY}/gravity.db") +PIHOLE_GRAVITY_DB_FILE="$(get_ftl_conf_value "GRAVITYDB" "${PIHOLE_DIRECTORY}/gravity.db")" PIHOLE_COMMAND="${BIN_DIRECTORY}/pihole" PIHOLE_COLTABLE_FILE="${BIN_DIRECTORY}/COL_TABLE" @@ -133,7 +133,7 @@ FTL_PORT="${RUN_DIRECTORY}/pihole-FTL.port" PIHOLE_LOG="${LOG_DIRECTORY}/pihole.log" PIHOLE_LOG_GZIPS="${LOG_DIRECTORY}/pihole.log.[0-9].*" PIHOLE_DEBUG_LOG="${LOG_DIRECTORY}/pihole_debug.log" -PIHOLE_FTL_LOG=$(get_ftl_conf_value "LOGFILE" "${LOG_DIRECTORY}/pihole-FTL.log") +PIHOLE_FTL_LOG="$(get_ftl_conf_value "LOGFILE" "${LOG_DIRECTORY}/pihole-FTL.log")" PIHOLE_WEB_SERVER_ACCESS_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/access.log" 
PIHOLE_WEB_SERVER_ERROR_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/error.log" From 5ff90522002576d9a0202a7d458d1d38117659e1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 9 Jul 2019 11:41:44 +0200 Subject: [PATCH 170/366] Review comments Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 7 +++++-- advanced/Scripts/webpage.sh | 5 +++-- gravity.sh | 7 +++---- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 51a3480b..65b42b95 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -11,9 +11,12 @@ # Please see LICENSE file for your rights under this license. upgrade_gravityDB(){ - local database auditFile version + local database piholeDir auditFile version database="${1}" - auditFile="${2}" + piholeDir="${2}" + auditFile="${piholeDir}/auditlog.list" + + # Get database version version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 227363f0..9db10bda 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -560,8 +560,7 @@ addAudit() domains="" for domain in "$@" do - # Insert only the domain here. The date_added field will be - # filled with its default value (date_added = current timestamp) + # Check domain to be added. Only continue if it is valid validDomain="$(checkDomain "${domain}")" if [[ -n "${validDomain}" ]]; then # Put comma in between domains when there is @@ -574,6 +573,8 @@ addAudit() domains="${domains}('${domain}')" fi done + # Insert only the domain here. The date_added field will be + # filled with its default value (date_added = current timestamp) sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" } diff --git a/gravity.sh b/gravity.sh index 89f77ce0..d7c66d68 100755 --- a/gravity.sh +++ b/gravity.sh @@ -30,7 +30,6 @@ whitelistFile="${piholeDir}/whitelist.txt" blacklistFile="${piholeDir}/blacklist.txt" regexFile="${piholeDir}/regex.list" adListFile="${piholeDir}/adlists.list" -auditFile="${piholeDir}/auditlog.list" localList="${piholeDir}/local.list" VPNList="/etc/openvpn/ipp.txt" @@ -123,8 +122,8 @@ database_table_from_file() { do # Only add non-empty lines if [[ -n "${domain}" ]]; then - if [[ "${table}" == "auditlist" ]]; then - # Auditlist table format + if [[ "${table}" == "domain_audit" ]]; then + # domain_audit table format (no enable or modified fields) echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" else # White-, black-, and regexlist format @@ -188,7 +187,7 @@ migrate_to_database() { fi # Check if gravity database needs to be updated - upgrade_gravityDB "${gravityDBfile}" "${auditFile}" + upgrade_gravityDB "${gravityDBfile}" "${piholeDir}" } # Determine if DNS resolution is available before proceeding From 87f75c737a8d7eced48c5f61e2bb6581db1f4c2a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 10 Jul 2019 12:00:38 +0200 Subject: [PATCH 171/366] Review comments. 
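The regex lists are now reported alongside their exact counterparts in the debug log; for a quick manual check the same tables can be queried directly (sketch, assuming the default gravity.db location):

```
# Inspect the split regex tables by hand (default database location assumed).
sqlite3 /etc/pihole/gravity.db "SELECT domain FROM regex_blacklist;"
sqlite3 /etc/pihole/gravity.db "SELECT domain FROM regex_whitelist;"
```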
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 3 +-- advanced/Scripts/piholeDebug.sh | 16 ++++------------ 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 84acf4fc..e3dc552a 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -58,8 +58,7 @@ Options: exit 0 } -Escape -Regexp() { +EscapeRegexp() { # This way we may safely insert an arbitrary # string in our regular expressions # This sed is intentionally executed in three steps to ease maintainability diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 965250f0..0924e984 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1081,19 +1081,13 @@ show_adlists() { } show_whitelist() { - show_db_entries "Whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" + show_db_entries "Exact whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" + show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" } show_blacklist() { - show_db_entries "Blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" -} - -show_regexblacklist() { - show_db_entries "Regexblacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" -} - -show_regexwhitelist() { - show_db_entries "Regexwhitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" + show_db_entries "Exact blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" + show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" } analyze_gravity_list() { @@ -1272,8 +1266,6 @@ analyze_gravity_list show_adlists show_whitelist show_blacklist -show_regexblacklist -show_regexwhitelist show_content_of_pihole_files parse_locale analyze_pihole_log From 65fdbc85d5e1cbafb986da221783fc73ad972df8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 10 Jul 2019 12:01:38 +0200 Subject: [PATCH 172/366] Add timeout to migration script (2->3). Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/2_to_3.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql index 9b0bd412..a2602c4a 100644 --- a/advanced/Scripts/database_migration/gravity/2_to_3.sql +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -1,3 +1,5 @@ +.timeout 30000 + PRAGMA FOREIGN_KEYS=OFF; ALTER TABLE regex RENAME TO regex_blacklist; From 420f60b5c7748ccf939c104a77c9dbf30116f668 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 10 Jul 2019 11:56:39 +0200 Subject: [PATCH 173/366] Add timeout to migration script (1->2). Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/1_to_2.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index 90a48418..45b5fa02 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -1,3 +1,5 @@ +.timeout 30000 + CREATE TABLE domain_audit ( id INTEGER PRIMARY KEY AUTOINCREMENT, From fa8751f9ad89cbe1e9eb32784bdbb99e213ef390 Mon Sep 17 00:00:00 2001 From: Mark Drobnak Date: Wed, 10 Jul 2019 19:42:51 -0700 Subject: [PATCH 174/366] Fix error when checking if IP address is valid During install in `valid_ip`, we split up the IP address into octets to verify it is valid (each is <= 255). 
This validation was broken in #2743 when a variable usage was quoted where it should have stayed unquoted: ``` ./automated install/basic-install.sh: line 942: [[: 192.241.211.120: syntax error: invalid arithmetic operator (error token is ".241.211.120") ``` Due to this error, `127.0.0.1` would be used instead of the requested IP address. Also, this prevented the user from entering a custom DNS server as it would be marked as an invalid IP address. Signed-off-by: Mark Drobnak --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index a4adac94..cb6783a2 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -934,7 +934,7 @@ valid_ip() { # and set the new one to a dot (period) IFS='.' # Put the IP into an array - ip=("${ip}") + ip=(${ip}) # Restore the IFS to what it was IFS=${OIFS} ## Evaluate each octet by checking if it's less than or equal to 255 (the max for each octet) From c156af020c017e0a0429724cba695a4d1984bba1 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Wed, 10 Jul 2019 19:52:17 -0700 Subject: [PATCH 175/366] Use suggested array creation to fix linter error Signed-off-by: Mcat12 --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index cb6783a2..17bdde31 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -934,7 +934,7 @@ valid_ip() { # and set the new one to a dot (period) IFS='.' # Put the IP into an array - ip=(${ip}) + read -r -a ip <<< "${ip}" # Restore the IFS to what it was IFS=${OIFS} ## Evaluate each octet by checking if it's less than or equal to 255 (the max for each octet) From 1d5755a4c2712156bd16d30fe61fcaef229714c9 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Wed, 10 Jul 2019 21:18:58 -0700 Subject: [PATCH 176/366] Add tests for valid_ip Signed-off-by: Mcat12 --- test/test_automated_install.py | 39 ++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 4bfb0f6a..cce11857 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -700,3 +700,42 @@ def test_IPv6_ULA_GUA_test(Pihole): ''') expected_stdout = 'Found IPv6 ULA address, using it for blocking IPv6 ads' assert expected_stdout in detectPlatform.stdout + + +def test_validate_ip_valid(Pihole): + ''' + Given a valid IP address, valid_ip returns success + ''' + + output = Pihole.run(''' + source /opt/pihole/basic-install.sh + valid_ip "192.168.1.1" + ''') + + assert output.rc == 0 + + +def test_validate_ip_invalid_octet(Pihole): + ''' + Given an invalid IP address (large octet), valid_ip returns an error + ''' + + output = Pihole.run(''' + source /opt/pihole/basic-install.sh + valid_ip "1092.168.1.1" + ''') + + assert output.rc == 1 + + +def test_validate_ip_invalid_letters(Pihole): + ''' + Given an invalid IP address (contains letters), valid_ip returns an error + ''' + + output = Pihole.run(''' + source /opt/pihole/basic-install.sh + valid_ip "not an IP" + ''') + + assert output.rc == 1 From c3ec2e68adf68b8d4ae169385e1158754e8a67d3 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Fri, 12 Jul 2019 20:03:36 -0700 Subject: [PATCH 177/366] Remove the ZeusTracker blocklist from the defaults It is no longer served. Fixes #2843. 
Signed-off-by: Mcat12 --- automated install/basic-install.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 17bdde31..65fb7c2e 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1177,12 +1177,11 @@ chooseBlocklists() { mv "${adlistFile}" "${adlistFile}.old" fi # Let user select (or not) blocklists via a checklist - cmd=(whiptail --separate-output --checklist "Pi-hole relies on third party lists in order to block ads.\\n\\nYou can use the suggestions below, and/or add your own after installation\\n\\nTo deselect any list, use the arrow keys and spacebar" "${r}" "${c}" 7) + cmd=(whiptail --separate-output --checklist "Pi-hole relies on third party lists in order to block ads.\\n\\nYou can use the suggestions below, and/or add your own after installation\\n\\nTo deselect any list, use the arrow keys and spacebar" "${r}" "${c}" 6) # In an array, show the options available (all off by default): options=(StevenBlack "StevenBlack's Unified Hosts List" on MalwareDom "MalwareDomains" on Cameleon "Cameleon" on - ZeusTracker "ZeusTracker" on DisconTrack "Disconnect.me Tracking" on DisconAd "Disconnect.me Ads" on HostsFile "Hosts-file.net Ads" on) @@ -1205,7 +1204,6 @@ appendToListsFile() { StevenBlack ) echo "https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts" >> "${adlistFile}";; MalwareDom ) echo "https://mirror1.malwaredomains.com/files/justdomains" >> "${adlistFile}";; Cameleon ) echo "http://sysctl.org/cameleon/hosts" >> "${adlistFile}";; - ZeusTracker ) echo "https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist" >> "${adlistFile}";; DisconTrack ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt" >> "${adlistFile}";; DisconAd ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt" >> "${adlistFile}";; HostsFile ) echo "https://hosts-file.net/ad_servers.txt" >> "${adlistFile}";; @@ -1223,7 +1221,6 @@ installDefaultBlocklists() { appendToListsFile StevenBlack appendToListsFile MalwareDom appendToListsFile Cameleon - appendToListsFile ZeusTracker appendToListsFile DisconTrack appendToListsFile DisconAd appendToListsFile HostsFile From 38ff3431340d3d35307aa7c6b18aebad9d1ff7e6 Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Fri, 19 Jul 2019 17:35:21 -0700 Subject: [PATCH 178/366] Print an error message if the FTL release metadata download fails Signed-off-by: Mcat12 --- automated install/basic-install.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 380b424f..3641b961 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2404,6 +2404,7 @@ FTLcheckUpdate() { if ! 
FTLreleaseData=$(curl -sI https://github.com/pi-hole/FTL/releases/latest); then # There was an issue while retrieving the latest version + printf " %b Failed to retrieve latest FTL release metadata" "${CROSS}" return 3 fi From 3ebd43ebf00cd59b7ff8eef84885fd952a391acf Mon Sep 17 00:00:00 2001 From: Mcat12 Date: Fri, 19 Jul 2019 17:39:00 -0700 Subject: [PATCH 179/366] Remove outdated adlists.list check and fix empty adlists error message Signed-off-by: Mcat12 --- advanced/index.php | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index ff13ec60..62e45091 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -102,15 +102,6 @@ if ($blocklistglob === array()) { die("[ERROR] There are no domain lists generated lists within /etc/pihole/! Please update gravity by running pihole -g, or repair Pi-hole using pihole -r."); } -// Set location of adlists file -if (is_file("/etc/pihole/adlists.list")) { - $adLists = "/etc/pihole/adlists.list"; -} elseif (is_file("/etc/pihole/adlists.default")) { - $adLists = "/etc/pihole/adlists.default"; -} else { - die("[ERROR] File not found: /etc/pihole/adlists.list"); -} - // Get possible non-standard location of FTL's database $FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf"); if (isset($FTLsettings["GRAVITYDB"])) { @@ -134,7 +125,7 @@ while ($row = $adlistResults->fetchArray()) { } if (empty($adlistsUrls)) - die("[ERROR]: There are no adlists configured"); + die("[ERROR]: There are no adlists enabled"); // Get total number of blocklists (Including Whitelist, Blacklist & Wildcard lists) $adlistsCount = count($adlistsUrls) + 3; From 96031214c6a0d797015b4461901cf1059edbc00a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 22 Jul 2019 19:35:16 +0200 Subject: [PATCH 180/366] Add support for whitelist regex filter management via CLI. 
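For illustration, a hedged usage sketch of the new CLI handling (the regex below is only an example pattern, not something shipped with Pi-hole):

```bash
# Add a regex whitelist filter from the command line
pihole --whiteregex '(\.|^)example\.com$'
# Remove it again using delete mode (-d)
pihole --whiteregex -d '(\.|^)example\.com$'
```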
Signed-off-by: DL6ER --- .../database_migration/gravity/2_to_3.sql | 29 +++++++++++++++++++ advanced/Scripts/list.sh | 13 +++++++-- pihole | 2 ++ 3 files changed, 41 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql index a2602c4a..d7997936 100644 --- a/advanced/Scripts/database_migration/gravity/2_to_3.sql +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -28,5 +28,34 @@ CREATE TRIGGER tr_regex_blacklist_update AFTER UPDATE ON regex_blacklist UPDATE regex_blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; END; +CREATE TABLE regex_whitelist +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); + +CREATE TABLE regex_whitelist_by_group +( + regex_id INTEGER NOT NULL REFERENCES regex_whitelist (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (regex_id, group_id) +); + +CREATE VIEW vw_regex_whitelist AS SELECT DISTINCT domain + FROM regex_whitelist + LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_id = regex_whitelist.id + LEFT JOIN "group" ON "group".id = regex_whitelist_by_group.group_id + WHERE regex_whitelist.enabled = 1 AND (regex_whitelist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY regex_whitelist.id; + +CREATE TRIGGER tr_regex_whitelist_update AFTER UPDATE ON regex_whitelist + BEGIN + UPDATE regex_whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; + UPDATE info SET value = 3 WHERE property = 'version'; diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index e3dc552a..4ef86407 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -38,6 +38,9 @@ helpFunc() { elif [[ "${listType}" == "regex_blacklist" ]]; then param="-regex" type="regex blacklist filter" + elif [[ "${listType}" == "regex_blacklist" ]]; then + param="-whiteregex" + type="regex whitelist filter" else param="b" type="blacklist" @@ -89,8 +92,11 @@ HandleOther() { ProcessDomainList() { if [[ "${listType}" == "regex_blacklist" ]]; then - # Regex filter list + # Regex black filter list listname="regex blacklist filters" + elif [[ "${listType}" == "regex_whitelist" ]]; then + # Regex white filter list + listname="regex whitelist filters" else # Whitelist / Blacklist listname="${listType}" @@ -106,7 +112,7 @@ ProcessDomainList() { # if delmode then remove from desired list but do not add to the other if ${addmode}; then AddDomain "${dom}" "${listType}" - if [[ ! "${listType}" == "regex_blacklist" ]]; then + if [[ ! 
"${listType}" == "regex_"*"list" ]]; then RemoveDomain "${dom}" "${listAlt}" fi else @@ -173,7 +179,7 @@ Displaylist() { data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM ${listType};" 2> /dev/null)" if [[ -z $data ]]; then - echo -e "Not showing empty ${listname}" + echo -e "Not showing empty list" else echo -e "Displaying ${listname}:" count=1 @@ -217,6 +223,7 @@ for var in "$@"; do "-b" | "blacklist" ) listType="blacklist"; listAlt="whitelist";; "--wild" | "wildcard" ) listType="regex_blacklist"; wildcard=true;; "--regex" | "regex" ) listType="regex_blacklist";; + "--whiteregex" | "whiteregex" ) listType="regex_whitelist";; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; diff --git a/pihole b/pihole index 9fa65a8f..411b5791 100755 --- a/pihole +++ b/pihole @@ -377,6 +377,7 @@ Whitelist/Blacklist Options: -b, blacklist Blacklist domain(s) --wild, wildcard Wildcard blacklist domain(s) --regex, regex Regex blacklist domains(s) + --whiteregex Regex whitelist domains(s) Add '-h' for more info on whitelist/blacklist usage Debugging Options: @@ -438,6 +439,7 @@ case "${1}" in "-b" | "blacklist" ) listFunc "$@";; "--wild" | "wildcard" ) listFunc "$@";; "--regex" | "regex" ) listFunc "$@";; + "--whiteregex" | "whiteregex" ) listFunc "$@";; "-d" | "debug" ) debugFunc "$@";; "-f" | "flush" ) flushFunc "$@";; "-up" | "updatePihole" ) updatePiholeFunc "$@";; From 0d28dce326facddc13f02fe80903f78ea732736a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 22 Jul 2019 20:18:15 +0200 Subject: [PATCH 181/366] Print group table contents in debug log. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 0924e984..b2533eb0 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1076,18 +1076,27 @@ show_db_entries() { IFS="$OLD_IFS" } +show_groups() { + show_db_entries "Groups" "SELECT * FROM \"group\"" "4 100 7 10 13 50" +} + show_adlists() { show_db_entries "Adlists" "SELECT * FROM adlist" "4 100 7 10 13 50" + show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 100 7 10 13 50" } show_whitelist() { show_db_entries "Exact whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" + show_db_entries "Exact whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 100 7 10 13 50" + show_db_entries "Regex whitelist groups" "SELECT * FROM whitelist_by_group" "4 100 7 10 13 50" } show_blacklist() { show_db_entries "Exact blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" + show_db_entries "Exact blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 100 7 10 13 50" + show_db_entries "Regex blacklist groups" "SELECT * FROM blacklist_by_group" "4 100 7 10 13 50" } analyze_gravity_list() { @@ -1263,6 +1272,7 @@ process_status parse_setup_vars check_x_headers analyze_gravity_list +show_groups show_adlists show_whitelist show_blacklist From 0692be9bae12a8a96f582a2dc15968282df2e8f2 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 22 Jul 2019 20:59:52 +0200 Subject: [PATCH 182/366] Fix small mistake in 2->3 upgrade script. 
Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/2_to_3.sql | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql index d7997936..e121f28c 100644 --- a/advanced/Scripts/database_migration/gravity/2_to_3.sql +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -40,14 +40,14 @@ CREATE TABLE regex_whitelist CREATE TABLE regex_whitelist_by_group ( - regex_id INTEGER NOT NULL REFERENCES regex_whitelist (id), + regex_whitelist_id INTEGER NOT NULL REFERENCES regex_whitelist (id), group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (regex_id, group_id) + PRIMARY KEY (regex_whitelist_id, group_id) ); CREATE VIEW vw_regex_whitelist AS SELECT DISTINCT domain FROM regex_whitelist - LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_id = regex_whitelist.id + LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_whitelist_id = regex_whitelist.id LEFT JOIN "group" ON "group".id = regex_whitelist_by_group.group_id WHERE regex_whitelist.enabled = 1 AND (regex_whitelist_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY regex_whitelist.id; From 40d0caa70b642382b2ca35344e6da7fd6c452432 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 22 Jul 2019 21:03:42 +0200 Subject: [PATCH 183/366] Add undocumented --whitewild option that does the same --wild does for the whitelist. Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 3 ++- pihole | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 4ef86407..0183a9e2 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -75,7 +75,7 @@ HandleOther() { # Check validity of domain (don't check for regex entries) if [[ "${#domain}" -le 253 ]]; then - if [[ "${listType}" == "regex_blacklist" && "${wildcard}" == false ]]; then + if [[ ( "${listType}" == "regex_blacklist" || "${listType}" == "regex_whitelist" ) && "${wildcard}" == false ]]; then validDomain="${domain}" else validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check @@ -224,6 +224,7 @@ for var in "$@"; do "--wild" | "wildcard" ) listType="regex_blacklist"; wildcard=true;; "--regex" | "regex" ) listType="regex_blacklist";; "--whiteregex" | "whiteregex" ) listType="regex_whitelist";; + "--whitewild" | "whitewild" ) listType="regex_whitelist"; wildcard=true;; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; diff --git a/pihole b/pihole index 411b5791..b3260d83 100755 --- a/pihole +++ b/pihole @@ -440,6 +440,7 @@ case "${1}" in "--wild" | "wildcard" ) listFunc "$@";; "--regex" | "regex" ) listFunc "$@";; "--whiteregex" | "whiteregex" ) listFunc "$@";; + "--whitewild" | "whitewild" ) listFunc "$@";; "-d" | "debug" ) debugFunc "$@";; "-f" | "flush" ) flushFunc "$@";; "-up" | "updatePihole" ) updatePiholeFunc "$@";; From 6f58d58cae7ab35a523c39e268ecac2e562d16a1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 22 Jul 2019 22:26:27 +0200 Subject: [PATCH 184/366] Add --whitewild to help texts and man pages. 
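For illustration (example.com is just a placeholder domain):

```bash
# Add a domain to the wildcard-style whitelist, mirroring what --wild does for the blacklist
pihole --whitewild example.com
# The updated help text is available via -h
pihole --whitewild -h
```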
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 11 +++++++---- manpages/pihole.8 | 14 ++++++++++++-- pihole | 3 ++- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 0183a9e2..f0cf4701 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -32,15 +32,18 @@ helpFunc() { if [[ "${listType}" == "whitelist" ]]; then param="w" type="whitelist" - elif [[ "${listType}" == "regex_blacklist" && "${wildcard}" == true ]]; then - param="-wild" - type="wildcard blacklist" elif [[ "${listType}" == "regex_blacklist" ]]; then param="-regex" type="regex blacklist filter" - elif [[ "${listType}" == "regex_blacklist" ]]; then + elif [[ "${listType}" == "regex_blacklist" && "${wildcard}" == true ]]; then + param="-wild" + type="wildcard blacklist" + elif [[ "${listType}" == "regex_whitelist" ]]; then param="-whiteregex" type="regex whitelist filter" + elif [[ "${listType}" == "regex_whitelist" && "${wildcard}" == true ]]; then + param="-whitewild" + type="wildcard whitelist" else param="b" type="blacklist" diff --git a/manpages/pihole.8 b/manpages/pihole.8 index 065280c7..11923392 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -66,14 +66,24 @@ Available commands and options: Adds or removes specified domain or domains to the blacklist .br +\fB--regex, regex\fR [options] [ ] +.br + Add or removes specified regex filter to the regex blacklist +.br + +\fB--whiteregex\fR [options] [ ] +.br + Add or removes specified regex filter to the regex whitelist +.br + \fB--wild, wildcard\fR [options] [ ] .br Add or removes specified domain to the wildcard blacklist .br -\fB--regex, regex\fR [options] [ ] +\fB--whitewild\fR [options] [ ] .br - Add or removes specified regex filter to the regex blacklist + Add or removes specified domain to the wildcard whitelist .br (Whitelist/Blacklist manipulation options): diff --git a/pihole b/pihole index b3260d83..5d373a47 100755 --- a/pihole +++ b/pihole @@ -375,9 +375,10 @@ Add '-h' after specific commands for more information on usage Whitelist/Blacklist Options: -w, whitelist Whitelist domain(s) -b, blacklist Blacklist domain(s) - --wild, wildcard Wildcard blacklist domain(s) --regex, regex Regex blacklist domains(s) --whiteregex Regex whitelist domains(s) + --wild, wildcard Wildcard blacklist domain(s) + --whitewild Wildcard whitelist domain(s) Add '-h' for more info on whitelist/blacklist usage Debugging Options: From ecd6817aaf89e1c5012c9bbf0ac116ff6a79c4ab Mon Sep 17 00:00:00 2001 From: "B. Olausson" Date: Mon, 29 Jul 2019 18:34:00 +0200 Subject: [PATCH 185/366] This change fixes issue #145 "stty: standard input: Inappropriate ioctl for device ".It checks if a real terminal exist, if not it sets the screen size to a fixed value. This helps to avoid nasty and unnecessary logs when running "pihole -up" via e.g. cron. Signed-off-by: B. 
Olausson --- automated install/basic-install.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 5b81f691..9891fd9d 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -84,8 +84,13 @@ if [ -z "${USER}" ]; then fi -# Find the rows and columns will default to 80x24 if it can not be detected -screen_size=$(stty size || printf '%d %d' 24 80) +# Check if we are running on a real terminal and find the rows and columns +# If there is no real terminal, we will default to 80x24 +if [ -t 0 ] ; then + screen_size=$(stty size) +else + screen_size="24 80" +fi # Set rows variable to contain first number printf -v rows '%d' "${screen_size%% *}" # Set columns variable to contain second number From 63230cb72dbe4d7201aa68c6a5f3abe898f8de2d Mon Sep 17 00:00:00 2001 From: Andreas Date: Sun, 4 Aug 2019 21:21:08 +0200 Subject: [PATCH 186/366] quick fix for when dig also returns a CNAME Signed-off-by: ryrun --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index d7c66d68..14b32827 100755 --- a/gravity.sh +++ b/gravity.sh @@ -349,7 +349,7 @@ gravity_DownloadBlocklistFromUrl() { else printf -v port "%s" "${PIHOLE_DNS_1#*#}" fi - ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}") + ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1) if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then port=443; else port=80 From a95b4734170c8a401a698cd03665546f3269396e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 20:56:01 +0200 Subject: [PATCH 187/366] Rearrange if statements to ensure the proper output is shown for wildcard-style filters. Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index f0cf4701..31822c9a 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -32,18 +32,18 @@ helpFunc() { if [[ "${listType}" == "whitelist" ]]; then param="w" type="whitelist" - elif [[ "${listType}" == "regex_blacklist" ]]; then - param="-regex" - type="regex blacklist filter" elif [[ "${listType}" == "regex_blacklist" && "${wildcard}" == true ]]; then param="-wild" type="wildcard blacklist" + elif [[ "${listType}" == "regex_blacklist" ]]; then + param="-regex" + type="regex blacklist filter" elif [[ "${listType}" == "regex_whitelist" && "${wildcard}" == true ]]; then param="-whitewild" type="wildcard whitelist" + elif [[ "${listType}" == "regex_whitelist" ]]; then + param="-whiteregex" + type="regex whitelist filter" else param="b" type="blacklist" From 09190c1735eb7482537257cab54ea4103d8ffd18 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 21:03:47 +0200 Subject: [PATCH 188/366] Only check once whether this is a regex list or not. 
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 31822c9a..60b820f5 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -94,15 +94,19 @@ HandleOther() { } ProcessDomainList() { + local is_regexlist if [[ "${listType}" == "regex_blacklist" ]]; then # Regex black filter list listname="regex blacklist filters" + is_regexlist=true elif [[ "${listType}" == "regex_whitelist" ]]; then # Regex white filter list listname="regex whitelist filters" + is_regexlist=true else # Whitelist / Blacklist listname="${listType}" + is_regexlist=false fi for dom in "${domList[@]}"; do @@ -115,7 +119,7 @@ ProcessDomainList() { # if delmode then remove from desired list but do not add to the other if ${addmode}; then AddDomain "${dom}" "${listType}" - if [[ ! "${listType}" == "regex_"*"list" ]]; then + if ! ${is_regexlist}; then RemoveDomain "${dom}" "${listAlt}" fi else From 06860ed5b49a4554026ca4f67aa1dc8f9eed06c7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 21:07:39 +0200 Subject: [PATCH 189/366] Group tables have only two columns. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index b2533eb0..e7e6b791 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1082,21 +1082,21 @@ show_groups() { show_adlists() { show_db_entries "Adlists" "SELECT * FROM adlist" "4 100 7 10 13 50" - show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 100 7 10 13 50" + show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } show_whitelist() { show_db_entries "Exact whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 100 7 10 13 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM whitelist_by_group" "4 100 7 10 13 50" + show_db_entries "Exact whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" + show_db_entries "Regex whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" } show_blacklist() { show_db_entries "Exact blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 100 7 10 13 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM blacklist_by_group" "4 100 7 10 13 50" + show_db_entries "Exact blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" + show_db_entries "Regex blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" } analyze_gravity_list() { From af754e3fc4c394276aadeec26f1068ef9ae18c2d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 21:08:36 +0200 Subject: [PATCH 190/366] Rearrange group tables directly next to the tables they refer to. 
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index e7e6b791..dc353ff0 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1087,15 +1087,15 @@ show_adlists() { show_whitelist() { show_db_entries "Exact whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" - show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" show_db_entries "Exact whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" + show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" show_db_entries "Regex whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" } show_blacklist() { show_db_entries "Exact blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" - show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" show_db_entries "Exact blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" + show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" show_db_entries "Regex blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" } From 6e2e825a5fe3c4148347067e58309d55e4771bf8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 21:10:52 +0200 Subject: [PATCH 191/366] Rename options "pihole --whiteregex" to "pihole --white-regex" for the sake of readability. The same applied for "whitewild" -> "white-wild" Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 8 ++++---- pihole | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 60b820f5..6a606665 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -39,10 +39,10 @@ helpFunc() { param="-regex" type="regex blacklist filter" elif [[ "${listType}" == "regex_whitelist" && "${wildcard}" == true ]]; then - param="-whitewild" + param="-white-wild" type="wildcard whitelist" elif [[ "${listType}" == "regex_whitelist" ]]; then - param="-whiteregex" + param="-white-regex" type="regex whitelist filter" else param="b" @@ -230,8 +230,8 @@ for var in "$@"; do "-b" | "blacklist" ) listType="blacklist"; listAlt="whitelist";; "--wild" | "wildcard" ) listType="regex_blacklist"; wildcard=true;; "--regex" | "regex" ) listType="regex_blacklist";; - "--whiteregex" | "whiteregex" ) listType="regex_whitelist";; - "--whitewild" | "whitewild" ) listType="regex_whitelist"; wildcard=true;; + "--white-regex" | "white-regex" ) listType="regex_whitelist";; + "--white-wild" | "white-wild" ) listType="regex_whitelist"; wildcard=true;; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; diff --git a/pihole b/pihole index 5d373a47..dfd6eda9 100755 --- a/pihole +++ b/pihole @@ -440,8 +440,8 @@ case "${1}" in "-b" | "blacklist" ) listFunc "$@";; "--wild" | "wildcard" ) listFunc "$@";; "--regex" | "regex" ) listFunc "$@";; - "--whiteregex" | "whiteregex" ) listFunc "$@";; - "--whitewild" | "whitewild" ) listFunc "$@";; + "--white-regex" | "white-regex" ) listFunc "$@";; + "--white-wild" | "white-wild" ) listFunc "$@";; "-d" | "debug" ) debugFunc "$@";; "-f" | "flush" ) flushFunc "$@";; "-up" | "updatePihole" ) updatePiholeFunc "$@";; From 4371c9ba03e48daa2f40e1dbe31c1b3bd72d2447 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 5 Aug 2019 21:20:07 +0200 Subject: [PATCH 192/366] Ensure proper permissions are set for gravity.db after creation. 
Signed-off-by: DL6ER --- automated install/basic-install.sh | 3 +++ gravity.sh | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 1e87b943..7dbdc596 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1908,6 +1908,9 @@ installPihole() { chmod a+rx /var/www/html # Give pihole access to the Web server group usermod -a -G ${LIGHTTPD_GROUP} pihole + # Give lighttpd access to the pihole group so the web interface can + # manage the gravity.db database + usermod -a -G pihole ${LIGHTTPD_USER} # If the lighttpd command is executable, if is_command lighty-enable-mod ; then # enable fastcgi and fastcgi-php diff --git a/gravity.sh b/gravity.sh index f9ecc1d1..1ad43df1 100755 --- a/gravity.sh +++ b/gravity.sh @@ -85,6 +85,10 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" + + # Ensure proper permissions are set for the newly created database + chown pihole:pihole "${gravityDBfile}" + chmod g+w "${piholeDir}" "${gravityDBfile}" } # Import domains from file and store them in the specified database table From dc93462d42e77b488883f6a392e67a5561d303c5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 6 Aug 2019 20:28:00 +0200 Subject: [PATCH 193/366] Group table has only two columns Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index dc353ff0..38861849 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1077,7 +1077,7 @@ show_db_entries() { } show_groups() { - show_db_entries "Groups" "SELECT * FROM \"group\"" "4 100 7 10 13 50" + show_db_entries "Groups" "SELECT * FROM \"group\"" "4 4" } show_adlists() { From b2d8c4374b8d1b36518c3dbafcb3c502d9d5857d Mon Sep 17 00:00:00 2001 From: snapsl Date: Wed, 14 Aug 2019 23:28:13 +0200 Subject: [PATCH 194/366] tweaked code style of webpage.sh Signed-off-by: snapsl --- advanced/Scripts/webpage.sh | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 9db10bda..b799d67d 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -87,9 +87,9 @@ SetTemperatureUnit() { HashPassword() { # Compute password hash twice to avoid rainbow table vulnerability - return=$(echo -n ${1} | sha256sum | sed 's/\s.*$//') - return=$(echo -n ${return} | sha256sum | sed 's/\s.*$//') - echo ${return} + return=$(echo -n "${1}" | sha256sum | sed 's/\s.*$//') + return=$(echo -n "${return}" | sha256sum | sed 's/\s.*$//') + echo "${return}" } SetWebPassword() { @@ -143,18 +143,18 @@ ProcessDNSSettings() { delete_dnsmasq_setting "server" COUNTER=1 - while [[ 1 ]]; do + while true ; do var=PIHOLE_DNS_${COUNTER} if [ -z "${!var}" ]; then break; fi add_dnsmasq_setting "server" "${!var}" - let COUNTER=COUNTER+1 + (( COUNTER++ )) done # The option LOCAL_DNS_PORT is deprecated # We apply it once more, and then convert it into the current format - if [ ! 
-z "${LOCAL_DNS_PORT}" ]; then add_dnsmasq_setting "server" "127.0.0.1#${LOCAL_DNS_PORT}" add_setting "PIHOLE_DNS_${COUNTER}" "127.0.0.1#${LOCAL_DNS_PORT}" delete_setting "LOCAL_DNS_PORT" @@ -184,7 +184,7 @@ trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC68345710423 delete_dnsmasq_setting "host-record" - if [ ! -z "${HOSTRECORD}" ]; then + if [ -n "${HOSTRECORD}" ]; then add_dnsmasq_setting "host-record" "${HOSTRECORD}" fi @@ -538,7 +538,8 @@ Interfaces: } Teleporter() { - local datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") + datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") + local datetimestamp php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.tar.gz" } From 20a839fef58e463333ba1c08d5fe9d9bee8fd5e1 Mon Sep 17 00:00:00 2001 From: snapsl Date: Thu, 15 Aug 2019 11:20:55 +0200 Subject: [PATCH 195/366] fixed local declaration before assignment Signed-off-by: snapsl --- advanced/Scripts/webpage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index b799d67d..39299960 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -538,8 +538,8 @@ Interfaces: } Teleporter() { - datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") local datetimestamp + datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S") php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.tar.gz" } From 3e78ed95d4cc6a7ef50d0bca11fcc17edb8eb3c5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 17 Aug 2019 15:04:04 +0200 Subject: [PATCH 196/366] Fix displaying options for table "group" in the debugger. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index dc353ff0..38861849 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1077,7 +1077,7 @@ show_db_entries() { } show_groups() { - show_db_entries "Groups" "SELECT * FROM \"group\"" "4 4" + show_db_entries "Groups" "SELECT * FROM \"group\"" "4 4 30 50" } From b1838512b25bf3715d629c5f43fc27539f70d4ac Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 22 Aug 2019 13:39:58 +0200 Subject: [PATCH 197/366] Explicitly select columns (and their order) when listing the database tables. Print timestamps translated to strings instead of printing the integer timestamps. 
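The conversion itself is SQLite's datetime() with the 'unixepoch' modifier; a standalone sketch, assuming the default database location:

```bash
# Show whitelist entries with readable timestamps instead of raw epoch integers
sqlite3 /etc/pihole/gravity.db \
    "SELECT id, domain, enabled, datetime(date_added,'unixepoch') AS date_added FROM whitelist;"
```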
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 134b15ce..7ba03a37 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1081,22 +1081,22 @@ show_groups() { } show_adlists() { - show_db_entries "Adlists" "SELECT * FROM adlist" "4 100 7 10 13 50" + show_db_entries "Adlists" "SELECT id,address,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM adlist" "4 100 7 19 19 50" show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } show_whitelist() { - show_db_entries "Exact whitelist" "SELECT * FROM whitelist" "4 100 7 10 13 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT * FROM regex_whitelist" "4 100 7 10 13 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" + show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM whitelist" "4 100 7 19 19 50" + show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" + show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" + show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" } show_blacklist() { - show_db_entries "Exact blacklist" "SELECT * FROM blacklist" "4 100 7 10 13 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT * FROM regex_blacklist" "4 100 7 10 13 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" + show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM blacklist" "4 100 7 19 19 50" + show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" + show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM regex_blacklist" "4 100 7 19 19 50" + show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" } analyze_gravity_list() { From cc40c18f49ccf9dcf926cd18bb9dc1880345be3c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 22 Aug 2019 13:54:46 +0200 Subject: [PATCH 198/366] Wrap upgrade script commands in a transaction. 
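The migrations end up with roughly this shape (a sketch; the actual schema statements live in the SQL files touched below):

```bash
# Apply a migration step atomically: either all statements commit, or none do
sqlite3 /etc/pihole/gravity.db <<'SQL'
.timeout 30000
BEGIN TRANSACTION;
-- schema changes for this migration step go here
UPDATE info SET value = 2 WHERE property = 'version';
COMMIT;
SQL
```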
Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/1_to_2.sql | 6 ++++++ advanced/Scripts/database_migration/gravity/2_to_3.sql | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/advanced/Scripts/database_migration/gravity/1_to_2.sql b/advanced/Scripts/database_migration/gravity/1_to_2.sql index 90a48418..6d57a6fe 100644 --- a/advanced/Scripts/database_migration/gravity/1_to_2.sql +++ b/advanced/Scripts/database_migration/gravity/1_to_2.sql @@ -1,3 +1,7 @@ +.timeout 30000 + +BEGIN TRANSACTION; + CREATE TABLE domain_audit ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -6,3 +10,5 @@ CREATE TABLE domain_audit ); UPDATE info SET value = 2 WHERE property = 'version'; + +COMMIT; diff --git a/advanced/Scripts/database_migration/gravity/2_to_3.sql b/advanced/Scripts/database_migration/gravity/2_to_3.sql index e121f28c..fd7c24d2 100644 --- a/advanced/Scripts/database_migration/gravity/2_to_3.sql +++ b/advanced/Scripts/database_migration/gravity/2_to_3.sql @@ -2,6 +2,8 @@ PRAGMA FOREIGN_KEYS=OFF; +BEGIN TRANSACTION; + ALTER TABLE regex RENAME TO regex_blacklist; CREATE TABLE regex_blacklist_by_group @@ -59,3 +61,5 @@ CREATE TRIGGER tr_regex_whitelist_update AFTER UPDATE ON regex_whitelist UPDATE info SET value = 3 WHERE property = 'version'; + +COMMIT; From aef7892de68a272df0dd62bd18e28a8790af66fa Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 22 Aug 2019 13:57:01 +0200 Subject: [PATCH 199/366] Add missing hyphens. Signed-off-by: DL6ER --- manpages/pihole.8 | 4 ++-- pihole | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/manpages/pihole.8 b/manpages/pihole.8 index 11923392..ed012092 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -71,7 +71,7 @@ Available commands and options: Add or removes specified regex filter to the regex blacklist .br -\fB--whiteregex\fR [options] [ ] +\fB--white-regex\fR [options] [ ] .br Add or removes specified regex filter to the regex whitelist .br @@ -81,7 +81,7 @@ Available commands and options: Add or removes specified domain to the wildcard blacklist .br -\fB--whitewild\fR [options] [ ] +\fB--white-wild\fR [options] [ ] .br Add or removes specified domain to the wildcard whitelist .br diff --git a/pihole b/pihole index dfd6eda9..1d9f0809 100755 --- a/pihole +++ b/pihole @@ -375,10 +375,10 @@ Add '-h' after specific commands for more information on usage Whitelist/Blacklist Options: -w, whitelist Whitelist domain(s) -b, blacklist Blacklist domain(s) - --regex, regex Regex blacklist domains(s) - --whiteregex Regex whitelist domains(s) - --wild, wildcard Wildcard blacklist domain(s) - --whitewild Wildcard whitelist domain(s) + --regex, regex Regex blacklist domains(s) + --white-regex Regex whitelist domains(s) + --wild, wildcard Wildcard blacklist domain(s) + --white-wild Wildcard whitelist domain(s) Add '-h' for more info on whitelist/blacklist usage Debugging Options: From 42ccc1ef24cf80d53d0b20e9e01d0a6d1a66a992 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 22 Aug 2019 14:06:42 +0200 Subject: [PATCH 200/366] Add support for regex whitelist in "pihole -q". 
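Usage is unchanged; the query output simply gains a regex whitelist section (the domain is a placeholder):

```bash
# Check which lists and regex filters a domain matches
pihole -q example.com
```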
Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a587f238..4bffe251 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -128,25 +128,24 @@ scanDatabaseTable() { done } -# Scan Whitelist and Blacklist -scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" -scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" - -# Scan Regex table -mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_blacklist" 2> /dev/null) +scanRegexDatabaseTable() { +local domain list +domain="${1}" +list="${2}" +mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_${list}" 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then # Split regexps over a new line str_regexList=$(printf '%s\n' "${regexList[@]}") - # Check domainQuery against regexps - mapfile -t regexMatches < <(scanList "${domainQuery}" "${str_regexList}" "regex") + # Check domain against regexps + mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex") # If there were regex matches if [[ "${#regexMatches[@]}" -ne 0 ]]; then # Split matching regexps over a new line str_regexMatches=$(printf '%s\n' "${regexMatches[@]}") # Form a "matched" message - str_message="${matchType^} found in ${COL_BOLD}Regex list${COL_NC}" + str_message="${matchType^} found in ${COL_BOLD}Regex ${list}${COL_NC}" # Form a "results" message str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" # If we are displaying more than just the source of the block @@ -159,11 +158,20 @@ if [[ "${#regexList[@]}" -ne 0 ]]; then # shellcheck disable=SC2001 echo "${str_result}" | sed 's/^/ /' else - echo "π Regex list" + echo "π Regex ${list}" exit 0 fi fi fi +} + +# Scan Whitelist and Blacklist +scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" +scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" + +# Scan Regex table +scanRegexDatabaseTable "${domainQuery}" "whitelist" +scanRegexDatabaseTable "${domainQuery}" "blacklist" # Get version sorted *.domains filenames (without dir path) lists=("$(cd "$piholeDir" || exit 0; printf "%s\\n" -- *.domains | sort -V)") From 23b688287f4fdb9c809053880594f894c4cfbfc8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 22 Aug 2019 14:12:58 +0200 Subject: [PATCH 201/366] Fix indentation in query.sh. No functional change in this commit. 
Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 90 ++++++++++++++++++++------------------- 1 file changed, 46 insertions(+), 44 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 4bffe251..035adaac 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -36,14 +36,14 @@ scanList(){ # /dev/null forces filename to be printed when only one list has been generated # shellcheck disable=SC2086 case "${type}" in - "exact" ) grep -i -E -l "(^|(?/dev/null;; - # Create array of regexps - # Iterate through each regexp and check whether it matches the domainQuery - # If it does, print the matching regexp and continue looping - # Input 1 - regexps | Input 2 - domainQuery - "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ - <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; - * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; + "exact" ) grep -i -E -l "(^|(?/dev/null;; + # Create array of regexps + # Iterate through each regexp and check whether it matches the domainQuery + # If it does, print the matching regexp and continue looping + # Input 1 - regexps | Input 2 - domainQuery + "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ + <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; + * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } @@ -100,8 +100,8 @@ scanDatabaseTable() { # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. case "${type}" in - "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; - * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; + * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac # Send prepared query to gravity database @@ -129,40 +129,42 @@ scanDatabaseTable() { } scanRegexDatabaseTable() { -local domain list -domain="${1}" -list="${2}" -mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_${list}" 2> /dev/null) - -# If we have regexps to process -if [[ "${#regexList[@]}" -ne 0 ]]; then - # Split regexps over a new line - str_regexList=$(printf '%s\n' "${regexList[@]}") - # Check domain against regexps - mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex") - # If there were regex matches - if [[ "${#regexMatches[@]}" -ne 0 ]]; then - # Split matching regexps over a new line - str_regexMatches=$(printf '%s\n' "${regexMatches[@]}") - # Form a "matched" message - str_message="${matchType^} found in ${COL_BOLD}Regex ${list}${COL_NC}" - # Form a "results" message - str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" - # If we are displaying more than just the source of the block - if [[ -z "${blockpage}" ]]; then - # Set the wildcard match flag - wcMatch=true - # Echo the "matched" message, indented by one space - echo " ${str_message}" - # Echo the "results" message, each line indented by three spaces - # shellcheck disable=SC2001 - echo "${str_result}" | sed 's/^/ /' - else - echo "π Regex ${list}" - exit 0 - fi - fi -fi + local domain list + domain="${1}" + list="${2}" + + # Query all regex from the corresponding database tables + mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_${list}" 
2> /dev/null) + + # If we have regexps to process + if [[ "${#regexList[@]}" -ne 0 ]]; then + # Split regexps over a new line + str_regexList=$(printf '%s\n' "${regexList[@]}") + # Check domain against regexps + mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex") + # If there were regex matches + if [[ "${#regexMatches[@]}" -ne 0 ]]; then + # Split matching regexps over a new line + str_regexMatches=$(printf '%s\n' "${regexMatches[@]}") + # Form a "matched" message + str_message="${matchType^} found in ${COL_BOLD}Regex ${list}${COL_NC}" + # Form a "results" message + str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" + # If we are displaying more than just the source of the block + if [[ -z "${blockpage}" ]]; then + # Set the wildcard match flag + wcMatch=true + # Echo the "matched" message, indented by one space + echo " ${str_message}" + # Echo the "results" message, each line indented by three spaces + # shellcheck disable=SC2001 + echo "${str_result}" | sed 's/^/ /' + else + echo "π Regex ${list}" + exit 0 + fi + fi + fi } # Scan Whitelist and Blacklist From 6faddfcd3d0a0ae00c0de3833b43013c60efec96 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 23 Aug 2019 10:09:52 +0200 Subject: [PATCH 202/366] Print timestamps in local time zone of the Pi-hole. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c9280f45..84e34416 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1109,21 +1109,21 @@ show_groups() { } show_adlists() { - show_db_entries "Adlists" "SELECT id,address,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM adlist" "4 100 7 19 19 50" + show_db_entries "Adlists" "SELECT id,address,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM adlist" "4 100 7 19 19 50" show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } show_whitelist() { - show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM whitelist" "4 100 7 19 19 50" + show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM whitelist" "4 100 7 19 19 50" show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" + show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" } show_blacklist() { - show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM blacklist" "4 100 7 19 19 50" + show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') 
date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM blacklist" "4 100 7 19 19 50" show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch') date_added,datetime(date_modified,'unixepoch') date_modified,comment FROM regex_blacklist" "4 100 7 19 19 50" + show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_blacklist" "4 100 7 19 19 50" show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" } From ca8982494ba37002377dd0bf78b023381a61bfce Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 1 Sep 2019 14:42:07 +0200 Subject: [PATCH 203/366] Store timestamp when the gravity table was last updated successfully. This fixes https://github.com/pi-hole/AdminLTE/issues/989 Signed-off-by: DL6ER --- gravity.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/gravity.sh b/gravity.sh index 2fc26d49..98747f35 100755 --- a/gravity.sh +++ b/gravity.sh @@ -90,6 +90,16 @@ generate_gravity_database() { chmod g+w "${piholeDir}" "${gravityDBfile}" } +update_gravity_timestamp() { + # Update timestamp when the gravity table was last updated successfully + output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values (\"updated\",cast(strftime('%s', 'now') as int));"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" + fi +} + # Import domains from file and store them in the specified database table database_table_from_file() { # Define locals @@ -749,6 +759,8 @@ fi gravity_generateLocalList gravity_ShowCount +update_gravity_timestamp + gravity_Cleanup echo "" From a8af2e1837946d16f273eb38331a4e1fc5c67e3d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 4 Sep 2019 23:14:29 +0200 Subject: [PATCH 204/366] Store domains without sorting and unifying them first. This allows us to preserve the relationship of the individual domains to the lists they came from. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 5 + gravity.sh | 194 ++++++++---------- 2 files changed, 86 insertions(+), 113 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index a82d0d51..7d59a6a0 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -39,4 +39,9 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" version=3 fi + if [[ "$version" == "3" ]]; then + # This migration script upgrades ... 
+ sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/3_to_4.sql" + version=3 + fi } diff --git a/gravity.sh b/gravity.sh index 98747f35..86bb6a2e 100755 --- a/gravity.sh +++ b/gravity.sh @@ -97,25 +97,39 @@ update_gravity_timestamp() { if [[ "${status}" -ne 0 ]]; then echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" + return 1 fi + return 0 } -# Import domains from file and store them in the specified database table -database_table_from_file() { - # Define locals - local table source backup_path backup_file +database_truncate_table() { + local table table="${1}" - source="${2}" - backup_path="${piholeDir}/migration_backup" - backup_file="${backup_path}/$(basename "${2}")" - # Truncate table output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" + return 1 + fi + return 0 +} + +# Import domains from file and store them in the specified database table +database_table_from_file() { + # Define locals + local table source backup_path backup_file arg + table="${1}" + source="${2}" + arg="${3}" + backup_path="${piholeDir}/migration_backup" + backup_file="${backup_path}/$(basename "${2}")" + + # Truncate table only if not gravity (we add multiple times to this table) + if [[ "${table}" != "gravity" ]]; then + database_truncate_table "${table}" fi local tmpFile @@ -123,31 +137,30 @@ database_table_from_file() { local timestamp timestamp="$(date --utc +'%s')" local inputfile - if [[ "${table}" == "gravity" ]]; then - # No need to modify the input data for the gravity table - inputfile="${source}" - else - # Apply format for white-, blacklist, regex, and adlist tables - # Read file line by line - local rowid - declare -i rowid - rowid=1 - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then - if [[ "${table}" == "domain_audit" ]]; then - # domain_audit table format (no enable or modified fields) - echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" - else - # White-, black-, and regexlist format - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" - fi - rowid+=1 + # Apply format for white-, blacklist, regex, and adlist tables + # Read file line by line + local rowid + declare -i rowid + rowid=1 + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ -n "${domain}" ]]; then + if [[ "${table}" == "domain_audit" ]]; then + # domain_audit table format (no enable or modified fields) + echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" + elif [[ "${table}" == "gravity" ]]; then + # gravity table format + echo "\"${domain}\",${arg}" >> "${tmpFile}" + else + # White-, black-, and regexlist format + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" fi - done - inputfile="${tmpFile}" - fi + rowid+=1 + fi + done + inputfile="${tmpFile}" + # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 @@ -263,12 +276,13 @@ gravity_CheckDNSResolutionAvailable() { } # Retrieve blocklist URLs and parse domains from adlist.list -gravity_GetBlocklistUrls() { +gravity_DownloadBlocklists() { echo -e " ${INFO} 
${COL_BOLD}Neutrino emissions detected${COL_NC}..." # Retrieve source URLs from gravity database # We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true) mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" + mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( @@ -285,21 +299,23 @@ gravity_GetBlocklistUrls() { if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then echo -e "${OVER} ${TICK} ${str}" - return 0 else echo -e "${OVER} ${CROSS} ${str}" echo -e " ${INFO} No source list found, or it is empty" echo "" return 1 fi -} -# Define options for when retrieving blocklists -gravity_SetDownloadOptions() { local url domain agent cmd_ext str - echo "" + # Flush gravity table once before looping over sources + str="Flushing gravity table" + echo -ne " ${INFO} ${str}..." + if database_truncate_table "gravity"; then + echo -e "${OVER} ${TICK} ${str}" + fi + # Loop through $sources and download each one for ((i = 0; i < "${#sources[@]}"; i++)); do url="${sources[$i]}" @@ -319,7 +335,7 @@ gravity_SetDownloadOptions() { esac echo -e " ${INFO} Target: ${domain} (${url##*/})" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" echo "" done gravity_Blackbody=true @@ -327,7 +343,7 @@ gravity_SetDownloadOptions() { # Download specified URL and perform checks on HTTP status and file content gravity_DownloadBlocklistFromUrl() { - local url="${1}" cmd_ext="${2}" agent="${3}" heisenbergCompensator="" patternBuffer str httpCode success="" + local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" heisenbergCompensator="" patternBuffer str httpCode success="" # Create temp file to store content on disk instead of RAM patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb") @@ -408,11 +424,20 @@ gravity_DownloadBlocklistFromUrl() { # Determine if the blocklist was downloaded and saved correctly if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then - : # Do not attempt to re-parse file + # Add domains to database table + str="Adding to database table" + echo -ne " ${INFO} ${str}..." + database_table_from_file "gravity" "${saveLocation}" "${adlistID}" + echo -e "${OVER} ${TICK} ${str}" # Check if $patternbuffer is a non-zero length file elif [[ -s "${patternBuffer}" ]]; then # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" + # Add domains to database table + str="Adding to database table" + echo -ne " ${INFO} ${str}..." + database_table_from_file "gravity" "${saveLocation}" "${adlistID}" + echo -e "${OVER} ${TICK} ${str}" else # Fall back to previously cached list if $patternBuffer is empty echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" @@ -421,6 +446,11 @@ gravity_DownloadBlocklistFromUrl() { # Determine if cached list has read permission if [[ -r "${saveLocation}" ]]; then echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" + # Add domains to database table + str="Adding to database table" + echo -ne " ${INFO} ${str}..." 
+ database_table_from_file "gravity" "${saveLocation}" "${adlistID}" + echo -e "${OVER} ${TICK} ${str}" else echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" fi @@ -432,7 +462,7 @@ gravity_ParseFileIntoDomains() { local source="${1}" destination="${2}" firstLine abpFilter # Determine if we are parsing a consolidated list - if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then + #if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then # Remove comments and print only the domain name # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious # This helps with that and makes it easier to read @@ -449,7 +479,7 @@ gravity_ParseFileIntoDomains() { sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}" chmod 644 "${destination}" return 0 - fi + #fi # Individual file parsing: Keep comments, while parsing domains from each line # We keep comments to respect the list maintainer's licensing @@ -536,80 +566,23 @@ gravity_ParseFileIntoDomains() { fi } -# Create (unfiltered) "Matter and Light" consolidated list -gravity_ConsolidateDownloadedBlocklists() { - local str lastLine - - str="Consolidating blocklists" - echo -ne " ${INFO} ${str}..." - - # Empty $matterAndLight if it already exists, otherwise, create it - : > "${piholeDir}/${matterAndLight}" - chmod 644 "${piholeDir}/${matterAndLight}" - - # Loop through each *.domains file - for i in "${activeDomains[@]}"; do - # Determine if file has read permissions, as download might have failed - if [[ -r "${i}" ]]; then - # Remove windows CRs from file, convert list to lower case, and append into $matterAndLight - tr -d '\r' < "${i}" | tr '[:upper:]' '[:lower:]' >> "${piholeDir}/${matterAndLight}" - - # Ensure that the first line of a new list is on a new line - lastLine=$(tail -1 "${piholeDir}/${matterAndLight}") - if [[ "${#lastLine}" -gt 0 ]]; then - echo "" >> "${piholeDir}/${matterAndLight}" - fi - fi - done - echo -e "${OVER} ${TICK} ${str}" - -} - -# Parse consolidated list into (filtered, unique) domains-only format -gravity_SortAndFilterConsolidatedList() { - local str num - - str="Extracting domains from blocklists" - echo -ne " ${INFO} ${str}..." - - # Parse into file - gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}" - - # Format $parsedMatter line total as currency - num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")") - - echo -e "${OVER} ${TICK} ${str}" - echo -e " ${INFO} Gravity pulled in ${COL_BLUE}${num}${COL_NC} domains" - - str="Removing duplicate domains" - echo -ne " ${INFO} ${str}..." - sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}" - chmod 644 "${piholeDir}/${preEventHorizon}" - echo -e "${OVER} ${TICK} ${str}" - - # Format $preEventHorizon line total as currency - num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") - str="Storing ${COL_BLUE}${num}${COL_NC} unique blocking domains in database" - echo -ne " ${INFO} ${str}..." 
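# Illustrative sketch (not part of the committed diff): the tr/sed pipeline kept above in
# gravity_ParseFileIntoDomains() reduces a hosts-style list to bare domains. With a
# made-up input:
#
#   printf '0.0.0.0 Ads.Example.COM\r\n# some comment\n0.0.0.0 tracker.example.net\n' | \
#       tr -d '\r' | tr '[:upper:]' '[:lower:]' | \
#       sed -r '/(\/|#).*$/d' | \
#       sed -r 's/^.*\s+//g' | \
#       sed -r '/([^\.]+\.)+[^\.]{2,}/!d'
#
# this prints "ads.example.com" and "tracker.example.net": carriage returns are dropped,
# everything is lowercased, lines containing "#" or "/" are removed, the leading IP and
# whitespace are stripped, and anything that does not look like a domain is discarded.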
- database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}" - echo -e "${OVER} ${TICK} ${str}" -} - # Report number of entries in a table gravity_Table_Count() { local table="${1}" local str="${2}" + local extra="${3}" local num - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} WHERE enabled = 1;")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} ${extra};")" echo -e " ${INFO} Number of ${str}: ${num}" } # Output count of blacklisted domains and regex filters gravity_ShowCount() { - gravity_Table_Count "blacklist" "exact blacklisted domains" - gravity_Table_Count "regex_blacklist" "regex blacklist filters" - gravity_Table_Count "whitelist" "exact whitelisted domains" - gravity_Table_Count "regex_whitelist" "regex whitelist filters" + gravity_Table_Count "gravity" "gravity domains" "" + gravity_Table_Count "blacklist" "exact blacklisted domains" "WHERE enabled = 1" + gravity_Table_Count "regex_blacklist" "regex blacklist filters" "WHERE enabled = 1" + gravity_Table_Count "whitelist" "exact whitelisted domains" "WHERE enabled = 1" + gravity_Table_Count "regex_whitelist" "regex whitelist filters" "WHERE enabled = 1" } # Parse list of domains into hosts format @@ -748,12 +721,7 @@ fi # Gravity downloads blocklists next gravity_CheckDNSResolutionAvailable -if gravity_GetBlocklistUrls; then - gravity_SetDownloadOptions - # Build preEventHorizon - gravity_ConsolidateDownloadedBlocklists - gravity_SortAndFilterConsolidatedList -fi +gravity_DownloadBlocklists # Create local.list gravity_generateLocalList From 525ec8cd01cfc069f4c670ac6e9c0aef0a17c02c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 7 Sep 2019 08:44:03 +0200 Subject: [PATCH 205/366] Signal to Firefox that the local network is unsuitable for DNS-over-HTTPS Signed-off-by: DL6ER --- advanced/01-pihole.conf | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index cd74e186..8aa35fe1 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -41,3 +41,8 @@ log-facility=/var/log/pihole.log local-ttl=2 log-async + +# Signal to Firefox that the local network is unsuitable for DNS-over-HTTPS +# This follows https://support.mozilla.org/en-US/kb/configuring-networks-disable-dns-over-https +# (sourced 7th September 2019) +server=/use-application-dns.net/ From ffc91a6c814be6badbc04e3fdc42c82fa0dbdb7e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 7 Sep 2019 11:17:53 +0200 Subject: [PATCH 206/366] Update view vw_gravity to only return domains from enabled adlists. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 3 ++- .../database_migration/gravity/3_to_4.sql | 25 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 advanced/Scripts/database_migration/gravity/3_to_4.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 7d59a6a0..40fa9655 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -40,7 +40,8 @@ upgrade_gravityDB(){ version=3 fi if [[ "$version" == "3" ]]; then - # This migration script upgrades ... 
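# Illustrative sketch (not part of the committed diff) of how these version-gated
# migrations chain: the schema version is read from the info table, each N_to_N+1.sql
# script is applied in turn, and the script itself bumps the stored version
# ("UPDATE info SET value = ... WHERE property = 'version';"). A loop-based equivalent
# would look roughly like:
#
#   db="/etc/pihole/gravity.db"
#   scripts="/etc/.pihole/advanced/Scripts/database_migration/gravity"
#   version=$(sqlite3 "${db}" "SELECT value FROM info WHERE property = 'version';")
#   while [[ -f "${scripts}/${version}_to_$((version + 1)).sql" ]]; do
#       sqlite3 "${db}" < "${scripts}/${version}_to_$((version + 1)).sql"
#       version=$((version + 1))
#   done
#
# The shipped upgrade_gravityDB() uses explicit per-version if-blocks instead of a loop,
# as shown in the surrounding hunks.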
+ # This migration script upgrades the gravity and adlist views + # implementing necessary changes for per-client blocking sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/3_to_4.sql" version=3 fi diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql new file mode 100644 index 00000000..4a2f9925 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql @@ -0,0 +1,25 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +DROP TABLE gravity; +CREATE TABLE gravity +( + domain TEXT NOT NULL, + adlist_id INTEGER NOT NULL REFERENCES adlist (id), + PRIMARY KEY(domain, adlist_id) +); + +DROP VIEW vw_gravity; +CREATE VIEW vw_gravity AS SELECT domain, gravity.adlist_id + FROM gravity + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id + LEFT JOIN adlist ON adlist.id = gravity.adlist_id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1); + +UPDATE info SET value = 4 WHERE property = 'version'; + +COMMIT; From ff08add7c0a42536d8901af8e325d7ffe861d887 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 7 Sep 2019 13:01:36 +0200 Subject: [PATCH 207/366] Update vw_whitelist and vw_blacklist to return group_id alongside domain so we can filter if the current client wants to get this domain blocked or not. Signed-off-by: DL6ER --- .../database_migration/gravity/3_to_4.sql | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql index 4a2f9925..5bb96a47 100644 --- a/advanced/Scripts/database_migration/gravity/3_to_4.sql +++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql @@ -13,13 +13,35 @@ CREATE TABLE gravity ); DROP VIEW vw_gravity; -CREATE VIEW vw_gravity AS SELECT domain, gravity.adlist_id +CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id FROM gravity LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id LEFT JOIN adlist ON adlist.id = gravity.adlist_id LEFT JOIN "group" ON "group".id = adlist_by_group.group_id WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1); +DROP VIEW vw_whitelist; +CREATE VIEW vw_whitelist AS SELECT domain, whitelist_by_group.group_id AS group_id + FROM whitelist + LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id + LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id + WHERE whitelist.enabled = 1 AND (whitelist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY whitelist.id; + +DROP VIEW vw_blacklist; +CREATE VIEW vw_blacklist AS SELECT domain, blacklist_by_group.group_id AS group_id + FROM blacklist + LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id + LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id + WHERE blacklist.enabled = 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY blacklist.id; + +CREATE TABLE client +( + ip TEXT NOL NULL PRIMARY KEY, + "groups" TEXT NOT NULL +); + UPDATE info SET value = 4 WHERE property = 'version'; COMMIT; From 1f36ec48e3af54d849a191f48bdb6559158ce5f8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 7 Sep 2019 23:11:20 +0200 Subject: [PATCH 208/366] Add use-application-dns.net = NXDOMAIN in ProcessDNSSettings rather than in the template so we can 
ensure that it will survive config-renewals. Signed-off-by: DL6ER --- advanced/01-pihole.conf | 5 ----- advanced/Scripts/webpage.sh | 5 +++++ 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index 8aa35fe1..cd74e186 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -41,8 +41,3 @@ log-facility=/var/log/pihole.log local-ttl=2 log-async - -# Signal to Firefox that the local network is unsuitable for DNS-over-HTTPS -# This follows https://support.mozilla.org/en-US/kb/configuring-networks-disable-dns-over-https -# (sourced 7th September 2019) -server=/use-application-dns.net/ diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 39299960..356c20ef 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -212,6 +212,11 @@ trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC68345710423 add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_DOMAIN}/${CONDITIONAL_FORWARDING_IP}" add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_REVERSE}/${CONDITIONAL_FORWARDING_IP}" fi + + # Prevent Firefox from automatically switching over to DNS-over-HTTPS + # This follows https://support.mozilla.org/en-US/kb/configuring-networks-disable-dns-over-https + # (sourced 7th September 2019) + add_dnsmasq_setting "server=/use-application-dns.net/" } SetDNSServers() { From 7b484319176d5058ff4e35242348ee8760238c31 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Sep 2019 00:03:57 +0200 Subject: [PATCH 209/366] Add client_by_group table like we have for the other lists. It stores associations between individual clients and list groups. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity/3_to_4.sql | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql index 5bb96a47..e1060da4 100644 --- a/advanced/Scripts/database_migration/gravity/3_to_4.sql +++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql @@ -38,8 +38,15 @@ CREATE VIEW vw_blacklist AS SELECT domain, blacklist_by_group.group_id AS group_ CREATE TABLE client ( - ip TEXT NOL NULL PRIMARY KEY, - "groups" TEXT NOT NULL + id INTEGER PRIMARY KEY AUTOINCREMENT, + ip TEXT NOL NULL UNIQUE +); + +CREATE TABLE client_by_group +( + client_id INTEGER NOT NULL REFERENCES client (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (client_id, group_id) ); UPDATE info SET value = 4 WHERE property = 'version'; From f582344b9ac6698a5e500b20739994a6fed27cab Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 17 Sep 2019 21:59:48 +0200 Subject: [PATCH 210/366] "No default index.lighttpd.html file found... not backing up" is not an error. Signed-off-by: DL6ER --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 25c66ab7..e9684254 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1692,7 +1692,7 @@ installPiholeWeb() { # Otherwise, else # don't do anything - printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" + printf "%b %b %s\\n" "${OVER}" "${INFO}" "${str}" printf " No default index.lighttpd.html file found... 
not backing up\\n" fi From 9a6deb5a1a4c1d811c5a906d107e06d6973e7fcb Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Tue, 17 Sep 2019 21:16:49 +0100 Subject: [PATCH 211/366] Fix tests Signed-off-by: Adam Warner --- test/test_automated_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_automated_install.py b/test/test_automated_install.py index cce11857..e8a4dede 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -338,7 +338,7 @@ def test_installPiholeWeb_fresh_install_no_errors(Pihole): expected_stdout = tick_box + (' Creating directory for blocking page, ' 'and copying files') assert expected_stdout in installWeb.stdout - expected_stdout = cross_box + ' Backing up index.lighttpd.html' + expected_stdout = info_box + ' Backing up index.lighttpd.html' assert expected_stdout in installWeb.stdout expected_stdout = ('No default index.lighttpd.html file found... ' 'not backing up') From a27c7b13985eed530a1fdf57f3cb5ef91c815aa2 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 18 Sep 2019 20:58:44 +0200 Subject: [PATCH 212/366] regex white- and blacklist views need to be re-created as well as we need the ID for storing internally whether or not we try to match a given regex for a specific client. Signed-off-by: DL6ER --- .../database_migration/gravity/3_to_4.sql | 20 +++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql index e1060da4..182d24a1 100644 --- a/advanced/Scripts/database_migration/gravity/3_to_4.sql +++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql @@ -21,7 +21,7 @@ CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1); DROP VIEW vw_whitelist; -CREATE VIEW vw_whitelist AS SELECT domain, whitelist_by_group.group_id AS group_id +CREATE VIEW vw_whitelist AS SELECT domain, whitelist.id AS id, whitelist_by_group.group_id AS group_id FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id @@ -29,13 +29,29 @@ CREATE VIEW vw_whitelist AS SELECT domain, whitelist_by_group.group_id AS group_ ORDER BY whitelist.id; DROP VIEW vw_blacklist; -CREATE VIEW vw_blacklist AS SELECT domain, blacklist_by_group.group_id AS group_id +CREATE VIEW vw_blacklist AS SELECT domain, blacklist.id AS id, blacklist_by_group.group_id AS group_id FROM blacklist LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id WHERE blacklist.enabled = 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1) ORDER BY blacklist.id; +DROP VIEW vw_regex_whitelist; +CREATE VIEW vw_regex_whitelist AS SELECT DISTINCT domain, regex_whitelist.id AS id, regex_whitelist_by_group.group_id AS group_id + FROM regex_whitelist + LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_whitelist_id = regex_whitelist.id + LEFT JOIN "group" ON "group".id = regex_whitelist_by_group.group_id + WHERE regex_whitelist.enabled = 1 AND (regex_whitelist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY regex_whitelist.id; + +DROP VIEW vw_regex_blacklist; +CREATE VIEW vw_regex_blacklist AS SELECT DISTINCT domain, regex_blacklist.id AS id, regex_blacklist_by_group.group_id AS group_id + FROM regex_blacklist + LEFT JOIN 
regex_blacklist_by_group ON regex_blacklist_by_group.regex_blacklist_id = regex_blacklist.id + LEFT JOIN "group" ON "group".id = regex_blacklist_by_group.group_id + WHERE regex_blacklist.enabled = 1 AND (regex_blacklist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY regex_blacklist.id; + CREATE TABLE client ( id INTEGER PRIMARY KEY AUTOINCREMENT, From 3cb4f6d9d4980c66a025f0e038acee9c3f3a20c4 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 26 Sep 2019 13:50:54 +0200 Subject: [PATCH 213/366] We cannot create vw_gravity before having created vw_whitelist as the former depends onthe later. This commit changes the order in which the tables are created. Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index 09d581f0..d0c744f4 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -89,10 +89,6 @@ CREATE TABLE info INSERT INTO info VALUES("version","1"); -CREATE VIEW vw_gravity AS SELECT domain - FROM gravity - WHERE domain NOT IN (SELECT domain from vw_whitelist); - CREATE VIEW vw_whitelist AS SELECT DISTINCT domain FROM whitelist LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id @@ -141,3 +137,6 @@ CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; END; +CREATE VIEW vw_gravity AS SELECT domain + FROM gravity + WHERE domain NOT IN (SELECT domain from vw_whitelist); From 2e0370367cacd89a76f4a75954f70dc1f070ff07 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 26 Sep 2019 14:02:20 +0200 Subject: [PATCH 214/366] Print when we upgrade gravity database version. This will make possibly failed upgrades easier to debug. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index a82d0d51..0fe90d8a 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -22,6 +22,7 @@ upgrade_gravityDB(){ if [[ "$version" == "1" ]]; then # This migration script upgrades the gravity.db file by # adding the domain_audit table + echo -e " ${INFO} Upgrading gravity database from version 1 to 2" sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" version=2 @@ -36,6 +37,7 @@ upgrade_gravityDB(){ # This migration script upgrades the gravity.db file by # renaming the regex table to regex_blacklist, and # creating a new regex_whitelist table + corresponding linking table and views + echo -e " ${INFO} Upgrading gravity database from version 2 to 3" sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" version=3 fi From 149fb0c2160919f0cb1be0308c4bda7cf4fc6f23 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 27 Sep 2019 23:02:29 +0200 Subject: [PATCH 215/366] Do not install a blank regex file. 
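Taken together, the 3_to_4.sql changes from the preceding patches give every list a group
dimension: gravity rows carry the adlist they came from, the vw_* views expose a group_id
(and, for the exact and regex lists, the entry id), and the client/client_by_group tables
map client IPs to groups. As a rough illustration only (the IP and table contents below
are hypothetical, not part of the committed diff), the gravity domains applying to one
client could be resolved with:

    sqlite3 /etc/pihole/gravity.db "
      SELECT DISTINCT domain FROM vw_gravity
      WHERE group_id IS NULL
         OR group_id IN (SELECT group_id FROM client_by_group
                         WHERE client_id = (SELECT id FROM client WHERE ip = '192.168.0.42'));"

The per-client filtering itself is left to the consumer of these views; they merely expose
the group_id needed for it.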
Signed-off-by: DL6ER --- automated install/basic-install.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 19a6d919..cc78afbf 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -70,7 +70,6 @@ PI_HOLE_BLOCKPAGE_DIR="${webroot}/pihole" useUpdateVars=false adlistFile="/etc/pihole/adlists.list" -regexFile="/etc/pihole/regex.list" # Pi-hole needs an IP address; to begin, these variables are empty since we don't know what the IP is until # this script can run IPV4_ADDRESS="" @@ -1385,11 +1384,6 @@ installConfigs() { return 1 fi fi - # Install an empty regex file - if [[ ! -f "${regexFile}" ]]; then - # Let PHP edit the regex file, if installed - install -o pihole -g "${LIGHTTPD_GROUP:-pihole}" -m 664 /dev/null "${regexFile}" - fi # If the user chose to install the dashboard, if [[ "${INSTALL_WEB_SERVER}" == true ]]; then # and if the Web server conf directory does not exist, From d883854aadb5f56075a6fe51cbe3ec59daf6751c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 3 Oct 2019 12:12:32 +0200 Subject: [PATCH 216/366] Use constant for long path. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 7eba43c0..773898b0 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -10,6 +10,8 @@ # This file is copyright under the latest version of the EUPL. # Please see LICENSE file for your rights under this license. +readonly scriptPath="/etc/.pihole/advanced/Scripts/database_migration/gravity" + upgrade_gravityDB(){ local database piholeDir auditFile version database="${1}" @@ -23,7 +25,7 @@ upgrade_gravityDB(){ # This migration script upgrades the gravity.db file by # adding the domain_audit table echo -e " ${INFO} Upgrading gravity database from version 1 to 2" - sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" + sqlite3 "${database}" < "${scriptPath}/1_to_2.sql" version=2 # Store audit domains in database table @@ -38,13 +40,13 @@ upgrade_gravityDB(){ # renaming the regex table to regex_blacklist, and # creating a new regex_whitelist table + corresponding linking table and views echo -e " ${INFO} Upgrading gravity database from version 2 to 3" - sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" + sqlite3 "${database}" < "${scriptPath}/2_to_3.sql" version=3 fi if [[ "$version" == "3" ]]; then # This migration script upgrades the gravity and adlist views # implementing necessary changes for per-client blocking - sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/3_to_4.sql" + sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" version=3 fi } From 4f21f677758d62c20b4a9f9165e043c2a50713d2 Mon Sep 17 00:00:00 2001 From: John Crisp Date: Mon, 16 Sep 2019 14:46:09 +0200 Subject: [PATCH 217/366] Update pihole Fix spelling typos --- pihole | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pihole b/pihole index 971595d5..4a358443 100755 --- a/pihole +++ b/pihole @@ -11,8 +11,8 @@ readonly PI_HOLE_SCRIPT_DIR="/opt/pihole" -# setupVars and PI_HOLE_BIN_DIR are not readonly here because in some funcitons (checkout), -# it might get set again when the installer is sourced. 
This causes an +# setupVars and PI_HOLE_BIN_DIR are not readonly here because in some functions (checkout), +# they might get set again when the installer is sourced. This causes an # error due to modifying a readonly variable. setupVars="/etc/pihole/setupVars.conf" PI_HOLE_BIN_DIR="/usr/local/bin" From fc0899b2ad75cf42f85668fddd4646b651a9429a Mon Sep 17 00:00:00 2001 From: bcambl Date: Sun, 13 Oct 2019 14:35:38 -0600 Subject: [PATCH 218/366] fix fedora dependency check/install stdout Signed-off-by: bcambl --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index cc78afbf..11b78fcd 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1645,9 +1645,9 @@ install_dependent_packages() { for i in "$@"; do printf " %b Checking for %s..." "${INFO}" "${i}" if "${PKG_MANAGER}" -q list installed "${i}" &> /dev/null; then - printf "%b %b Checking for %s" "${OVER}" "${TICK}" "${i}" + printf "%b %b Checking for %s\\n" "${OVER}" "${TICK}" "${i}" else - printf "%b %b Checking for %s (will be installed)" "${OVER}" "${INFO}" "${i}" + echo -e "${OVER} ${INFO} Checking for $i (will be installed)" installArray+=("${i}") fi done From 81ca78e7f40e395a38502ae55f84969796e65b13 Mon Sep 17 00:00:00 2001 From: bcambl Date: Mon, 14 Oct 2019 12:14:45 -0600 Subject: [PATCH 219/366] exit installer if SELinux is enforcing The Pi-hole project does not ship a custom SELinux policy as the required policy would lower the overall system security. Users who require SELinux to be enforcing are encouraged to create an custom policy on a case-by-case basis. Signed-off-by: bcambl --- automated install/basic-install.sh | 50 +++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 14 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index cc78afbf..091c543a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1959,20 +1959,42 @@ installPihole() { # SELinux checkSelinux() { - # If the getenforce command exists, - if is_command getenforce ; then - # Store the current mode in a variable - enforceMode=$(getenforce) - printf "\\n %b SELinux mode detected: %s\\n" "${INFO}" "${enforceMode}" - - # If it's enforcing, - if [[ "${enforceMode}" == "Enforcing" ]]; then - # Explain Pi-hole does not support it yet - whiptail --defaultno --title "SELinux Enforcing Detected" --yesno "SELinux is being ENFORCED on your system! \\n\\nPi-hole currently does not support SELinux, but you may still continue with the installation.\\n\\nNote: Web Admin will not be fully functional unless you set your policies correctly\\n\\nContinue installing Pi-hole?" "${r}" "${c}" || \ - { printf "\\n %bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } - printf " %b Continuing installation with SELinux Enforcing\\n" "${INFO}" - printf " %b Please refer to official SELinux documentation to create a custom policy\\n" "${INFO}" - fi + local DEFAULT_SELINUX + local CURRENT_SELINUX + local SELINUX_ENFORCING=0 + # Check if a SELinux configuration file exists + if [[ -f /etc/selinux/config ]]; then + # If a SELinux configuration file was found, check the default SELinux mode. 
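# Illustrative note (not part of the committed diff): given an /etc/selinux/config line
# such as "SELINUX=enforcing",
#   awk -F= '/^SELINUX=/ {print $2}' /etc/selinux/config
# prints "enforcing", and ${DEFAULT_SELINUX,,} lowercases it for the case statement below.
# An enforcing system can still force the install by pre-setting the PIHOLE_SELINUX
# environment variable that is tested at the end of this function, e.g. (hypothetical
# invocation):
#   sudo PIHOLE_SELINUX=true bash basic-install.sh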
+ DEFAULT_SELINUX=$(awk -F= '/^SELINUX=/ {print $2}' /etc/selinux/config) + case "${DEFAULT_SELINUX,,}" in + enforcing) + echo -e "${CROSS} ${COL_RED}Default SELinux: $DEFAULT_SELINUX${COL_NC}" + SELINUX_ENFORCING=1 + ;; + *) # 'permissive' and 'disabled' + echo -e "${TICK} ${COL_GREEN}Default SELinux: $DEFAULT_SELINUX${COL_NC}"; + ;; + esac + # Check the current state of SELinux + CURRENT_SELINUX=$(getenforce) + case "${CURRENT_SELINUX,,}" in + enforcing) + echo -e "${CROSS} ${COL_RED}Current SELinux: $CURRENT_SELINUX${COL_NC}" + SELINUX_ENFORCING=1 + ;; + *) # 'permissive' and 'disabled' + echo -e "${TICK} ${COL_GREEN}Current SELinux: $CURRENT_SELINUX${COL_NC}"; + ;; + esac + else + echo -e "${INFO} ${COL_GREEN}SELinux not detected${COL_NC}"; + fi + # Exit the installer if any SELinux checks toggled the flag + if [[ "${SELINUX_ENFORCING}" -eq 1 ]] && [[ -z "${PIHOLE_SELINUX}" ]]; then + echo -e "Pi-hole does not provide an SELinux policy as the required changes modify the security of your system." + echo -e "Please refer to https://wiki.centos.org/HowTos/SELinux if SELinux is required for your deployment." + printf "\\n%bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; + exit 1; fi } From cd9b1fcb8c55c11bb0ff8220f0a1de4b29da6b42 Mon Sep 17 00:00:00 2001 From: bcambl Date: Mon, 14 Oct 2019 12:26:39 -0600 Subject: [PATCH 220/366] update tests for SELinux changes Signed-off-by: bcambl --- test/test_automated_install.py | 65 ++---------------------------- test/test_centos_fedora_support.py | 62 ++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 61 deletions(-) diff --git a/test/test_automated_install.py b/test/test_automated_install.py index e8a4dede..aeaac3dc 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -254,73 +254,16 @@ def test_configureFirewall_IPTables_enabled_not_exist_no_errors(Pihole): assert len(re.findall(r'tcp --dport 4711:4720', firewall_calls)) == 2 -def test_selinux_enforcing_default_exit(Pihole): +def test_selinux_not_detected(Pihole): ''' - confirms installer prompts to exit when SELinux is Enforcing by default + confirms installer continues when SELinux configuration file does not exist ''' - # getenforce returns the running state of SELinux - mock_command('getenforce', {'*': ('Enforcing', '0')}, Pihole) - # Whiptail dialog returns Cancel for user prompt - mock_command('whiptail', {'*': ('', '1')}, Pihole) - check_selinux = Pihole.run(''' - source /opt/pihole/basic-install.sh - checkSelinux - ''') - expected_stdout = info_box + ' SELinux mode detected: Enforcing' - assert expected_stdout in check_selinux.stdout - expected_stdout = 'SELinux Enforcing detected, exiting installer' - assert expected_stdout in check_selinux.stdout - assert check_selinux.rc == 1 - - -def test_selinux_enforcing_continue(Pihole): - ''' - confirms installer prompts to continue with custom policy warning - ''' - # getenforce returns the running state of SELinux - mock_command('getenforce', {'*': ('Enforcing', '0')}, Pihole) - # Whiptail dialog returns Continue for user prompt - mock_command('whiptail', {'*': ('', '0')}, Pihole) - check_selinux = Pihole.run(''' - source /opt/pihole/basic-install.sh - checkSelinux - ''') - expected_stdout = info_box + ' SELinux mode detected: Enforcing' - assert expected_stdout in check_selinux.stdout - expected_stdout = info_box + (' Continuing installation with SELinux ' - 'Enforcing') - assert expected_stdout in check_selinux.stdout - expected_stdout = info_box + (' Please refer 
to official SELinux ' - 'documentation to create a custom policy') - assert expected_stdout in check_selinux.stdout - assert check_selinux.rc == 0 - - -def test_selinux_permissive(Pihole): - ''' - confirms installer continues when SELinux is Permissive - ''' - # getenforce returns the running state of SELinux - mock_command('getenforce', {'*': ('Permissive', '0')}, Pihole) - check_selinux = Pihole.run(''' - source /opt/pihole/basic-install.sh - checkSelinux - ''') - expected_stdout = info_box + ' SELinux mode detected: Permissive' - assert expected_stdout in check_selinux.stdout - assert check_selinux.rc == 0 - - -def test_selinux_disabled(Pihole): - ''' - confirms installer continues when SELinux is Disabled - ''' - mock_command('getenforce', {'*': ('Disabled', '0')}, Pihole) check_selinux = Pihole.run(''' + rm -f /etc/selinux/config source /opt/pihole/basic-install.sh checkSelinux ''') - expected_stdout = info_box + ' SELinux mode detected: Disabled' + expected_stdout = info_box + ' SELinux not detected' assert expected_stdout in check_selinux.stdout assert check_selinux.rc == 0 diff --git a/test/test_centos_fedora_support.py b/test/test_centos_fedora_support.py index df53d73f..78910b99 100644 --- a/test/test_centos_fedora_support.py +++ b/test/test_centos_fedora_support.py @@ -7,6 +7,68 @@ from conftest import ( mock_command_2, ) +def mock_selinux_config(state, Pihole): + ''' + Creates a mock SELinux config file with expected content + ''' + # validate state string + valid_states = ['enforcing', 'permissive', 'disabled'] + assert state in valid_states + # getenforce returns the running state of SELinux + mock_command('getenforce', {'*': (state.capitalize(), '0')}, Pihole) + # create mock configuration with desired content + Pihole.run(''' + mkdir /etc/selinux + echo "SELINUX={state}" > /etc/selinux/config + '''.format(state=state.lower())) + + +@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ]) +def test_selinux_enforcing_exit(Pihole): + ''' + confirms installer prompts to exit when SELinux is Enforcing by default + ''' + mock_selinux_config("enforcing", Pihole) + check_selinux = Pihole.run(''' + source /opt/pihole/basic-install.sh + checkSelinux + ''') + expected_stdout = cross_box + ' Current SELinux: Enforcing' + assert expected_stdout in check_selinux.stdout + expected_stdout = 'SELinux Enforcing detected, exiting installer' + assert expected_stdout in check_selinux.stdout + assert check_selinux.rc == 1 + + +@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ]) +def test_selinux_permissive(Pihole): + ''' + confirms installer continues when SELinux is Permissive + ''' + mock_selinux_config("permissive", Pihole) + check_selinux = Pihole.run(''' + source /opt/pihole/basic-install.sh + checkSelinux + ''') + expected_stdout = tick_box + ' Current SELinux: Permissive' + assert expected_stdout in check_selinux.stdout + assert check_selinux.rc == 0 + + +@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ]) +def test_selinux_disabled(Pihole): + ''' + confirms installer continues when SELinux is Disabled + ''' + mock_selinux_config("disabled", Pihole) + check_selinux = Pihole.run(''' + source /opt/pihole/basic-install.sh + checkSelinux + ''') + expected_stdout = tick_box + ' Current SELinux: Disabled' + assert expected_stdout in check_selinux.stdout + assert check_selinux.rc == 0 + @pytest.mark.parametrize("tag", [('fedora'), ]) def test_epel_and_remi_not_installed_fedora(Pihole): From cf2b02150207288c268cc4660f288d146ddc78b4 Mon Sep 17 00:00:00 2001 From: bcambl Date: 
Mon, 14 Oct 2019 13:29:43 -0600 Subject: [PATCH 221/366] linting: E302 expected 2 blank lines, found 1 Signed-off-by: bcambl --- test/test_centos_fedora_support.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/test_centos_fedora_support.py b/test/test_centos_fedora_support.py index 78910b99..aee16212 100644 --- a/test/test_centos_fedora_support.py +++ b/test/test_centos_fedora_support.py @@ -7,6 +7,7 @@ from conftest import ( mock_command_2, ) + def mock_selinux_config(state, Pihole): ''' Creates a mock SELinux config file with expected content From 5bac1ad58b2b2b179f308da2e997f69467d9037f Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 14 Oct 2019 22:59:58 +0100 Subject: [PATCH 222/366] backend changes to allow comment when adding new adlist Signed-off-by: Adam Warner --- advanced/Scripts/webpage.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 411cc1f6..ce404f31 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -404,13 +404,15 @@ SetWebUILayout() { CustomizeAdLists() { local address address="${args[3]}" + local comment + comment="${args[4]}" if [[ "${args[2]}" == "enable" ]]; then sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" elif [[ "${args[2]}" == "disable" ]]; then sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" elif [[ "${args[2]}" == "add" ]]; then - sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address) VALUES ('${address}')" + sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" elif [[ "${args[2]}" == "del" ]]; then sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" else From a86f5781391727a37f17081349b514ff53e2ab95 Mon Sep 17 00:00:00 2001 From: bcambl Date: Mon, 14 Oct 2019 20:06:23 -0600 Subject: [PATCH 223/366] replace echo with printf in checkSelinux() Signed-off-by: bcambl --- automated install/basic-install.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 091c543a..0b00d968 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1968,22 +1968,22 @@ checkSelinux() { DEFAULT_SELINUX=$(awk -F= '/^SELINUX=/ {print $2}' /etc/selinux/config) case "${DEFAULT_SELINUX,,}" in enforcing) - echo -e "${CROSS} ${COL_RED}Default SELinux: $DEFAULT_SELINUX${COL_NC}" + printf "%b %bDefault SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${DEFAULT_SELINUX}" "${COL_NC}" SELINUX_ENFORCING=1 ;; *) # 'permissive' and 'disabled' - echo -e "${TICK} ${COL_GREEN}Default SELinux: $DEFAULT_SELINUX${COL_NC}"; + printf "%b %bDefault SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${DEFAULT_SELINUX}" "${COL_NC}" ;; esac # Check the current state of SELinux CURRENT_SELINUX=$(getenforce) case "${CURRENT_SELINUX,,}" in enforcing) - echo -e "${CROSS} ${COL_RED}Current SELinux: $CURRENT_SELINUX${COL_NC}" + printf "%b %bCurrent SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${CURRENT_SELINUX}" "${COL_NC}" SELINUX_ENFORCING=1 ;; *) # 'permissive' and 'disabled' - echo -e "${TICK} ${COL_GREEN}Current SELinux: $CURRENT_SELINUX${COL_NC}"; + printf "%b %bCurrent SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${CURRENT_SELINUX}" "${COL_NC}" ;; esac else @@ -1991,8 +1991,8 @@ checkSelinux() { fi # Exit the installer if any SELinux checks toggled the flag if [[ "${SELINUX_ENFORCING}" 
-eq 1 ]] && [[ -z "${PIHOLE_SELINUX}" ]]; then - echo -e "Pi-hole does not provide an SELinux policy as the required changes modify the security of your system." - echo -e "Please refer to https://wiki.centos.org/HowTos/SELinux if SELinux is required for your deployment." + printf "Pi-hole does not provide an SELinux policy as the required changes modify the security of your system.\\n" + printf "Please refer to https://wiki.centos.org/HowTos/SELinux if SELinux is required for your deployment.\\n" printf "\\n%bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; fi From 612d408034cc5e80d6678ebd48e24767140d402d Mon Sep 17 00:00:00 2001 From: bcambl Date: Mon, 14 Oct 2019 20:16:40 -0600 Subject: [PATCH 224/366] replace echo with printf in install_dependent_packages() Signed-off-by: bcambl --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 11b78fcd..c887a6c6 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1628,7 +1628,7 @@ install_dependent_packages() { if dpkg-query -W -f='${Status}' "${i}" 2>/dev/null | grep "ok installed" &> /dev/null; then printf "%b %b Checking for %s\\n" "${OVER}" "${TICK}" "${i}" else - echo -e "${OVER} ${INFO} Checking for $i (will be installed)" + printf "%b %b Checking for %s (will be installed)\\n" "${OVER}" "${INFO}" "${i}" installArray+=("${i}") fi done @@ -1647,7 +1647,7 @@ install_dependent_packages() { if "${PKG_MANAGER}" -q list installed "${i}" &> /dev/null; then printf "%b %b Checking for %s\\n" "${OVER}" "${TICK}" "${i}" else - echo -e "${OVER} ${INFO} Checking for $i (will be installed)" + printf "%b %b Checking for %s (will be installed)\\n" "${OVER}" "${INFO}" "${i}" installArray+=("${i}") fi done From c6f9fe3af2b351b484ff14de426dde268d56441c Mon Sep 17 00:00:00 2001 From: John Krull Date: Tue, 15 Oct 2019 21:29:55 -0500 Subject: [PATCH 225/366] Fix spelling of the word "permitting" Signed-off-by: John Krull --- advanced/Scripts/webpage.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 411cc1f6..8aa3fa08 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -524,10 +524,10 @@ Interfaces: fi if [[ "${args[2]}" == "all" ]]; then - echo -e " ${INFO} Listening on all interfaces, permiting all origins. Please use a firewall!" + echo -e " ${INFO} Listening on all interfaces, permitting all origins. Please use a firewall!" 
change_setting "DNSMASQ_LISTENING" "all" elif [[ "${args[2]}" == "local" ]]; then - echo -e " ${INFO} Listening on all interfaces, permiting origins from one hop away (LAN)" + echo -e " ${INFO} Listening on all interfaces, permitting origins from one hop away (LAN)" change_setting "DNSMASQ_LISTENING" "local" else echo -e " ${INFO} Listening only on interface ${PIHOLE_INTERFACE}" From f9d16c2b1525a9dde136be5968583a56723b3a7d Mon Sep 17 00:00:00 2001 From: Pierre Ghiot Date: Sun, 27 Oct 2019 02:07:08 +0200 Subject: [PATCH 226/366] Update webpage.sh Signed-off-by: Mograine --- advanced/Scripts/webpage.sh | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 600a45a5..e990cc22 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -17,6 +17,7 @@ readonly FTLconf="/etc/pihole/pihole-FTL.conf" # 03 -> wildcards readonly dhcpstaticconfig="/etc/dnsmasq.d/04-pihole-static-dhcp.conf" readonly PI_HOLE_BIN_DIR="/usr/local/bin" +readonly dnscustomfile="/etc/pihole/custom.list" coltable="/opt/pihole/COL_TABLE" if [[ -f ${coltable} ]]; then @@ -564,6 +565,17 @@ SetPrivacyLevel() { fi } +AddCustomDNSAddress() { + ip="${args[2]}" + host="${args[3]}" + echo "${ip} ${host}" >> "${dnscustomfile}" +} + +RemoveCustomDNSAddress() { + host="${args[2]}" + sed -i "/.*${host}/d" "${dnscustomfile}" +} + main() { args=("$@") @@ -595,6 +607,8 @@ main() { "audit" ) addAudit "$@";; "clearaudit" ) clearAudit;; "-l" | "privacylevel" ) SetPrivacyLevel;; + "addcustomdns" ) AddCustomDNSAddress;; + "removecustomdns" ) RemoveCustomDNSAddress;; * ) helpFunc;; esac From bb8dbe9da5d703d8a15328ee33c0bda01959e5c4 Mon Sep 17 00:00:00 2001 From: Pierre Ghiot Date: Sun, 27 Oct 2019 02:09:29 +0200 Subject: [PATCH 227/366] Update 01-pihole.conf Signed-off-by: Mograine --- advanced/01-pihole.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index 38d2c0b5..5919034d 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -21,6 +21,7 @@ addn-hosts=/etc/pihole/gravity.list addn-hosts=/etc/pihole/black.list addn-hosts=/etc/pihole/local.list +addn-hosts=/etc/pihole/custom.list domain-needed From 193ff38ab3d6fd49a9912c5e07cd8e098267bf3a Mon Sep 17 00:00:00 2001 From: Mograine Date: Mon, 28 Oct 2019 13:21:05 +0100 Subject: [PATCH 228/366] Allow more precise deletion by passing ip as parameter Signed-off-by: Mograine --- advanced/Scripts/webpage.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index e990cc22..dc2c83af 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -572,8 +572,9 @@ AddCustomDNSAddress() { } RemoveCustomDNSAddress() { - host="${args[2]}" - sed -i "/.*${host}/d" "${dnscustomfile}" + ip="${args[2]}" + host="${args[3]}" + sed -i "/${ip} ${host}/d" "${dnscustomfile}" } main() { From 71903eb27f7270b0df6eaf45e2ae507983d36099 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 28 Oct 2019 22:35:01 +0000 Subject: [PATCH 229/366] Add in checks to reset cloned repo to the lastest available release Signed-off-by: Adam Warner --- automated install/basic-install.sh | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c887a6c6..47ecd125 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -408,6 +408,8 @@ 
make_repo() { # Set named variables for better readability local directory="${1}" local remoteRepo="${2}" + local curdir + # The message to display when this function is running str="Clone ${remoteRepo} into ${directory}" # Display the message and use the color table to preface the message with an "info" indicator @@ -421,10 +423,21 @@ make_repo() { git clone -q --depth 20 "${remoteRepo}" "${directory}" &> /dev/null || return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) chmod -R a+rX "${directory}" - + # Make sure we know what directory we are in so we can move back into it + curdir="${PWD}" + # Move into the directory that was passed as an argument + cd "${directory}" &> /dev/null || return 1 + # Check current branch. If it is master, then reset to the latest availible tag. + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + curBranch = $(git rev-parse --abbrev-ref HEAD) + if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. + git reset --hard $(git describe --abbrev=0) || return $? + fi # Show a colored message showing it's status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" - # Always return 0? Not sure this is correct + + # Move back into the original directory + cd "${curdir}" &> /dev/null || return 1 return 0 } @@ -436,6 +449,7 @@ update_repo() { # This helps prevent the wrong value from being assigned if you were to set the variable as a GLOBAL one local directory="${1}" local curdir + local curBranch # A variable to store the message we want to display; # Again, it's useful to store these in variables in case we need to reuse or change the message; @@ -453,6 +467,12 @@ update_repo() { git clean --quiet --force -d || true # Okay for already clean directory # Pull the latest commits git pull --quiet &> /dev/null || return $? + # Check current branch. If it is master, then reset to the latest availible tag. + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + curBranch = $(git rev-parse --abbrev-ref HEAD) + if [[ "${curBranch}" == "master" ]]; then + git reset --hard $(git describe --abbrev=0) || return $? 
+ fi # Show a completion message printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) From 3fbb0ac8dde14b8edc1982ae3a2a021f3cf68477 Mon Sep 17 00:00:00 2001 From: Adam Hill Date: Tue, 29 Oct 2019 22:26:46 -0500 Subject: [PATCH 230/366] Adding docker+arm detection & FTL download Signed-off-by: Adam Hill --- automated install/basic-install.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c887a6c6..bbc8b2ac 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2305,9 +2305,15 @@ get_binary_name() { binary="pihole-FTL-arm-linux-gnueabi" fi else - printf "%b %b Detected ARM architecture\\n" "${OVER}" "${TICK}" - # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabi" + if [[ -f "/.dockerenv" ]]; then + printf "%b %b Detected ARM architecture in docker\\n" "${OVER}" "${TICK}" + # set the binary to be used + binary="pihole-FTL-armel-native" + else + printf "%b %b Detected ARM architecture\\n" "${OVER}" "${TICK}" + # set the binary to be used + binary="pihole-FTL-arm-linux-gnueabi" + fi fi elif [[ "${machine}" == "x86_64" ]]; then # This gives the architecture of packages dpkg installs (for example, "i386") From 476975540a6286eef126c14654765dcb856216d2 Mon Sep 17 00:00:00 2001 From: chrunchyjesus Date: Tue, 5 Nov 2019 22:11:47 +0100 Subject: [PATCH 231/366] make some shebangs comply to posix standard --- advanced/Scripts/wildcard_regex_converter.sh | 2 +- advanced/Templates/pihole-FTL.service | 2 +- pihole | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/advanced/Scripts/wildcard_regex_converter.sh b/advanced/Scripts/wildcard_regex_converter.sh index 8c9578a3..b4b6b4a1 100644 --- a/advanced/Scripts/wildcard_regex_converter.sh +++ b/advanced/Scripts/wildcard_regex_converter.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Pi-hole: A black hole for Internet advertisements # (c) 2017 Pi-hole, LLC (https://pi-hole.net) # Network-wide ad blocking via your own hardware. diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service index 8a4c7ce6..5dbf080e 100644 --- a/advanced/Templates/pihole-FTL.service +++ b/advanced/Templates/pihole-FTL.service @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash ### BEGIN INIT INFO # Provides: pihole-FTL # Required-Start: $remote_fs $syslog diff --git a/pihole b/pihole index 4a358443..f0195843 100755 --- a/pihole +++ b/pihole @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Pi-hole: A black hole for Internet advertisements # (c) 2017 Pi-hole, LLC (https://pi-hole.net) From ea67c828cde17764242fdf846eb374d5943bbd12 Mon Sep 17 00:00:00 2001 From: MichaIng Date: Thu, 7 Nov 2019 13:59:44 +0100 Subject: [PATCH 232/366] Minor installer output enhancements + Print restart hint after setting IPv4 address on a separate line with [i] prefix to not break text alignment + Print final upstream DNS choice as a single printf call and by this fix missing info and linebreak on "Custom" choices. 
+ Minor if/then/else code alignment Signed-off-by: MichaIng --- automated install/basic-install.sh | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c887a6c6..c2f8ced5 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -838,7 +838,8 @@ setDHCPCD() { # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system - printf " %b Set IP address to %s \\n You may need to restart after the install is complete\\n" "${TICK}" "${IPV4_ADDRESS%/*}" + printf " %b Set IP address to %s\\n" "${TICK}" "${IPV4_ADDRESS%/*}" + printf " %b You may need to restart after the install is complete\\n" "${INFO}" fi } @@ -984,8 +985,6 @@ setDNS() { # exit if Cancel is selected { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; } - # Display the selection - printf " %b Using " "${INFO}" # Depending on the user's choice, set the GLOBAl variables to the IP of the respective provider if [[ "${DNSchoices}" == "Custom" ]] then @@ -1037,14 +1036,14 @@ setDNS() { if [[ "${PIHOLE_DNS_2}" == "${strInvalid}" ]]; then PIHOLE_DNS_2="" fi - # Since the settings will not work, stay in the loop - DNSSettingsCorrect=False + # Since the settings will not work, stay in the loop + DNSSettingsCorrect=False # Otherwise, else # Show the settings if (whiptail --backtitle "Specify Upstream DNS Provider(s)" --title "Upstream DNS Provider(s)" --yesno "Are these settings correct?\\n DNS Server 1: $PIHOLE_DNS_1\\n DNS Server 2: ${PIHOLE_DNS_2}" "${r}" "${c}"); then - # and break from the loop since the servers are valid - DNSSettingsCorrect=True + # and break from the loop since the servers are valid + DNSSettingsCorrect=True # Otherwise, else # If the settings are wrong, the loop continues @@ -1052,7 +1051,7 @@ setDNS() { fi fi done - else + else # Save the old Internal Field Separator in a variable OIFS=$IFS # and set the new one to newline @@ -1062,7 +1061,6 @@ setDNS() { DNSName="$(cut -d';' -f1 <<< "${DNSServer}")" if [[ "${DNSchoices}" == "${DNSName}" ]] then - printf "%s\\n" "${DNSName}" PIHOLE_DNS_1="$(cut -d';' -f2 <<< "${DNSServer}")" PIHOLE_DNS_2="$(cut -d';' -f3 <<< "${DNSServer}")" break @@ -1071,6 +1069,9 @@ setDNS() { # Restore the IFS to what it was IFS=${OIFS} fi + + # Display final selection + printf " %b Using upstream DNS: %s %s\\n" "${INFO}" "${PIHOLE_DNS_1}" "${PIHOLE_DNS_2}" } # Allow the user to enable/disable logging From 62c00ae1d83ecc7b29e1b2f1f31ba75e9f563b51 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Fri, 8 Nov 2019 19:11:55 +0000 Subject: [PATCH 233/366] pushd/popd instead of juggling with a variable Signed-off-by: Adam Warner --- automated install/basic-install.sh | 37 +++++++++++------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 47ecd125..d805e927 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -378,17 +378,13 @@ fi is_repo() { # Use a named, local variable instead of the vague $1, which is the first argument passed to this function # These local variables should always be lowercase - local directory="${1}" - # A local variable for the current directory - local curdir + local directory="${1}" # A variable to store the return code - 
local rc - # Assign the current directory variable by using pwd - curdir="${PWD}" + local rc # If the first argument passed to this function is a directory, if [[ -d "${directory}" ]]; then # move into the directory - cd "${directory}" + pushd "${directory}" &> /dev/null || return 1 # Use git to check if the directory is a repo # git -C is not used here to support git versions older than 1.8.4 git status --short &> /dev/null || rc=$? @@ -398,7 +394,7 @@ is_repo() { rc=1 fi # Move back into the directory the user started in - cd "${curdir}" + popd &> /dev/null || return 1 # Return the code; if one is not set, return 0 return "${rc:-0}" } @@ -408,7 +404,6 @@ make_repo() { # Set named variables for better readability local directory="${1}" local remoteRepo="${2}" - local curdir # The message to display when this function is running str="Clone ${remoteRepo} into ${directory}" @@ -422,11 +417,9 @@ make_repo() { # Clone the repo and return the return code from this command git clone -q --depth 20 "${remoteRepo}" "${directory}" &> /dev/null || return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+rX "${directory}" - # Make sure we know what directory we are in so we can move back into it - curdir="${PWD}" + chmod -R a+rX "${directory}" # Move into the directory that was passed as an argument - cd "${directory}" &> /dev/null || return 1 + pushd "${directory}" &> /dev/null || return 1 # Check current branch. If it is master, then reset to the latest availible tag. # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch = $(git rev-parse --abbrev-ref HEAD) @@ -437,7 +430,7 @@ make_repo() { printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Move back into the original directory - cd "${curdir}" &> /dev/null || return 1 + popd &> /dev/null || return 1 return 0 } @@ -448,18 +441,14 @@ update_repo() { # but since they are local, their scope does not go beyond this function # This helps prevent the wrong value from being assigned if you were to set the variable as a GLOBAL one local directory="${1}" - local curdir local curBranch # A variable to store the message we want to display; # Again, it's useful to store these in variables in case we need to reuse or change the message; # we only need to make one change here local str="Update repo in ${1}" - - # Make sure we know what directory we are in so we can move back into it - curdir="${PWD}" - # Move into the directory that was passed as an argument - cd "${directory}" &> /dev/null || return 1 + # Move into the directory that was passed as an argument + pushd "${directory}" &> /dev/null || return 1 # Let the user know what's happening printf " %b %s..." "${INFO}" "${str}" # Stash any local commits as they conflict with our working code @@ -478,7 +467,7 @@ update_repo() { # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) chmod -R a+rX "${directory}" # Move back into the original directory - cd "${curdir}" &> /dev/null || return 1 + popd &> /dev/null || return 1 return 0 } @@ -517,7 +506,7 @@ resetRepo() { # Use named variables for arguments local directory="${1}" # Move into the directory - cd "${directory}" &> /dev/null || return 1 + pushd "${directory}" &> /dev/null || return 1 # Store the message in a variable str="Resetting repository within ${1}..." 
# Show the message @@ -528,7 +517,9 @@ resetRepo() { chmod -R a+rX "${directory}" # And show the status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" - # Returning success anyway? + # Return to where we came from + popd &> /dev/null || return 1 + # Returning success anyway? return 0 } From c8b9e42649d296366adce0da35e08a4e5e629b97 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Fri, 8 Nov 2019 19:18:35 +0000 Subject: [PATCH 234/366] Please Codefactor. Signed-off-by: Adam Warner --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index d805e927..d2295f96 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -422,7 +422,7 @@ make_repo() { pushd "${directory}" &> /dev/null || return 1 # Check current branch. If it is master, then reset to the latest availible tag. # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) - curBranch = $(git rev-parse --abbrev-ref HEAD) + curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. git reset --hard $(git describe --abbrev=0) || return $? fi @@ -458,7 +458,7 @@ update_repo() { git pull --quiet &> /dev/null || return $? # Check current branch. If it is master, then reset to the latest availible tag. # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) - curBranch = $(git rev-parse --abbrev-ref HEAD) + curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then git reset --hard $(git describe --abbrev=0) || return $? fi From 73d9abae3e29ca1ce2c618c8707c4ca35255b06b Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Fri, 8 Nov 2019 20:58:42 +0000 Subject: [PATCH 235/366] And finally, we please stickler Signed-off-by: Adam Warner --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index d2295f96..535fec2d 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -424,7 +424,7 @@ make_repo() { # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. - git reset --hard $(git describe --abbrev=0) || return $? + git reset --hard "$(git describe --abbrev=0)" || return $? fi # Show a colored message showing it's status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" @@ -460,7 +460,7 @@ update_repo() { # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then - git reset --hard $(git describe --abbrev=0) || return $? + git reset --hard "$(git describe --abbrev=0)" || return $? 
fi # Show a completion message printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" From a7e81c8ea0ac63d66dc05ea37bdaa6ee9b5cacb4 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 11 Nov 2019 20:12:31 +0000 Subject: [PATCH 236/366] remove extra space Signed-off-by: Adam Warner --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 535fec2d..f00f7689 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -378,9 +378,9 @@ fi is_repo() { # Use a named, local variable instead of the vague $1, which is the first argument passed to this function # These local variables should always be lowercase - local directory="${1}" + local directory="${1}" # A variable to store the return code - local rc + local rc # If the first argument passed to this function is a directory, if [[ -d "${directory}" ]]; then # move into the directory From 6571a63ffa6b297598d9b89ebdbccc99c2a40980 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 11 Nov 2019 20:36:51 +0000 Subject: [PATCH 237/366] Add --tags to descibe command Signed-off-by: Adam Warner --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f00f7689..f7358fb8 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -424,7 +424,7 @@ make_repo() { # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. - git reset --hard "$(git describe --abbrev=0)" || return $? + git reset --hard "$(git describe --abbrev=0 --tags)" || return $? fi # Show a colored message showing it's status printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" @@ -460,7 +460,7 @@ update_repo() { # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then - git reset --hard "$(git describe --abbrev=0)" || return $? + git reset --hard "$(git describe --abbrev=0 --tags)" || return $? fi # Show a completion message printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" From d457d40e0b98d8b42a2b17e99d97d05debdb9800 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 12 Nov 2019 20:49:46 +0100 Subject: [PATCH 238/366] Add php-xml package as new dependency. 
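The web interface's PHP code needs the XML extension, so it is added to PIHOLE_WEB_DEPS for both the apt-get and rpm package paths below. As an illustrative post-install check only (not part of basic-install.sh, and assuming the php CLI is on the PATH), the extension can be verified like this:

    # Illustrative sanity check: list loaded PHP modules and look for XML support
    if php -m 2>/dev/null | grep -qiE '^(xml|simplexml|dom)$'; then
        echo "PHP XML support is available"
    else
        echo "PHP XML support is missing; install the distribution's php-xml package" >&2
    fi
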
Signed-off-by: DL6ER --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c887a6c6..744b8b4f 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -250,7 +250,7 @@ if is_command apt-get ; then PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}") + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml") # The Web server user, LIGHTTPD_USER="www-data" # group, @@ -290,7 +290,7 @@ elif is_command rpm ; then PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(dialog git iproute newt procps-ng which chkconfig) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) - PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo) + PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml) LIGHTTPD_USER="lighttpd" LIGHTTPD_GROUP="lighttpd" LIGHTTPD_CFG="lighttpd.conf.fedora" From 4840bdb03158410d474cd7be88d77b41299cdd3f Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Thu, 14 Nov 2019 19:06:23 +0000 Subject: [PATCH 239/366] add a double space to the beginning of some outputs Signed-off-by: Adam Warner --- automated install/basic-install.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 2d06f526..a92a35a7 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1968,32 +1968,32 @@ checkSelinux() { DEFAULT_SELINUX=$(awk -F= '/^SELINUX=/ {print $2}' /etc/selinux/config) case "${DEFAULT_SELINUX,,}" in enforcing) - printf "%b %bDefault SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${DEFAULT_SELINUX}" "${COL_NC}" + printf " %b %bDefault SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${DEFAULT_SELINUX}" "${COL_NC}" SELINUX_ENFORCING=1 ;; *) # 'permissive' and 'disabled' - printf "%b %bDefault SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${DEFAULT_SELINUX}" "${COL_NC}" + printf " %b %bDefault SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${DEFAULT_SELINUX}" "${COL_NC}" ;; esac # Check the current state of SELinux CURRENT_SELINUX=$(getenforce) case "${CURRENT_SELINUX,,}" in enforcing) - printf "%b %bCurrent SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${CURRENT_SELINUX}" "${COL_NC}" + printf " %b %bCurrent SELinux: %s%b\\n" "${CROSS}" "${COL_RED}" "${CURRENT_SELINUX}" "${COL_NC}" SELINUX_ENFORCING=1 ;; *) # 'permissive' and 'disabled' - printf "%b %bCurrent SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${CURRENT_SELINUX}" "${COL_NC}" + printf " %b %bCurrent SELinux: %s%b\\n" "${TICK}" "${COL_GREEN}" "${CURRENT_SELINUX}" "${COL_NC}" ;; esac else - echo -e "${INFO} ${COL_GREEN}SELinux not detected${COL_NC}"; + echo -e " ${INFO} ${COL_GREEN}SELinux not detected${COL_NC}"; fi # Exit the installer if any SELinux checks toggled the flag if [[ "${SELINUX_ENFORCING}" -eq 1 ]] && [[ -z "${PIHOLE_SELINUX}" ]]; then - printf "Pi-hole does not provide an SELinux policy as the required changes modify the security of your 
system.\\n" - printf "Please refer to https://wiki.centos.org/HowTos/SELinux if SELinux is required for your deployment.\\n" - printf "\\n%bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; + printf " Pi-hole does not provide an SELinux policy as the required changes modify the security of your system.\\n" + printf " Please refer to https://wiki.centos.org/HowTos/SELinux if SELinux is required for your deployment.\\n" + printf "\\n %bSELinux Enforcing detected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; exit 1; fi } From 12817c09bb22afc02eaeb635071206bf454f9848 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Thu, 14 Nov 2019 18:52:07 +0000 Subject: [PATCH 240/366] (Squashed commits) Always ensure we have the correct machine arch by storing to/reading from a file rather than depending on global variable that for some reason is not always populated... Signed-off-by: Adam Warner no need for global variable Signed-off-by: Adam Warner Use a file in the temporary FTL download directory Signed-off-by: Dan Schaper Local binary variable named to l_binary. Disambiguate from global binary. Allow 'binary' to be shadowed for testing. Use ./ftlbinary in all operations. Signed-off-by: Dan Schaper Revert shadow ability on binary variable. Signed-off-by: Dan Schaper Remove unused tests, binary variable can not be overridden. Signed-off-by: Dan Schaper This should work here, too Signed-off-by: Adam Warner binary name is passed through from pihole checkout Signed-off-by: Adam Warner Add comments Signed-off-by: Adam Warner OK, let's try it this way again Signed-off-by: Adam Warner we might be getting somewhere.. squash after this I think! Signed-off-by: Adam Warner This is a test to see if it fixes the aarch64 test (we are definitely squashing these commits Signed-off-by: Adam Warner fix the rest of the tests Signed-off-by: Adam Warner Remove trailing whitespace in the files we've touched here Signed-off-by: Adam Warner --- advanced/Scripts/piholeCheckout.sh | 9 ++-- advanced/Scripts/update.sh | 1 - automated install/basic-install.sh | 47 +++++++++++------ test/test_automated_install.py | 82 +++++++++--------------------- 4 files changed, 63 insertions(+), 76 deletions(-) diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh index 673ded0b..31009dd9 100644 --- a/advanced/Scripts/piholeCheckout.sh +++ b/advanced/Scripts/piholeCheckout.sh @@ -46,6 +46,12 @@ checkout() { local corebranches local webbranches + # Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + # Avoid globbing set -f @@ -86,7 +92,6 @@ checkout() { fi #echo -e " ${TICK} Pi-hole Core" - get_binary_name local path path="development/${binary}" echo "development" > /etc/pihole/ftlbranch @@ -101,7 +106,6 @@ checkout() { fetch_checkout_pull_branch "${webInterfaceDir}" "master" || { echo " ${CROSS} Unable to pull Web master branch"; exit 1; } fi #echo -e " ${TICK} Web Interface" - get_binary_name local path path="master/${binary}" echo "master" > /etc/pihole/ftlbranch @@ -161,7 +165,6 @@ checkout() { fi checkout_pull_branch "${webInterfaceDir}" "${2}" elif [[ "${1}" == "ftl" ]] ; then - get_binary_name local path path="${2}/${binary}" diff --git a/advanced/Scripts/update.sh 
b/advanced/Scripts/update.sh index 4d352777..443dfb1f 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -31,7 +31,6 @@ source "/opt/pihole/COL_TABLE" # make_repo() sourced from basic-install.sh # update_repo() source from basic-install.sh # getGitFiles() sourced from basic-install.sh -# get_binary_name() sourced from basic-install.sh # FTLcheckUpdate() sourced from basic-install.sh GitCheckUpdateAvail() { diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 2d06f526..e99d2b9a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -138,9 +138,6 @@ else OVER="\\r\\033[K" fi -# Define global binary variable -binary="tbd" - # A simple function that just echoes out our logo in ASCII format # This lets users know that it is a Pi-hole, LLC product show_ascii_berry() { @@ -2189,7 +2186,10 @@ clone_or_update_repos() { } # Download FTL binary to random temp directory and install FTL binary +# Disable directive for SC2120 a value _can_ be passed to this function, but it is passed from an external script that sources this one +# shellcheck disable=SC2120 FTLinstall() { + # Local, named variables local latesttag local str="Downloading and Installing FTL" @@ -2219,6 +2219,9 @@ FTLinstall() { ftlBranch="master" fi + local binary + binary="${1}" + # Determine which version of FTL to download if [[ "${ftlBranch}" == "master" ]];then url="https://github.com/pi-hole/FTL/releases/download/${latesttag%$'\r'}" @@ -2297,6 +2300,8 @@ get_binary_name() { local machine machine=$(uname -m) + local l_binary + local str="Detecting architecture" printf " %b %s..." "${INFO}" "${str}" # If the machine is arm or aarch @@ -2312,24 +2317,24 @@ get_binary_name() { if [[ "${lib}" == "/lib/ld-linux-aarch64.so.1" ]]; then printf "%b %b Detected ARM-aarch64 architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-aarch64-linux-gnu" + l_binary="pihole-FTL-aarch64-linux-gnu" # elif [[ "${lib}" == "/lib/ld-linux-armhf.so.3" ]]; then # if [[ "${rev}" -gt 6 ]]; then printf "%b %b Detected ARM-hf architecture (armv7+)\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabihf" + l_binary="pihole-FTL-arm-linux-gnueabihf" # Otherwise, else printf "%b %b Detected ARM-hf architecture (armv6 or lower) Using ARM binary\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabi" + l_binary="pihole-FTL-arm-linux-gnueabi" fi else printf "%b %b Detected ARM architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabi" + l_binary="pihole-FTL-arm-linux-gnueabi" fi elif [[ "${machine}" == "x86_64" ]]; then # This gives the architecture of packages dpkg installs (for example, "i386") @@ -2342,12 +2347,12 @@ get_binary_name() { # in the past (see https://github.com/pi-hole/pi-hole/pull/2004) if [[ "${dpkgarch}" == "i386" ]]; then printf "%b %b Detected 32bit (i686) architecture\\n" "${OVER}" "${TICK}" - binary="pihole-FTL-linux-x86_32" + l_binary="pihole-FTL-linux-x86_32" else # 64bit printf "%b %b Detected x86_64 architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-linux-x86_64" + l_binary="pihole-FTL-linux-x86_64" fi else # Something else - we try to use 32bit executable and warn the user @@ -2358,13 +2363,13 @@ get_binary_name() { else printf "%b %b Detected 32bit (i686) architecture\\n" "${OVER}" "${TICK}" fi - binary="pihole-FTL-linux-x86_32" + 
l_binary="pihole-FTL-linux-x86_32" fi + + echo ${l_binary} } FTLcheckUpdate() { - get_binary_name - #In the next section we check to see if FTL is already installed (in case of pihole -r). #If the installed version matches the latest version, then check the installed sha1sum of the binary vs the remote sha1sum. If they do not match, then download printf " %b Checking for existing FTL binary...\\n" "${INFO}" @@ -2380,6 +2385,9 @@ FTLcheckUpdate() { ftlBranch="master" fi + local binary + binary="${1}" + local remoteSha1 local localSha1 @@ -2458,8 +2466,10 @@ FTLcheckUpdate() { FTLdetect() { printf "\\n %b FTL Checks...\\n\\n" "${INFO}" - if FTLcheckUpdate ; then - FTLinstall || return 1 + printf " %b" "${2}" + + if FTLcheckUpdate "${1}"; then + FTLinstall "${1}" || return 1 fi } @@ -2622,8 +2632,15 @@ main() { fi # Create the pihole user create_pihole_user + # Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole - if ! FTLdetect; then + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + local theRest + theRest="${funcOutput%pihole-FTL*}" # Print the rest of get_binary_name's output to display (cut out from first instance of "pihole-FTL") + if ! FTLdetect "${binary}" "${theRest}"; then printf " %b FTL Engine not installed\\n" "${CROSS}" exit 1 fi diff --git a/test/test_automated_install.py b/test/test_automated_install.py index aeaac3dc..567ea241 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -342,7 +342,10 @@ def test_FTL_detect_aarch64_no_errors(Pihole): detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -363,7 +366,10 @@ def test_FTL_detect_armv6l_no_errors(Pihole): detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -385,7 +391,10 @@ def test_FTL_detect_armv7l_no_errors(Pihole): detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -402,7 +411,10 @@ def test_FTL_detect_x86_64_no_errors(Pihole): detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' 
assert expected_stdout in detectPlatform.stdout @@ -419,7 +431,10 @@ def test_FTL_detect_unknown_no_errors(Pihole): detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = 'Not able to detect architecture (unknown: mips)' assert expected_stdout in detectPlatform.stdout @@ -438,64 +453,14 @@ def test_FTL_download_aarch64_no_errors(Pihole): ''') download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh - binary="pihole-FTL-aarch64-linux-gnu" create_pihole_user - FTLinstall + FTLinstall "pihole-FTL-aarch64-linux-gnu" ''') expected_stdout = tick_box + ' Downloading and Installing FTL' assert expected_stdout in download_binary.stdout assert 'error' not in download_binary.stdout.lower() -def test_FTL_download_unknown_fails_no_errors(Pihole): - ''' - confirms unknown binary is not downloaded for FTL engine - ''' - # mock whiptail answers and ensure installer dependencies - mock_command('whiptail', {'*': ('', '0')}, Pihole) - Pihole.run(''' - source /opt/pihole/basic-install.sh - distro_check - install_dependent_packages ${INSTALLER_DEPS[@]} - ''') - download_binary = Pihole.run(''' - source /opt/pihole/basic-install.sh - binary="pihole-FTL-mips" - create_pihole_user - FTLinstall - ''') - expected_stdout = cross_box + ' Downloading and Installing FTL' - assert expected_stdout in download_binary.stdout - error1 = 'Error: URL https://github.com/pi-hole/FTL/releases/download/' - assert error1 in download_binary.stdout - error2 = 'not found' - assert error2 in download_binary.stdout - - -def test_FTL_download_binary_unset_no_errors(Pihole): - ''' - confirms unset binary variable does not download FTL engine - ''' - # mock whiptail answers and ensure installer dependencies - mock_command('whiptail', {'*': ('', '0')}, Pihole) - Pihole.run(''' - source /opt/pihole/basic-install.sh - distro_check - install_dependent_packages ${INSTALLER_DEPS[@]} - ''') - download_binary = Pihole.run(''' - source /opt/pihole/basic-install.sh - create_pihole_user - FTLinstall - ''') - expected_stdout = cross_box + ' Downloading and Installing FTL' - assert expected_stdout in download_binary.stdout - error1 = 'Error: URL https://github.com/pi-hole/FTL/releases/download/' - assert error1 in download_binary.stdout - error2 = 'not found' - assert error2 in download_binary.stdout - - def test_FTL_binary_installed_and_responsive_no_errors(Pihole): ''' confirms FTL binary is copied and functional in installed location @@ -503,7 +468,10 @@ def test_FTL_binary_installed_and_responsive_no_errors(Pihole): installed_binary = Pihole.run(''' source /opt/pihole/basic-install.sh create_pihole_user - FTLdetect + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" pihole-FTL version ''') expected_stdout = 'v' From 037d52104a84ac867842e017c30879d17ee79cfe Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 26 Nov 2019 10:58:39 +0100 Subject: [PATCH 241/366] New command "pihole -g -r" recreates gravity.db based on files backed up in /etc/pihole/migration_update. This is useful to restore a working version of the database when the user destroyed the original database. Also, update gravity.db to version 5 because of a fix we needed to implement. 
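Functionally, the new --recreate flag discards the damaged database and copies the plain-text backups from the migration backup directory back into the Pi-hole directory, after which the normal gravity run re-imports them. A condensed sketch of that flow (error handling and status output stripped; see the actual gravity.sh hunk further down) is:

    # Simplified view of the "pihole -g -r" restore path
    piholeDir="/etc/pihole"
    gravityDBfile="${piholeDir}/gravity.db"
    rm -f "${gravityDBfile}"                              # drop the broken database
    cp "${piholeDir}"/migration_backup/* "${piholeDir}/"  # restore the backed-up list files
    # a subsequent gravity run then rebuilds a fresh gravity.db from these files
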
Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 12 ++++++++++-- .../database_migration/gravity/4_to_5.sql | 17 +++++++++++++++++ gravity.sh | 17 ++++++++++++++--- 3 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 advanced/Scripts/database_migration/gravity/4_to_5.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 773898b0..1fe6a61f 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -44,9 +44,17 @@ upgrade_gravityDB(){ version=3 fi if [[ "$version" == "3" ]]; then - # This migration script upgrades the gravity and adlist views + # This migration script upgrades the gravity and list views # implementing necessary changes for per-client blocking + echo -e " ${INFO} Upgrading gravity database from version 3 to 4" sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" - version=3 + version=4 + fi + if [[ "$version" == "4" ]]; then + # This migration script upgrades the adlist view + # to return an ID used in gravity.sh + echo -e " ${INFO} Upgrading gravity database from version 4 to 5" + sqlite3 "${database}" < "${scriptPath}/4_to_5.sql" + version=5 fi } diff --git a/advanced/Scripts/database_migration/gravity/4_to_5.sql b/advanced/Scripts/database_migration/gravity/4_to_5.sql new file mode 100644 index 00000000..22b75d58 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/4_to_5.sql @@ -0,0 +1,17 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +DROP VIEW vw_adlist; +CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id + FROM adlist + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY adlist.id; + +UPDATE info SET value = 5 WHERE property = 'version'; + +COMMIT; diff --git a/gravity.sh b/gravity.sh index 7a9e4f67..3225b28d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -229,7 +229,7 @@ gravity_CheckDNSResolutionAvailable() { fi # Determine if $lookupDomain is resolvable - if timeout 1 getent hosts "${lookupDomain}" &> /dev/null; then + if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then # Print confirmation of resolvability if it had previously failed if [[ -n "${secs:-}" ]]; then echo -e "${OVER} ${TICK} DNS resolution is now available\\n" @@ -243,7 +243,7 @@ gravity_CheckDNSResolutionAvailable() { # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN. # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventualy fails # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback - if timeout 1 dig +short "${lookupDomain}" &> /dev/null; then + if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then if [[ -n "${secs:-}" ]]; then echo -e "${OVER} ${TICK} DNS resolution is now available\\n" fi @@ -425,7 +425,7 @@ gravity_DownloadBlocklistFromUrl() { if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then # Add domains to database table - str="Adding to database table" + str="Adding adlist with ID ${adlistID} to database table" echo -ne " ${INFO} ${str}..." 
database_table_from_file "gravity" "${saveLocation}" "${adlistID}" echo -e "${OVER} ${TICK} ${str}" @@ -660,6 +660,7 @@ for var in "$@"; do case "${var}" in "-f" | "--force" ) forceDelete=true;; "-o" | "--optimize" ) optimize_database=true;; + "-r" | "--recreate" ) recreate_database=true;; "-h" | "--help" ) helpFunc;; esac done @@ -667,6 +668,16 @@ done # Trap Ctrl-C gravity_Trap +if [[ "${recreate_database:-}" == true ]]; then + str="Restoring from migration backup" + echo -ne "${INFO} ${str}..." + rm "${gravityDBfile}" + pushd "${piholeDir}" > /dev/null + cp migration_backup/* . + popd > /dev/null + echo -e "${OVER} ${TICK} ${str}" +fi + # Move possibly existing legacy files to the gravity database migrate_to_database From c809c34024ac302e11ba90761526d2c57a006f45 Mon Sep 17 00:00:00 2001 From: Mograine Date: Wed, 27 Nov 2019 00:27:57 +0100 Subject: [PATCH 242/366] Add user feedback Signed-off-by: Mograine --- advanced/Scripts/webpage.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 7568d119..88ee00f8 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -599,15 +599,25 @@ SetPrivacyLevel() { } AddCustomDNSAddress() { + echo -e " ${TICK} Adding custom DNS entry..." + ip="${args[2]}" host="${args[3]}" echo "${ip} ${host}" >> "${dnscustomfile}" + + # Restart dnsmasq to load new custom DNS entries + RestartDNS } RemoveCustomDNSAddress() { + echo -e " ${TICK} Removing custom DNS entry..." + ip="${args[2]}" host="${args[3]}" sed -i "/${ip} ${host}/d" "${dnscustomfile}" + + # Restart dnsmasq to update removed custom DNS entries + RestartDNS } main() { From 8e5abc1f154f6fa568a73d076f416b8f9f615ea3 Mon Sep 17 00:00:00 2001 From: Jason Cooke Date: Fri, 29 Nov 2019 13:46:05 +1300 Subject: [PATCH 243/366] docs: fix typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fb2179eb..97459442 100644 --- a/README.md +++ b/README.md @@ -175,7 +175,7 @@ While quite outdated at this point, [this original blog post about Pi-hole](http ----- ## Coverage -- [Lifehacker: Turn A Raspberry Pi Into An Ad Blocker With A Single Command](https://www.lifehacker.com.au/2015/02/turn-a-raspberry-pi-into-an-ad-blocker-with-a-single-command/) (Feburary, 2015) +- [Lifehacker: Turn A Raspberry Pi Into An Ad Blocker With A Single Command](https://www.lifehacker.com.au/2015/02/turn-a-raspberry-pi-into-an-ad-blocker-with-a-single-command/) (February, 2015) - [MakeUseOf: Adblock Everywhere: The Raspberry Pi-Hole Way](http://www.makeuseof.com/tag/adblock-everywhere-raspberry-pi-hole-way/) (March, 2015) - [Catchpoint: Ad-Blocking on Apple iOS9: Valuing the End User Experience](http://blog.catchpoint.com/2015/09/14/ad-blocking-apple/) (September, 2015) - [Security Now Netcast: Pi-hole](https://www.youtube.com/watch?v=p7-osq_y8i8&t=100m26s) (October, 2015) From 185319d560829d9304f38db568c61e8e6955bc90 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 30 Nov 2019 12:33:16 +0000 Subject: [PATCH 244/366] Unite four domain tables into a single domainlist table. 
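After this migration every domain lives in a single domainlist table and its list membership is expressed by the type column (0 = exact whitelist, 1 = exact blacklist, 2 = regex whitelist, 3 = regex blacklist); the old per-list names survive only as views. As an illustrative check only, assuming the default database location, the distribution of migrated entries can be inspected with:

    # Illustrative: show how many entries ended up in each list type
    sqlite3 /etc/pihole/gravity.db \
        "SELECT type, COUNT(*) FROM domainlist GROUP BY type ORDER BY type;"
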
Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 7 ++ .../database_migration/gravity/3_to_4.sql | 96 +++++++++++++++++++ advanced/Scripts/list.sh | 34 +++++-- 3 files changed, 128 insertions(+), 9 deletions(-) create mode 100644 advanced/Scripts/database_migration/gravity/3_to_4.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 0fe90d8a..d7de0ec5 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -41,4 +41,11 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" version=3 fi + if [[ "$version" == "3" ]]; then + # This migration script unifies the formally separated domain + # lists into a single table with a UNIQUE domain constraint + echo -e " ${INFO} Upgrading gravity database from version 3 to 4" + sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" + version=6 + fi } diff --git a/advanced/Scripts/database_migration/gravity/3_to_4.sql b/advanced/Scripts/database_migration/gravity/3_to_4.sql new file mode 100644 index 00000000..8d1c1d26 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/3_to_4.sql @@ -0,0 +1,96 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +CREATE TABLE domainlist +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + type INTEGER NOT NULL DEFAULT 0, + domain TEXT UNIQUE NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT 1, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT +); + +ALTER TABLE whitelist ADD COLUMN type INTEGER; +UPDATE whitelist SET type = 0; +INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment) + SELECT type,domain,enabled,date_added,date_modified,comment FROM whitelist; + +ALTER TABLE blacklist ADD COLUMN type INTEGER; +UPDATE blacklist SET type = 1; +INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment) + SELECT type,domain,enabled,date_added,date_modified,comment FROM blacklist; + +ALTER TABLE regex_whitelist ADD COLUMN type INTEGER; +UPDATE regex_whitelist SET type = 2; +INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment) + SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_whitelist; + +ALTER TABLE regex_blacklist ADD COLUMN type INTEGER; +UPDATE regex_blacklist SET type = 3; +INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment) + SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_blacklist; + +DROP TABLE whitelist_by_group; +DROP TABLE blacklist_by_group; +DROP TABLE regex_whitelist_by_group; +DROP TABLE regex_blacklist_by_group; +CREATE TABLE domainlist_by_group +( + domainlist_id INTEGER NOT NULL REFERENCES domainlist (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (domainlist_id, group_id) +); + +DROP TRIGGER tr_whitelist_update; +DROP TRIGGER tr_blacklist_update; +DROP TRIGGER tr_regex_whitelist_update; +DROP TRIGGER tr_regex_blacklist_update; +CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist + BEGIN + UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; + +DROP VIEW vw_whitelist; +CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON 
domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 0 + ORDER BY domainlist.id; + +DROP VIEW vw_blacklist; +CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 1 + ORDER BY domainlist.id; + +DROP VIEW vw_regex_whitelist; +CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 2 + ORDER BY domainlist.id; + +DROP VIEW vw_regex_blacklist; +CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 3 + ORDER BY domainlist.id; + +UPDATE info SET value = 6 WHERE property = 'version'; + +COMMIT; diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 6a606665..483b7153 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -27,6 +27,17 @@ listname="" colfile="/opt/pihole/COL_TABLE" source ${colfile} +getTypeID() { + if [[ "$1" == "whitelist" ]]; then + echo "0" + elif [[ "$1" == "blacklist" ]]; then + echo "1" + elif [[ "$1" == "regex_whitelist" ]]; then + echo "2" + elif [[ "$1" == "regex_blacklist" ]]; then + echo "3" + fi +} helpFunc() { if [[ "${listType}" == "whitelist" ]]; then @@ -129,13 +140,14 @@ ProcessDomainList() { } AddDomain() { - local domain list num + local domain list num typeID # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" list="$2" + typeID="$(getTypeID "${list}")" # Is the domain in the list we want to add it to? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = '${domain}';")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};")" if [[ "${num}" -ne 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -151,17 +163,18 @@ AddDomain() { reload=true # Insert only the domain here. The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO ${list} (domain) VALUES ('${domain}');" + sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeID});" } RemoveDomain() { - local domain list num + local domain list num typeID # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" list="$2" + typeID="$(getTypeID "${list}")" # Is the domain in the list we want to remove it from? 
- num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${list} WHERE domain = '${domain}';")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};")" if [[ "${num}" -eq 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -176,14 +189,15 @@ RemoveDomain() { fi reload=true # Remove it from the current list - sqlite3 "${gravityDBfile}" "DELETE FROM ${list} WHERE domain = '${domain}';" + sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};" } Displaylist() { - local list listname count num_pipes domain enabled status nicedate + local list listname count num_pipes domain enabled status nicedate typeID listname="${listType}" - data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM ${listType};" 2> /dev/null)" + typeID="$(getTypeID "${listType}")" + data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeID};" 2> /dev/null)" if [[ -z $data ]]; then echo -e "Not showing empty list" @@ -221,7 +235,9 @@ Displaylist() { } NukeList() { - sqlite3 "${gravityDBfile}" "DELETE FROM ${listType};" + local typeID + typeID=$(getTypeID "${list}") + sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeID};" } for var in "$@"; do From a1f120b2ff5fd7801202a481723c12871a8ab902 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 30 Nov 2019 12:43:07 +0000 Subject: [PATCH 245/366] Address stickler's complaint Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index d7de0ec5..a12be864 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -10,6 +10,8 @@ # This file is copyright under the latest version of the EUPL. # Please see LICENSE file for your rights under this license. 
+scriptPath="/etc/.pihole/advanced/Scripts/database_migration/gravity" + upgrade_gravityDB(){ local database piholeDir auditFile version database="${1}" @@ -23,7 +25,7 @@ upgrade_gravityDB(){ # This migration script upgrades the gravity.db file by # adding the domain_audit table echo -e " ${INFO} Upgrading gravity database from version 1 to 2" - sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/1_to_2.sql" + sqlite3 "${database}" < "${scriptPath}/1_to_2.sql" version=2 # Store audit domains in database table @@ -38,7 +40,7 @@ upgrade_gravityDB(){ # renaming the regex table to regex_blacklist, and # creating a new regex_whitelist table + corresponding linking table and views echo -e " ${INFO} Upgrading gravity database from version 2 to 3" - sqlite3 "${database}" < "/etc/.pihole/advanced/Scripts/database_migration/gravity/2_to_3.sql" + sqlite3 "${database}" < "${scriptPath}/2_to_3.sql" version=3 fi if [[ "$version" == "3" ]]; then From d0de5fda3086614b1f8266a9a4ed12f0b6ef9c97 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 13:13:26 +0000 Subject: [PATCH 246/366] Simplify removal of domain from one list when it is requested for another Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 36 +++++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 483b7153..55272cde 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -39,6 +39,18 @@ getTypeID() { fi } +getListnameFromType() { + if [[ "$1" == "0" ]]; then + echo "whitelist" + elif [[ "$1" == "1" ]]; then + echo "blacklist" + elif [[ "$1" == "2" ]]; then + echo "regex_whitelist" + elif [[ "$1" == "3" ]]; then + echo "regex_blacklist" + fi +} + helpFunc() { if [[ "${listType}" == "whitelist" ]]; then param="w" @@ -105,19 +117,15 @@ HandleOther() { } ProcessDomainList() { - local is_regexlist if [[ "${listType}" == "regex_blacklist" ]]; then # Regex black filter list listname="regex blacklist filters" - is_regexlist=true elif [[ "${listType}" == "regex_whitelist" ]]; then # Regex white filter list listname="regex whitelist filters" - is_regexlist=true else # Whitelist / Blacklist listname="${listType}" - is_regexlist=false fi for dom in "${domList[@]}"; do @@ -130,9 +138,6 @@ ProcessDomainList() { # if delmode then remove from desired list but do not add to the other if ${addmode}; then AddDomain "${dom}" "${listType}" - if ! ${is_regexlist}; then - RemoveDomain "${dom}" "${listAlt}" - fi else RemoveDomain "${dom}" "${listType}" fi @@ -140,18 +145,27 @@ ProcessDomainList() { } AddDomain() { - local domain list num typeID + local domain list num currTypeID currListName typeID # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" list="$2" typeID="$(getTypeID "${list}")" # Is the domain in the list we want to add it to? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" if [[ "${num}" -ne 0 ]]; then - if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" 
+ currTypeID="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" + if [[ "${currTypeID}" == "${typeID}" ]]; then + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" + fi + else + currListName="$(getListnameFromType "${currTypeID}")" + sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeID} WHERE domain='${domain}';" + if [[ "${verbose}" == true ]]; then + echo -e " ${INFO} ${1} already exists in ${currListName}, it has been updated to the requested list type." + fi fi return fi From 6a881545b0d99952af2e26b50e9e859415fe7254 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 13:19:58 +0000 Subject: [PATCH 247/366] tweak wording Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 55272cde..596a6fb7 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -164,7 +164,7 @@ AddDomain() { currListName="$(getListnameFromType "${currTypeID}")" sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeID} WHERE domain='${domain}';" if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${currListName}, it has been updated to the requested list type." + echo -e " ${INFO} ${1} already exists in ${currListName}, it has been moved to ${listname}" fi fi return From 77bfb3fb671a1049119ae3d075c450277b668187 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 14:18:12 +0000 Subject: [PATCH 248/366] tidy up variable usage in list.sh Remove some that are redundant Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 104 ++++++++++++++------------------------- 1 file changed, 38 insertions(+), 66 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 596a6fb7..43f84008 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -21,25 +21,12 @@ web=false domList=() -listType="" -listname="" +typeId="" colfile="/opt/pihole/COL_TABLE" source ${colfile} -getTypeID() { - if [[ "$1" == "whitelist" ]]; then - echo "0" - elif [[ "$1" == "blacklist" ]]; then - echo "1" - elif [[ "$1" == "regex_whitelist" ]]; then - echo "2" - elif [[ "$1" == "regex_blacklist" ]]; then - echo "3" - fi -} - -getListnameFromType() { +getListnameFromTypeId() { if [[ "$1" == "0" ]]; then echo "whitelist" elif [[ "$1" == "1" ]]; then @@ -52,19 +39,19 @@ getListnameFromType() { } helpFunc() { - if [[ "${listType}" == "whitelist" ]]; then + if [[ "${typeId}" == "0" ]]; then param="w" type="whitelist" - elif [[ "${listType}" == "regex_blacklist" && "${wildcard}" == true ]]; then + elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then param="-wild" type="wildcard blacklist" - elif [[ "${listType}" == "regex_blacklist" ]]; then + elif [[ "${typeId}" == "3" ]]; then param="-regex" type="regex blacklist filter" - elif [[ "${listType}" == "regex_whitelist" && "${wildcard}" == true ]]; then + elif [[ "${typeId}" == "2" && "${wildcard}" == true ]]; then param="-white-wild" type="wildcard whitelist" - elif [[ "${listType}" == "regex_whitelist" ]]; then + elif [[ "${typeId}" == "2" ]]; then param="-white-regex" type="regex whitelist filter" else @@ -101,7 +88,7 @@ HandleOther() { # Check validity of domain (don't check for regex entries) if [[ "${#domain}" -le 253 ]]; then - if [[ ( "${listType}" == "regex_blacklist" || "${listType}" == "regex_whitelist" ) && "${wildcard}" == false ]]; then + if 
[[ ( "${typeId}" == "3" || "${typeId}" == "2" ) && "${wildcard}" == false ]]; then validDomain="${domain}" else validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check @@ -117,17 +104,6 @@ HandleOther() { } ProcessDomainList() { - if [[ "${listType}" == "regex_blacklist" ]]; then - # Regex black filter list - listname="regex blacklist filters" - elif [[ "${listType}" == "regex_whitelist" ]]; then - # Regex white filter list - listname="regex whitelist filters" - else - # Whitelist / Blacklist - listname="${listType}" - fi - for dom in "${domList[@]}"; do # Format domain into regex filter if requested if [[ "${wildcard}" == true ]]; then @@ -137,34 +113,33 @@ ProcessDomainList() { # Logic: If addmode then add to desired list and remove from the other; # if delmode then remove from desired list but do not add to the other if ${addmode}; then - AddDomain "${dom}" "${listType}" + AddDomain "${dom}" else - RemoveDomain "${dom}" "${listType}" + RemoveDomain "${dom}" fi done } AddDomain() { - local domain list num currTypeID currListName typeID + local domain num requestedListname existingTypeId existingListname # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" - list="$2" - typeID="$(getTypeID "${list}")" # Is the domain in the list we want to add it to? num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" + requestedListname="$(getListnameFromTypeId "${typeId}")" if [[ "${num}" -ne 0 ]]; then - currTypeID="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" - if [[ "${currTypeID}" == "${typeID}" ]]; then + existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" + if [[ "${existingTypeId}" == "${typeId}" ]]; then if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!" + echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!" fi else - currListName="$(getListnameFromType "${currTypeID}")" - sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeID} WHERE domain='${domain}';" + existingListname="$(getListnameFromTypeId "${existingTypeId}")" + sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} already exists in ${currListName}, it has been moved to ${listname}" + echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!" fi fi return @@ -172,51 +147,50 @@ AddDomain() { # Domain not found in the table, add it! if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Adding ${1} to the ${listname}..." + echo -e " ${INFO} Adding ${domain} to the ${requestedListname}..." fi reload=true # Insert only the domain here. The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeID});" + sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" } RemoveDomain() { - local domain list num typeID + local domain num requestedListname # Use printf to escape domain. 
%q prints the argument in a form that can be reused as shell input domain="$1" - list="$2" - typeID="$(getTypeID "${list}")" # Is the domain in the list we want to remove it from? - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};")" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" + + requestedListname="$(getListnameFromTypeId "${typeId}")" if [[ "${num}" -eq 0 ]]; then if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} ${1} does not exist in ${list}, no need to remove!" + echo -e " ${INFO} ${domain} does not exist in ${requestedListname}, no need to remove!" fi return fi # Domain found in the table, remove it! if [[ "${verbose}" == true ]]; then - echo -e " ${INFO} Removing ${1} from the ${listname}..." + echo -e " ${INFO} Removing ${domain} from the ${requestedListname}..." fi reload=true # Remove it from the current list - sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeID};" + sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};" } Displaylist() { - local list listname count num_pipes domain enabled status nicedate typeID + local count num_pipes domain enabled status nicedate requestedListname - listname="${listType}" - typeID="$(getTypeID "${listType}")" - data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeID};" 2> /dev/null)" + requestedListname="$(getListnameFromTypeId "${typeId}")" + data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" if [[ -z $data ]]; then echo -e "Not showing empty list" else - echo -e "Displaying ${listname}:" + echo -e "Displaying ${requestedListname}:" count=1 while IFS= read -r line do @@ -249,19 +223,17 @@ Displaylist() { } NukeList() { - local typeID - typeID=$(getTypeID "${list}") - sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeID};" + sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" } for var in "$@"; do case "${var}" in - "-w" | "whitelist" ) listType="whitelist"; listAlt="blacklist";; - "-b" | "blacklist" ) listType="blacklist"; listAlt="whitelist";; - "--wild" | "wildcard" ) listType="regex_blacklist"; wildcard=true;; - "--regex" | "regex" ) listType="regex_blacklist";; - "--white-regex" | "white-regex" ) listType="regex_whitelist";; - "--white-wild" | "white-wild" ) listType="regex_whitelist"; wildcard=true;; + "-w" | "whitelist" ) typeId=0;; + "-b" | "blacklist" ) typeId=1;; + "--white-regex" | "white-regex" ) typeId=2;; + "--white-wild" | "white-wild" ) typeId=2; wildcard=true;; + "--wild" | "wildcard" ) typeId=3; wildcard=true;; + "--regex" | "regex" ) typeId=3;; "-nr"| "--noreload" ) reload=false;; "-d" | "--delmode" ) addmode=false;; "-q" | "--quiet" ) verbose=false;; From edaee4e9626223574b4432a03bc7d86f99baa205 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 16:02:50 +0000 Subject: [PATCH 249/366] remove redundant function and comments Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 43f84008..7707ceea 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -74,14 +74,6 @@ Options: exit 0 } -EscapeRegexp() { - # This way we may safely insert an arbitrary - # string in our 
regular expressions - # This sed is intentionally executed in three steps to ease maintainability - # The first sed removes any amount of leading dots - echo $* | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g" -} - HandleOther() { # Convert to lowercase domain="${1,,}" @@ -122,7 +114,6 @@ ProcessDomainList() { AddDomain() { local domain num requestedListname existingTypeId existingListname - # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" # Is the domain in the list we want to add it to? @@ -157,7 +148,6 @@ AddDomain() { RemoveDomain() { local domain num requestedListname - # Use printf to escape domain. %q prints the argument in a form that can be reused as shell input domain="$1" # Is the domain in the list we want to remove it from? From 4b8a72fda71bae5cc0ed2cfb6ba5bbb70a623b89 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 16:26:26 +0000 Subject: [PATCH 250/366] functionise parameter discovery Rename HandleOther to ValidateDomain Capital letters on the new functions Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 59 ++++++++++++++++++++-------------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 7707ceea..5fbe831f 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -26,7 +26,7 @@ typeId="" colfile="/opt/pihole/COL_TABLE" source ${colfile} -getListnameFromTypeId() { +GetListnameFromTypeId() { if [[ "$1" == "0" ]]; then echo "whitelist" elif [[ "$1" == "1" ]]; then @@ -38,43 +38,44 @@ getListnameFromTypeId() { fi } -helpFunc() { +GetListParamFromTypeId() { if [[ "${typeId}" == "0" ]]; then - param="w" - type="whitelist" - elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then - param="-wild" - type="wildcard blacklist" - elif [[ "${typeId}" == "3" ]]; then - param="-regex" - type="regex blacklist filter" - elif [[ "${typeId}" == "2" && "${wildcard}" == true ]]; then - param="-white-wild" - type="wildcard whitelist" - elif [[ "${typeId}" == "2" ]]; then - param="-white-regex" - type="regex whitelist filter" - else - param="b" - type="blacklist" + echo "w" + elif [[ "${typeId}" == "1" ]]; then + echo "b" + elif [[ "${typeId}" == "2" && "${wildcard}" == true ]]; then + echo "-white-wild" + elif [[ "${typeId}" == "2" ]]; then + echo "regex_blacklist" + elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then + echo "-regex" + elif [[ "${typeId}" == "3" ]]; then + echo "-wild" fi +} + +helpFunc() { + local listname param + + listname="$(GetListnameFromTypeId "${typeId}")" + param="$(GetListParamFromTypeId)" echo "Usage: pihole -${param} [options] Example: 'pihole -${param} site.com', or 'pihole -${param} site1.com site2.com' -${type^} one or more domains +${listname^} one or more domains Options: - -d, --delmode Remove domain(s) from the ${type} - -nr, --noreload Update ${type} without reloading the DNS server + -d, --delmode Remove domain(s) from the ${listname} + -nr, --noreload Update ${listname} without reloading the DNS server -q, --quiet Make output less verbose -h, --help Show this help dialog - -l, --list Display all your ${type}listed domains + -l, --list Display all your ${listname}listed domains --nuke Removes all entries in a list" exit 0 } -HandleOther() { +ValidateDomain() { # Convert to lowercase domain="${1,,}" @@ -118,7 +119,7 @@ AddDomain() { # Is the domain in the list we want to add it to? 
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" - requestedListname="$(getListnameFromTypeId "${typeId}")" + requestedListname="$(GetListnameFromTypeId "${typeId}")" if [[ "${num}" -ne 0 ]]; then existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" @@ -127,7 +128,7 @@ AddDomain() { echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!" fi else - existingListname="$(getListnameFromTypeId "${existingTypeId}")" + existingListname="$(GetListnameFromTypeId "${existingTypeId}")" sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" if [[ "${verbose}" == true ]]; then echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!" @@ -153,7 +154,7 @@ RemoveDomain() { # Is the domain in the list we want to remove it from? num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" - requestedListname="$(getListnameFromTypeId "${typeId}")" + requestedListname="$(GetListnameFromTypeId "${typeId}")" if [[ "${num}" -eq 0 ]]; then if [[ "${verbose}" == true ]]; then @@ -174,7 +175,7 @@ RemoveDomain() { Displaylist() { local count num_pipes domain enabled status nicedate requestedListname - requestedListname="$(getListnameFromTypeId "${typeId}")" + requestedListname="$(GetListnameFromTypeId "${typeId}")" data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" if [[ -z $data ]]; then @@ -231,7 +232,7 @@ for var in "$@"; do "-l" | "--list" ) Displaylist;; "--nuke" ) NukeList;; "--web" ) web=true;; - * ) HandleOther "${var}";; + * ) ValidateDomain "${var}";; esac done From 76460f01e9c70905b7ac82d9a294855c6bfd19bc Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 30 Nov 2019 17:45:07 +0000 Subject: [PATCH 251/366] Change the regex used for domain validation Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 5fbe831f..50541872 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -84,8 +84,8 @@ ValidateDomain() { if [[ ( "${typeId}" == "3" || "${typeId}" == "2" ) && "${wildcard}" == false ]]; then validDomain="${domain}" else - validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check - validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label + # Use regex to check the validity of the passed domain. 
see https://regexr.com/3abjr + validDomain=$(grep -P "^((?!-))(xn--)?[a-z0-9][a-z0-9-_]{0,61}[a-z0-9]{0,1}\.(xn--)?([a-z0-9\-]{1,61}|[a-z0-9-]{1,30}\.[a-z]{2,})$" <<< "${domain}") fi fi From 44e1455b12e0dafba4ba9cfca7708bdb9aa2d188 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 1 Dec 2019 12:44:48 +0000 Subject: [PATCH 252/366] Update advanced/Scripts/list.sh Co-Authored-By: DL6ER --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 50541872..40fabd54 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -46,7 +46,7 @@ GetListParamFromTypeId() { elif [[ "${typeId}" == "2" && "${wildcard}" == true ]]; then echo "-white-wild" elif [[ "${typeId}" == "2" ]]; then - echo "regex_blacklist" + echo "-white-regex" elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then echo "-regex" elif [[ "${typeId}" == "3" ]]; then From 0251117c779f9f3ebb2056862e1009b8a0e88e28 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 1 Dec 2019 12:45:06 +0000 Subject: [PATCH 253/366] Update advanced/Scripts/list.sh Co-Authored-By: DL6ER --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 40fabd54..064181f0 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -50,7 +50,7 @@ GetListParamFromTypeId() { elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then echo "-regex" elif [[ "${typeId}" == "3" ]]; then - echo "-wild" + echo "-regex" fi } From 63e407cfdc9f7fd1ffddd6c49b3bf6b2dfd63c63 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 1 Dec 2019 12:45:22 +0000 Subject: [PATCH 254/366] Update advanced/Scripts/list.sh Co-Authored-By: DL6ER --- advanced/Scripts/list.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 064181f0..a5c84661 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -48,7 +48,7 @@ GetListParamFromTypeId() { elif [[ "${typeId}" == "2" ]]; then echo "-white-regex" elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then - echo "-regex" + echo "-wild" elif [[ "${typeId}" == "3" ]]; then echo "-regex" fi From 869473172c2d8620e180aa037fb9cde12cef1fb3 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 1 Dec 2019 12:50:24 +0000 Subject: [PATCH 255/366] remove _ from regex descibers Signed-off-by: Adam Warner --- advanced/Scripts/list.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index a5c84661..a7d12657 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -32,9 +32,9 @@ GetListnameFromTypeId() { elif [[ "$1" == "1" ]]; then echo "blacklist" elif [[ "$1" == "2" ]]; then - echo "regex_whitelist" + echo "regex whitelist" elif [[ "$1" == "3" ]]; then - echo "regex_blacklist" + echo "regex blacklist" fi } From b6cd7b8e3d36a5d7208136a671e65913076aef79 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 2 Dec 2019 17:27:32 +0000 Subject: [PATCH 256/366] Use more descriptive names instead of directly using the IDs in list.sh Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index a7d12657..c5bf5b2a 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -26,30 +26,37 @@ typeId="" 
colfile="/opt/pihole/COL_TABLE" source ${colfile} +# IDs are hard-wired to domain interpretation in the gravity database scheme +# Clients (including FTL) will read them through the corresponding views +readonly whitelist="0" +readonly blacklist="1" +readonly regex_whitelist="2" +readonly regex_blacklist="3" + GetListnameFromTypeId() { - if [[ "$1" == "0" ]]; then + if [[ "$1" == "${whitelist}" ]]; then echo "whitelist" - elif [[ "$1" == "1" ]]; then + elif [[ "$1" == "${blacklist}" ]]; then echo "blacklist" - elif [[ "$1" == "2" ]]; then + elif [[ "$1" == "${regex_whitelist}" ]]; then echo "regex whitelist" - elif [[ "$1" == "3" ]]; then + elif [[ "$1" == "${regex_blacklist}" ]]; then echo "regex blacklist" fi } GetListParamFromTypeId() { - if [[ "${typeId}" == "0" ]]; then + if [[ "${typeId}" == "${whitelist}" ]]; then echo "w" - elif [[ "${typeId}" == "1" ]]; then + elif [[ "${typeId}" == "${blacklist}" ]]; then echo "b" - elif [[ "${typeId}" == "2" && "${wildcard}" == true ]]; then + elif [[ "${typeId}" == "${regex_whitelist}" && "${wildcard}" == true ]]; then echo "-white-wild" - elif [[ "${typeId}" == "2" ]]; then + elif [[ "${typeId}" == "${regex_whitelist}" ]]; then echo "-white-regex" - elif [[ "${typeId}" == "3" && "${wildcard}" == true ]]; then + elif [[ "${typeId}" == "${regex_blacklist}" && "${wildcard}" == true ]]; then echo "-wild" - elif [[ "${typeId}" == "3" ]]; then + elif [[ "${typeId}" == "${regex_blacklist}" ]]; then echo "-regex" fi } @@ -81,7 +88,7 @@ ValidateDomain() { # Check validity of domain (don't check for regex entries) if [[ "${#domain}" -le 253 ]]; then - if [[ ( "${typeId}" == "3" || "${typeId}" == "2" ) && "${wildcard}" == false ]]; then + if [[ ( "${typeId}" == "${regex_blacklist}" || "${typeId}" == "${regex_whitelist}" ) && "${wildcard}" == false ]]; then validDomain="${domain}" else # Use regex to check the validity of the passed domain. 
see https://regexr.com/3abjr From 85673b8273f5b27de92ae9bbde942569453fdbdf Mon Sep 17 00:00:00 2001 From: MichaIng Date: Wed, 4 Dec 2019 18:59:25 +0100 Subject: [PATCH 257/366] Print name of chosen upstream DNS as well Signed-off-by: MichaIng --- automated install/basic-install.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index c2f8ced5..59a55001 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1071,7 +1071,9 @@ setDNS() { fi # Display final selection - printf " %b Using upstream DNS: %s %s\\n" "${INFO}" "${PIHOLE_DNS_1}" "${PIHOLE_DNS_2}" + local DNSIP=${PIHOLE_DNS_1} + [[ -z ${PIHOLE_DNS_2} ]] || DNSIP+=", ${PIHOLE_DNS_2}" + printf " %b Using upstream DNS: %s (%s)\\n" "${INFO}" "${DNSchoices}" "${DNSIP}" } # Allow the user to enable/disable logging From eaf1244932ecb3e3eece033649453241df513b69 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 4 Dec 2019 20:09:34 +0000 Subject: [PATCH 258/366] :dominik: Detect binary name before calling FTLcheckUpdate in update.sh Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index 443dfb1f..e45be5cf 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -128,7 +128,12 @@ main() { fi fi - if FTLcheckUpdate > /dev/null; then + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + + if FTLcheckUpdate "${binary}" > /dev/null; then FTL_update=true echo -e " ${INFO} FTL:\\t\\t${COL_YELLOW}update available${COL_NC}" else From 0c5185f8ba0b658a8de6c2d6c9705fc54e4b0f65 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 4 Dec 2019 21:02:46 +0000 Subject: [PATCH 259/366] Also display how many unique domains we have caught in the event horizon. 
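For illustration, the extra figure is read straight from the gravity view (sketch, numbers made up):

    sqlite3 /etc/pihole/gravity.db "SELECT COUNT(DISTINCT domain) FROM vw_gravity;"

so the summary line becomes e.g. "Number of gravity domains: 126710 (104372 unique domains)".
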
Signed-off-by: DL6ER --- gravity.sh | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/gravity.sh b/gravity.sh index 3225b28d..72f39ad4 100755 --- a/gravity.sh +++ b/gravity.sh @@ -529,19 +529,24 @@ gravity_ParseFileIntoDomains() { gravity_Table_Count() { local table="${1}" local str="${2}" - local extra="${3}" local num - num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} ${extra};")" - echo -e " ${INFO} Number of ${str}: ${num}" + num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")" + if [[ "${table}" == "vw_gravity" ]]; then + local unique + unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")" + echo -e " ${INFO} Number of ${str}: ${num} (${unique} unique domains)" + else + echo -e " ${INFO} Number of ${str}: ${num}" + fi } # Output count of blacklisted domains and regex filters gravity_ShowCount() { - gravity_Table_Count "gravity" "gravity domains" "" - gravity_Table_Count "blacklist" "exact blacklisted domains" "WHERE enabled = 1" - gravity_Table_Count "regex_blacklist" "regex blacklist filters" "WHERE enabled = 1" - gravity_Table_Count "whitelist" "exact whitelisted domains" "WHERE enabled = 1" - gravity_Table_Count "regex_whitelist" "regex whitelist filters" "WHERE enabled = 1" + gravity_Table_Count "vw_gravity" "gravity domains" "" + gravity_Table_Count "vw_blacklist" "exact blacklisted domains" + gravity_Table_Count "vw_regex_blacklist" "regex blacklist filters" + gravity_Table_Count "vw_whitelist" "exact whitelisted domains" + gravity_Table_Count "vw_regex_whitelist" "regex whitelist filters" } # Parse list of domains into hosts format From 8a119d72e2dba1551713807295485e8fa9d63bbd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Dec 2019 12:17:55 +0000 Subject: [PATCH 260/366] Ensure database permissions are set up correctly by the service script. Signed-off-by: DL6ER --- advanced/Templates/pihole-FTL.service | 2 ++ 1 file changed, 2 insertions(+) diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service index 5dbf080e..43f9e184 100644 --- a/advanced/Templates/pihole-FTL.service +++ b/advanced/Templates/pihole-FTL.service @@ -48,6 +48,8 @@ start() { chown pihole:pihole /etc/pihole /etc/pihole/dhcp.leases 2> /dev/null chown pihole:pihole /var/log/pihole-FTL.log /var/log/pihole.log chmod 0644 /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log + # Chown database files to the user FTL runs as. 
We ignore errors as the files may not (yet) exist + chown pihole:pihole /etc/pihole/pihole-FTL.db /etc/pihole/gravity.db 2> /dev/null echo "nameserver 127.0.0.1" | /sbin/resolvconf -a lo.piholeFTL if setcap CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN+eip "$(which pihole-FTL)"; then su -s /bin/sh -c "/usr/bin/pihole-FTL" "$FTLUSER" From 620e1e9c73277fb1f6c1f0be3a8cc3bd9d2b82f5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Dec 2019 12:23:42 +0000 Subject: [PATCH 261/366] Do not force nameserver 127.0.0.1 through resolvconf in pihole-FTL.service Signed-off-by: DL6ER --- advanced/Templates/pihole-FTL.service | 2 -- 1 file changed, 2 deletions(-) diff --git a/advanced/Templates/pihole-FTL.service b/advanced/Templates/pihole-FTL.service index 5dbf080e..410b9b2c 100644 --- a/advanced/Templates/pihole-FTL.service +++ b/advanced/Templates/pihole-FTL.service @@ -48,7 +48,6 @@ start() { chown pihole:pihole /etc/pihole /etc/pihole/dhcp.leases 2> /dev/null chown pihole:pihole /var/log/pihole-FTL.log /var/log/pihole.log chmod 0644 /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log - echo "nameserver 127.0.0.1" | /sbin/resolvconf -a lo.piholeFTL if setcap CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN+eip "$(which pihole-FTL)"; then su -s /bin/sh -c "/usr/bin/pihole-FTL" "$FTLUSER" else @@ -62,7 +61,6 @@ start() { # Stop the service stop() { if is_running; then - /sbin/resolvconf -d lo.piholeFTL kill "$(get_pid)" for i in {1..5}; do if ! is_running; then From 3231e5c3ba833e715e8acf408b862e6b0c6fd3bd Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Dec 2019 16:52:03 +0000 Subject: [PATCH 262/366] Address stickler requests. Signed-off-by: DL6ER --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index 72f39ad4..5db14765 100755 --- a/gravity.sh +++ b/gravity.sh @@ -434,7 +434,7 @@ gravity_DownloadBlocklistFromUrl() { # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" # Add domains to database table - str="Adding to database table" + str="Adding adlist with ID ${adlistID} to database table" echo -ne " ${INFO} ${str}..." database_table_from_file "gravity" "${saveLocation}" "${adlistID}" echo -e "${OVER} ${TICK} ${str}" @@ -677,9 +677,9 @@ if [[ "${recreate_database:-}" == true ]]; then str="Restoring from migration backup" echo -ne "${INFO} ${str}..." rm "${gravityDBfile}" - pushd "${piholeDir}" > /dev/null + pushd "${piholeDir}" > /dev/null || exit cp migration_backup/* . 
- popd > /dev/null + popd > /dev/null || exit echo -e "${OVER} ${TICK} ${str}" fi From 1f03faddef51b2b942ab432fe2487917dae7ee5a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 9 Dec 2019 21:35:54 +0000 Subject: [PATCH 263/366] shell check recomends Signed-off-by: Adam Warner --- gravity.sh | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/gravity.sh b/gravity.sh index 5db14765..b04e8553 100755 --- a/gravity.sh +++ b/gravity.sh @@ -40,9 +40,6 @@ gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql" optimize_database=false domainsExtension="domains" -matterAndLight="${basename}.0.matterandlight.txt" -parsedMatter="${basename}.1.parsedmatter.txt" -preEventHorizon="list.preEventHorizon" resolver="pihole-FTL" @@ -92,7 +89,7 @@ generate_gravity_database() { update_gravity_timestamp() { # Update timestamp when the gravity table was last updated successfully - output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values (\"updated\",cast(strftime('%s', 'now') as int));"; } 2>&1 ) + output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%s', 'now') as int));"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -459,7 +456,7 @@ gravity_DownloadBlocklistFromUrl() { # Parse source files into domains format gravity_ParseFileIntoDomains() { - local source="${1}" destination="${2}" firstLine abpFilter + local source="${1}" destination="${2}" firstLine # Determine if we are parsing a consolidated list #if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then @@ -612,7 +609,7 @@ gravity_Cleanup() { # Ensure this function only runs when gravity_SetDownloadOptions() has completed if [[ "${gravity_Blackbody:-}" == true ]]; then # Remove any unused .domains files - for file in ${piholeDir}/*.${domainsExtension}; do + for file in "${piholeDir}"/*."${domainsExtension}"; do # If list is not in active array, then remove it if [[ ! 
"${activeDomains[*]}" == *"${file}"* ]]; then rm -f "${file}" 2> /dev/null || \ From d29947ba32de5313ffbd9bedb76e0f8218812d05 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 9 Dec 2019 22:30:41 +0000 Subject: [PATCH 264/366] optimise gravity list inserts Signed-off-by: Adam Warner --- gravity.sh | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/gravity.sh b/gravity.sh index b04e8553..1d9919a1 100755 --- a/gravity.sh +++ b/gravity.sh @@ -139,23 +139,29 @@ database_table_from_file() { local rowid declare -i rowid rowid=1 - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then - if [[ "${table}" == "domain_audit" ]]; then - # domain_audit table format (no enable or modified fields) - echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" - elif [[ "${table}" == "gravity" ]]; then - # gravity table format - echo "\"${domain}\",${arg}" >> "${tmpFile}" - else - # White-, black-, and regexlist format - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + + if [[ "${table}" == "gravity" ]]; then + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${arg}/" "${source}" > "${tmpFile}" + sed -i '/^$/d' "${tmpFile}" + else + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ -n "${domain}" ]]; then + if [[ "${table}" == "domain_audit" ]]; then + # domain_audit table format (no enable or modified fields) + echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" + else + # White-, black-, and regexlist format + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + fi + rowid+=1 fi - rowid+=1 - fi - done + done + fi + + inputfile="${tmpFile}" # Store domains in database table specified by ${table} From 69a909fc4cb3d061cc02649ab78ae055ef08fc67 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 11 Dec 2019 21:47:46 +0000 Subject: [PATCH 265/366] On modification of lists, we should send real-time signal 0 instead of SIGHUP. This also preserves the DNS cache of not-blocked domains. 
Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 2 +- pihole | 14 +++++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index c5bf5b2a..320d4ae8 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -257,5 +257,5 @@ echo "DONE" fi if [[ "${reload}" != false ]]; then - pihole restartdns reload + pihole restartdns reload-lists fi diff --git a/pihole b/pihole index f0195843..cc7e1b7c 100755 --- a/pihole +++ b/pihole @@ -105,17 +105,25 @@ restartDNS() { svcOption="${1:-restart}" # Determine if we should reload or restart - if [[ "${svcOption}" =~ "reload" ]]; then - # Reload has been requested + if [[ "${svcOption}" =~ "reload-lists" ]]; then + # Reloading of the lists has been requested + # Note: This will NOT re-read any *.conf files + # Note 2: We cannot use killall here as it does + # not know about real-time signals + svc="kill -SIGRTMIN $(pidof ${resolver})" + str="Reloading DNS lists" + elif [[ "${svcOption}" =~ "reload" ]]; then + # Reloading of the DNS cache has been requested # Note: This will NOT re-read any *.conf files svc="killall -s SIGHUP ${resolver}" + str="Flushing DNS cache" else # A full restart has been requested svc="service ${resolver} restart" + str="Restarting DNS server" fi # Print output to Terminal, but not to Web Admin - str="${svcOption^}ing DNS service" [[ -t 1 ]] && echo -ne " ${INFO} ${str}..." output=$( { ${svc}; } 2>&1 ) From bd1b004d9417b6578c7673fd5fc4f391534230a4 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 10:13:51 +0000 Subject: [PATCH 266/366] Remove possible duplicates found in lower-quality adlists Signed-off-by: DL6ER --- gravity.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index 1d9919a1..58d60ff4 100755 --- a/gravity.sh +++ b/gravity.sh @@ -160,10 +160,11 @@ database_table_from_file() { fi done fi - - inputfile="${tmpFile}" + # Remove possible duplicates found in lower-quality adlists + uniq -u "${inputfile}" "${inputfile}" + # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 From 570a7a5c11ab6dead9b91d62beb79ab924cb626d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 10:17:54 +0000 Subject: [PATCH 267/366] Use sort -u instead of uniq as it is guaranteed to be safe when doing inline file operations. 
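A sketch of the difference:

    uniq -u "${inputfile}" "${inputfile}"     # output file is the input file; not safe for in-place use
    sort -u -o "${inputfile}" "${inputfile}"  # sort -o is documented to be safe even when output = input

As a side effect this also fixes the deduplication itself: uniq -u drops every line that occurs more than once instead of keeping one copy of it.
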
Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 58d60ff4..49a4fc1c 100755 --- a/gravity.sh +++ b/gravity.sh @@ -163,7 +163,7 @@ database_table_from_file() { inputfile="${tmpFile}" # Remove possible duplicates found in lower-quality adlists - uniq -u "${inputfile}" "${inputfile}" + sort -u -o "${inputfile}" "${inputfile}" # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines From 779fe670f785b2a23f76f2bbbf70bc46520d9068 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 10:29:44 +0000 Subject: [PATCH 268/366] Show full URL during gravity download instead of only domain and file Signed-off-by: DL6ER --- gravity.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index 1d9919a1..c894e99a 100755 --- a/gravity.sh +++ b/gravity.sh @@ -337,7 +337,7 @@ gravity_DownloadBlocklists() { *) cmd_ext="";; esac - echo -e " ${INFO} Target: ${domain} (${url##*/})" + echo -e " ${INFO} Target: ${url}" gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" echo "" done From 922ce7359c4774122c6e47f303ac8a4358afdf56 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 10:58:41 +0000 Subject: [PATCH 269/366] pihole -q should also scan gravity table Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 78 ++++++++++++++------------------------- 1 file changed, 27 insertions(+), 51 deletions(-) mode change 100644 => 100755 advanced/Scripts/query.sh diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh old mode 100644 new mode 100755 index 1e1b159c..467fe6f4 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -53,7 +53,6 @@ Example: 'pihole -q -exact domain.com' Query the adlists for a specified domain Options: - -adlist Print the name of the block list URL -exact Search the block lists for exact domain matches -all Return all query matches within a block list -h, --help Show this help dialog" @@ -64,7 +63,6 @@ fi if [[ "${options}" == *"-bp"* ]]; then exact="exact"; blockpage=true else - [[ "${options}" == *"-adlist"* ]] && adlist=true [[ "${options}" == *"-all"* ]] && all=true if [[ "${options}" == *"-exact"* ]]; then exact="exact"; matchType="exact ${matchType}" @@ -99,10 +97,17 @@ scanDatabaseTable() { # Underscores are SQLite wildcards matching exactly one character. We obviously want to suppress this # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. 
- case "${type}" in - "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";; - * ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; - esac + if [[ "${table}" == "gravity" ]]; then + case "${type}" in + "exact" ) querystr="SELECT gravity.domain,adlist.address FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";; + * ) querystr="SELECT gravity.domain,adlist.address FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + esac + else + case "${type}" in + "exact" ) querystr="SELECT domain FROM ${table} WHERE domain = '${domain}'";; + * ) querystr="SELECT domain FROM ${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + esac + fi # Send prepared query to gravity database result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null @@ -111,6 +116,11 @@ scanDatabaseTable() { return fi + if [[ "${table}" == "gravity" ]]; then + echo "${result}" + return + fi + # Mark domain as having been white-/blacklist matched (global variable) wbMatch=true @@ -170,32 +180,15 @@ scanRegexDatabaseTable() { } # Scan Whitelist and Blacklist -scanDatabaseTable "${domainQuery}" "whitelist" "${exact}" -scanDatabaseTable "${domainQuery}" "blacklist" "${exact}" +scanDatabaseTable "${domainQuery}" "vw_whitelist" "${exact}" +scanDatabaseTable "${domainQuery}" "vw_blacklist" "${exact}" # Scan Regex table scanRegexDatabaseTable "${domainQuery}" "whitelist" scanRegexDatabaseTable "${domainQuery}" "blacklist" -# Get version sorted *.domains filenames (without dir path) -lists=("$(cd "$piholeDir" || exit 0; printf "%s\\n" -- *.domains | sort -V)") - -# Query blocklists for occurences of domain -mapfile -t results <<< "$(scanList "${domainQuery}" "${lists[*]}" "${exact}")" - -# Remove unwanted content from $results -# Each line in $results is formatted as such: [fileName]:[line] -# 1. Delete lines starting with # -# 2. Remove comments after domain -# 3. Remove hosts format IP address -# 4. 
Remove any lines that no longer contain the queried domain name (in case the matched domain name was in a comment) -esc_domain="${domainQuery//./\\.}" -mapfile -t results <<< "$(IFS=$'\n'; sed \ - -e "/:#/d" \ - -e "s/[ \\t]#.*//g" \ - -e "s/:.*[ \\t]/:/g" \ - -e "/${esc_domain}/!d" \ - <<< "${results[*]}")" +# Query block lists +mapfile -t results <<< "$(scanDatabaseTable "${domainQuery}" "gravity" "${exact}")" # Handle notices if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then @@ -210,12 +203,6 @@ elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then exit 0 fi -# Get adlist file content as array -if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then - # Retrieve source URLs from gravity database - mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" -fi - # Print "Exact matches for" title if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es" @@ -223,28 +210,17 @@ if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then fi for result in "${results[@]}"; do - fileName="${result/:*/}" - - # Determine *.domains URL using filename's number - if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then - fileNum="${fileName/list./}"; fileNum="${fileNum%%.*}" - fileName="${adlists[$fileNum]}" - - # Discrepency occurs when adlists has been modified, but Gravity has not been run - if [[ -z "${fileName}" ]]; then - fileName="${COL_LIGHT_RED}(no associated adlists URL found)${COL_NC}" - fi - fi + adlistAddress="${result/*|/}" if [[ -n "${blockpage}" ]]; then - echo "${fileNum} ${fileName}" + echo "${fileNum} ${adlistAddress}" elif [[ -n "${exact}" ]]; then - echo " ${fileName}" + echo " - ${adlistAddress}" else - if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then + if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then count="" - echo " ${matchType^} found in ${COL_BOLD}${fileName}${COL_NC}:" - fileName_prev="${fileName}" + echo " ${matchType^} found in ${COL_BOLD}${adlistAddress}${COL_NC}:" + adlistAddress_prev="${adlistAddress}" fi : $((count++)) @@ -254,7 +230,7 @@ for result in "${results[@]}"; do [[ "${count}" -gt "${max_count}" ]] && continue echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}" else - echo " ${result#*:}" + echo " ${result/*|//}" fi fi done From 52dd72dfa5f497d7c09d37b834aa0164edb66121 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 11:08:19 +0000 Subject: [PATCH 270/366] Ensure output is always correct and also display if domain has been found but is disabled Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 467fe6f4..aaaf811c 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -99,8 +99,8 @@ scanDatabaseTable() { # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. 
if [[ "${table}" == "gravity" ]]; then case "${type}" in - "exact" ) querystr="SELECT gravity.domain,adlist.address FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";; - * ) querystr="SELECT gravity.domain,adlist.address FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + "exact" ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";; + * ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac else case "${type}" in @@ -210,12 +210,20 @@ if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then fi for result in "${results[@]}"; do - adlistAddress="${result/*|/}" + match="${result/|*/}" + extra="${result#*|}" + adlistAddress="${extra/|*/}" + enabled="${extra#*|}" + if [[ "${enabled}" == "0" ]]; then + enabled="(disabled)" + else + enabled="" + fi if [[ -n "${blockpage}" ]]; then echo "${fileNum} ${adlistAddress}" elif [[ -n "${exact}" ]]; then - echo " - ${adlistAddress}" + echo " - ${adlistAddress} ${enabled}" else if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then count="" @@ -230,7 +238,7 @@ for result in "${results[@]}"; do [[ "${count}" -gt "${max_count}" ]] && continue echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}" else - echo " ${result/*|//}" + echo " ${match} ${enabled}" fi fi done From 40e8657137ede303614b88c2b39a487b17eb7d6e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 11:18:46 +0000 Subject: [PATCH 271/366] Please Mr. Stickler Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index aaaf811c..8f1c8a86 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -221,7 +221,7 @@ for result in "${results[@]}"; do fi if [[ -n "${blockpage}" ]]; then - echo "${fileNum} ${adlistAddress}" + echo "0 ${adlistAddress}" elif [[ -n "${exact}" ]]; then echo " - ${adlistAddress} ${enabled}" else From f0439c8d12be26744097d78ae3cf3d51de93c46a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 16:39:02 +0000 Subject: [PATCH 272/366] Add special group zero to gravity database. 
Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 8 +++++ .../database_migration/gravity/6_to_7.sql | 29 +++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 advanced/Scripts/database_migration/gravity/6_to_7.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 508d9c56..28054643 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -64,4 +64,12 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/5_to_6.sql" version=6 fi + if [[ "$version" == "6" ]]; then + # This migration script adds a special group with ID 0 + # which is automatically associated to all clients not + # having their own group assignments + echo -e " ${INFO} Upgrading gravity database from version 6 to 7" + sqlite3 "${database}" < "${scriptPath}/6_to_7.sql" + version=7 + fi } diff --git a/advanced/Scripts/database_migration/gravity/6_to_7.sql b/advanced/Scripts/database_migration/gravity/6_to_7.sql new file mode 100644 index 00000000..095429b6 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/6_to_7.sql @@ -0,0 +1,29 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + +INSERT INTO adlist_by_group (adlist_id, group_id) SELECT id, 0 FROM adlist; +INSERT INTO domainlist_by_group (domainlist_id, group_id) SELECT id, 0 FROM domainlist; + +CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist + BEGIN + INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist + BEGIN + INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" + BEGIN + INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + END; + +UPDATE info SET value = 7 WHERE property = 'version'; + +COMMIT; From a720fe17893ffbf48714eb11132089a7e858971e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 12 Dec 2019 22:49:21 +0000 Subject: [PATCH 273/366] Add client trigger. 
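This mirrors the adlist and domainlist triggers from the previous migration, so that newly added clients are associated with the default group 0 as well.
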
Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/6_to_7.sql | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/database_migration/gravity/6_to_7.sql b/advanced/Scripts/database_migration/gravity/6_to_7.sql index 095429b6..22d9dfaf 100644 --- a/advanced/Scripts/database_migration/gravity/6_to_7.sql +++ b/advanced/Scripts/database_migration/gravity/6_to_7.sql @@ -6,14 +6,20 @@ BEGIN TRANSACTION; INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); -INSERT INTO adlist_by_group (adlist_id, group_id) SELECT id, 0 FROM adlist; INSERT INTO domainlist_by_group (domainlist_id, group_id) SELECT id, 0 FROM domainlist; +INSERT INTO client_by_group (client_id, group_id) SELECT id, 0 FROM client; +INSERT INTO adlist_by_group (adlist_id, group_id) SELECT id, 0 FROM adlist; CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist BEGIN INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); END; +CREATE TRIGGER tr_client_add AFTER INSERT ON client + BEGIN + INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0); + END; + CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist BEGIN INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); From 4be7ebe61f50516d4ab5ac845700afafbe99da61 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 15 Dec 2019 11:46:14 +0000 Subject: [PATCH 274/366] Scan domainlist instead of view to also catch disabled domains. Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 47 ++++++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 8f1c8a86..0ba9ae40 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -88,7 +88,7 @@ if [[ -n "${str:-}" ]]; then fi scanDatabaseTable() { - local domain table type querystr result + local domain table type querystr result extra domain="$(printf "%q" "${1}")" table="${2}" type="${3:-}" @@ -98,14 +98,14 @@ scanDatabaseTable() { # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores. 
if [[ "${table}" == "gravity" ]]; then - case "${type}" in + case "${exact}" in "exact" ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";; * ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac else - case "${type}" in - "exact" ) querystr="SELECT domain FROM ${table} WHERE domain = '${domain}'";; - * ) querystr="SELECT domain FROM ${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; + case "${exact}" in + "exact" ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain = '${domain}'";; + * ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";; esac fi @@ -126,7 +126,7 @@ scanDatabaseTable() { # Print table name if [[ -z "${blockpage}" ]]; then - echo " ${matchType^} found in ${COL_BOLD}${table^}${COL_NC}" + echo " ${matchType^} found in ${COL_BOLD}exact ${table}${COL_NC}" fi # Loop over results and print them @@ -136,7 +136,13 @@ scanDatabaseTable() { echo "π ${result}" exit 0 fi - echo " ${result}" + domain="${result/|*}" + if [[ "${result#*|}" == "0" ]]; then + extra=" (disabled)" + else + extra="" + fi + echo " ${domain}${extra}" done } @@ -144,9 +150,10 @@ scanRegexDatabaseTable() { local domain list domain="${1}" list="${2}" + type="${3:-}" # Query all regex from the corresponding database tables - mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM vw_regex_${list}" 2> /dev/null) + mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = "${type} 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then @@ -159,7 +166,7 @@ scanRegexDatabaseTable() { # Split matching regexps over a new line str_regexMatches=$(printf '%s\n' "${regexMatches[@]}") # Form a "matched" message - str_message="${matchType^} found in ${COL_BOLD}Regex ${list}${COL_NC}" + str_message="${matchType^} found in ${COL_BOLD}regex ${list}${COL_NC}" # Form a "results" message str_result="${COL_BOLD}${str_regexMatches}${COL_NC}" # If we are displaying more than just the source of the block @@ -180,15 +187,15 @@ scanRegexDatabaseTable() { } # Scan Whitelist and Blacklist -scanDatabaseTable "${domainQuery}" "vw_whitelist" "${exact}" -scanDatabaseTable "${domainQuery}" "vw_blacklist" "${exact}" +scanDatabaseTable "${domainQuery}" "whitelist" "0" +scanDatabaseTable "${domainQuery}" "blacklist" "1" # Scan Regex table -scanRegexDatabaseTable "${domainQuery}" "whitelist" -scanRegexDatabaseTable "${domainQuery}" "blacklist" +scanRegexDatabaseTable "${domainQuery}" "whitelist" "2" +scanRegexDatabaseTable "${domainQuery}" "blacklist" "3" # Query block lists -mapfile -t results <<< "$(scanDatabaseTable "${domainQuery}" "gravity" "${exact}")" +mapfile -t results <<< "$(scanDatabaseTable "${domainQuery}" "gravity")" # Handle notices if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then @@ -213,17 +220,17 @@ for result in "${results[@]}"; do match="${result/|*/}" extra="${result#*|}" adlistAddress="${extra/|*/}" - enabled="${extra#*|}" - if [[ "${enabled}" == "0" ]]; then - enabled="(disabled)" + extra="${extra#*|}" + if [[ "${extra}" == "0" ]]; then + extra="(disabled)" else - enabled="" + extra="" fi if [[ -n "${blockpage}" ]]; then echo "0 ${adlistAddress}" elif [[ -n 
"${exact}" ]]; then - echo " - ${adlistAddress} ${enabled}" + echo " - ${adlistAddress} ${extra}" else if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then count="" @@ -238,7 +245,7 @@ for result in "${results[@]}"; do [[ "${count}" -gt "${max_count}" ]] && continue echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}" else - echo " ${match} ${enabled}" + echo " ${match} ${extra}" fi fi done From 2444296348d6ea1b99a01160ba5f6d3e2670c013 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 15 Dec 2019 11:55:19 +0000 Subject: [PATCH 275/366] Again, Mr. Stickler Signed-off-by: DL6ER --- advanced/Scripts/query.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 0ba9ae40..a96129e0 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -13,7 +13,6 @@ piholeDir="/etc/pihole" gravityDBfile="${piholeDir}/gravity.db" options="$*" -adlist="" all="" exact="" blockpage="" @@ -153,7 +152,7 @@ scanRegexDatabaseTable() { type="${3:-}" # Query all regex from the corresponding database tables - mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = "${type} 2> /dev/null) + mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then From 948f4a8827dfca24ce38e4b9a1a3c1d0072d847b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 16 Dec 2019 09:55:46 +0000 Subject: [PATCH 276/366] Ensure permissions and ownership of gravity.db are correctly set on each run of pihole -g. This would have prevented https://github.com/pi-hole/AdminLTE/issues/1077 Signed-off-by: DL6ER --- gravity.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gravity.sh b/gravity.sh index f2e82b43..659263b5 100755 --- a/gravity.sh +++ b/gravity.sh @@ -81,10 +81,6 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { sqlite3 "${gravityDBfile}" < "${gravityDBschema}" - - # Ensure proper permissions are set for the newly created database - chown pihole:pihole "${gravityDBfile}" - chmod g+w "${piholeDir}" "${gravityDBfile}" } update_gravity_timestamp() { @@ -690,6 +686,10 @@ fi # Move possibly existing legacy files to the gravity database migrate_to_database +# Ensure proper permissions are set for the newly created database +chown pihole:pihole "${gravityDBfile}" +chmod g+w "${piholeDir}" "${gravityDBfile}" + if [[ "${forceDelete:-}" == true ]]; then str="Deleting existing list cache" echo -ne "${INFO} ${str}..." From b32b5ad6e97044dd410d2717340631388bcb631b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 20 Dec 2019 00:09:10 +0000 Subject: [PATCH 277/366] Update gravity database to version 8. This enforces uniqueness on the group name. 
Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 7 ++++++ .../database_migration/gravity/7_to_8.sql | 23 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 advanced/Scripts/database_migration/gravity/7_to_8.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 28054643..6dc88bf3 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -72,4 +72,11 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/6_to_7.sql" version=7 fi + if [[ "$version" == "7" ]]; then + # This migration script recreated the group table + # to ensure uniqueness on the group name + echo -e " ${INFO} Upgrading gravity database from version 7 to 8" + sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" + version=7 + fi } diff --git a/advanced/Scripts/database_migration/gravity/7_to_8.sql b/advanced/Scripts/database_migration/gravity/7_to_8.sql new file mode 100644 index 00000000..8f79bde8 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/7_to_8.sql @@ -0,0 +1,23 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +ALTER TABLE "group" RENAME TO "group__"; + +CREATE TABLE "group" +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + enabled BOOLEAN NOT NULL DEFAULT 1, + name TEXT UNIQUE NOT NULL, + description TEXT +); + +INSERT OR IGNORE INTO "group" SELECT * FROM "group__"; + +DROP TABLE "group__"; + +UPDATE info SET value = 8 WHERE property = 'version'; + +COMMIT; From e589e665a785142e9c22383a6089f8810d36027b Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 20 Dec 2019 00:14:58 +0000 Subject: [PATCH 278/366] Also add date_added and date_modified fields to group table. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity-db.sh | 3 ++- advanced/Scripts/database_migration/gravity/7_to_8.sql | 9 ++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 6dc88bf3..bc650d3f 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -75,8 +75,9 @@ upgrade_gravityDB(){ if [[ "$version" == "7" ]]; then # This migration script recreated the group table # to ensure uniqueness on the group name + # We also add date_added and date_modified columns echo -e " ${INFO} Upgrading gravity database from version 7 to 8" sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" - version=7 + version=8 fi } diff --git a/advanced/Scripts/database_migration/gravity/7_to_8.sql b/advanced/Scripts/database_migration/gravity/7_to_8.sql index 8f79bde8..0e58408e 100644 --- a/advanced/Scripts/database_migration/gravity/7_to_8.sql +++ b/advanced/Scripts/database_migration/gravity/7_to_8.sql @@ -11,10 +11,17 @@ CREATE TABLE "group" id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, name TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), description TEXT ); -INSERT OR IGNORE INTO "group" SELECT * FROM "group__"; +CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" + BEGIN + UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; + END; + +INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__"; DROP TABLE "group__"; 
From eda7f40fefc4717e5cebc40f13a03cbb18ab492c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 20 Dec 2019 00:42:59 +0000 Subject: [PATCH 279/366] Reinstall trigger that prevents group zero from being deleted. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/7_to_8.sql | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/advanced/Scripts/database_migration/gravity/7_to_8.sql b/advanced/Scripts/database_migration/gravity/7_to_8.sql index 0e58408e..412475da 100644 --- a/advanced/Scripts/database_migration/gravity/7_to_8.sql +++ b/advanced/Scripts/database_migration/gravity/7_to_8.sql @@ -21,6 +21,11 @@ CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; END; +CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" + BEGIN + INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + END; + INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__"; DROP TABLE "group__"; From cda0133dd1c5876677fa8a45c5924efd6a806b9c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 21 Dec 2019 11:15:18 +0000 Subject: [PATCH 280/366] Revert "Change the regex used for domain validation" This reverts commit 76460f01e9c70905b7ac82d9a294855c6bfd19bc. Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 320d4ae8..4f2e046f 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -91,8 +91,8 @@ ValidateDomain() { if [[ ( "${typeId}" == "${regex_blacklist}" || "${typeId}" == "${regex_whitelist}" ) && "${wildcard}" == false ]]; then validDomain="${domain}" else - # Use regex to check the validity of the passed domain. 
see https://regexr.com/3abjr - validDomain=$(grep -P "^((?!-))(xn--)?[a-z0-9][a-z0-9-_]{0,61}[a-z0-9]{0,1}\.(xn--)?([a-z0-9\-]{1,61}|[a-z0-9-]{1,30}\.[a-z]{2,})$" <<< "${domain}") + validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check + validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label fi fi From aa4c0ff3290372b19071d4dd3316fec2aa2135fe Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 29 Dec 2019 20:35:11 +0000 Subject: [PATCH 281/366] Don't create trigger with duplicate name until after old table is deleted Signed-off-by: Adam Warner --- advanced/Scripts/database_migration/gravity/7_to_8.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity/7_to_8.sql b/advanced/Scripts/database_migration/gravity/7_to_8.sql index 412475da..ccf0c148 100644 --- a/advanced/Scripts/database_migration/gravity/7_to_8.sql +++ b/advanced/Scripts/database_migration/gravity/7_to_8.sql @@ -21,15 +21,15 @@ CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; END; +INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__"; + +DROP TABLE "group__"; + CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" BEGIN INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); END; -INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__"; - -DROP TABLE "group__"; - UPDATE info SET value = 8 WHERE property = 'version'; COMMIT; From c944f6a320704562dcb2c49502ce0d89aa8fdad3 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 29 Dec 2019 23:32:31 +0000 Subject: [PATCH 282/366] Add a new migration script to fix the previous one Signed-off-by: Adam Warner --- .../Scripts/database_migration/gravity-db.sh | 7 +++++ .../database_migration/gravity/8_to_9.sql | 26 +++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 advanced/Scripts/database_migration/gravity/8_to_9.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index bc650d3f..184b3a4a 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -80,4 +80,11 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" version=8 fi + if [[ "$version" == "8" ]]; then + # This migration fixes some issues that were introduced + # in the previous migration script. 
+ echo -e " ${INFO} Upgrading gravity database from version 8 to 9" + sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" + version=9 + fi } diff --git a/advanced/Scripts/database_migration/gravity/8_to_9.sql b/advanced/Scripts/database_migration/gravity/8_to_9.sql new file mode 100644 index 00000000..fa5d43a6 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/8_to_9.sql @@ -0,0 +1,26 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +DROP TRIGGER IF EXISTS tr_group_update; +DROP TRIGGER IF EXISTS tr_group_zero; + +PRAGMA legacy_alter_table=ON; +ALTER TABLE "group" RENAME TO "group__"; +PRAGMA legacy_alter_table=OFF; +ALTER TABLE "group__" RENAME TO "group"; + +CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" + BEGIN + UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; + END; + +CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" + BEGIN + INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + END; + + +COMMIT; From bb30c818abea6f38c7b637b84f65d88e0f403cde Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 30 Dec 2019 09:21:30 +0000 Subject: [PATCH 283/366] Update database version during migration. Signed-off-by: DL6ER --- advanced/Scripts/database_migration/gravity/8_to_9.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Scripts/database_migration/gravity/8_to_9.sql b/advanced/Scripts/database_migration/gravity/8_to_9.sql index fa5d43a6..0d873e2a 100644 --- a/advanced/Scripts/database_migration/gravity/8_to_9.sql +++ b/advanced/Scripts/database_migration/gravity/8_to_9.sql @@ -22,5 +22,6 @@ CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); END; +UPDATE info SET value = 9 WHERE property = 'version'; COMMIT; From 9dff55b2126e3c0d3bc8a2f9bced4f216c0fae1e Mon Sep 17 00:00:00 2001 From: MichaIng Date: Thu, 29 Aug 2019 13:13:44 +0200 Subject: [PATCH 284/366] Installer | Remove "dialog" from Debian/Ubuntu installer deps + The installer uses `whiptail`, thus `dialog` is not required. Signed-off-by: MichaIng --- automated install/basic-install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 30beda91..2b01fcbf 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -188,10 +188,10 @@ if is_command apt-get ; then # grep -c will return 1 retVal on 0 matches, block this throwing the set -e with an OR TRUE PKG_COUNT="${PKG_MANAGER} -s -o Debug::NoLocking=true upgrade | grep -c ^Inst || true" # Some distros vary slightly so these fixes for dependencies may apply - # on Ubuntu 18.04.1 LTS we need to add the universe repository to gain access to dialog and dhcpcd5 + # on Ubuntu 18.04.1 LTS we need to add the universe repository to gain access to dhcpcd5 APT_SOURCES="/etc/apt/sources.list" if awk 'BEGIN{a=1;b=0}/bionic main/{a=0}/bionic.*universe/{b=1}END{exit a + b}' ${APT_SOURCES}; then - if ! whiptail --defaultno --title "Dependencies Require Update to Allowed Repositories" --yesno "Would you like to enable 'universe' repository?\\n\\nThis repository is required by the following packages:\\n\\n- dhcpcd5\\n- dialog" "${r}" "${c}"; then + if ! 
whiptail --defaultno --title "Dependencies Require Update to Allowed Repositories" --yesno "Would you like to enable 'universe' repository?\\n\\nThis repository is required by the following packages:\\n\\n- dhcpcd5" "${r}" "${c}"; then printf " %b Aborting installation: dependencies could not be installed.\\n" "${CROSS}" exit # exit the installer else @@ -242,7 +242,7 @@ if is_command apt-get ; then fi # Since our install script is so large, we need several other programs to successfully get a machine provisioned # These programs are stored in an array so they can be looped through later - INSTALLER_DEPS=(apt-utils dialog debconf dhcpcd5 git "${iproute_pkg}" whiptail) + INSTALLER_DEPS=(apt-utils debconf dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed From ebb1a730c11e2f516a072decf57a506e9d9a2960 Mon Sep 17 00:00:00 2001 From: bcambl Date: Sun, 15 Sep 2019 16:04:35 -0600 Subject: [PATCH 285/366] remove unused fedora/centos dependency: dialog Signed-off-by: bcambl --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 2b01fcbf..91858f0b 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -285,7 +285,7 @@ elif is_command rpm ; then UPDATE_PKG_CACHE=":" PKG_INSTALL=("${PKG_MANAGER}" install -y) PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" - INSTALLER_DEPS=(dialog git iproute newt procps-ng which chkconfig) + INSTALLER_DEPS=(git iproute newt procps-ng which chkconfig) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml) LIGHTTPD_USER="lighttpd" From 07cc5b501cef7c3ce1df6a11d17a87d13bfb5561 Mon Sep 17 00:00:00 2001 From: bcambl Date: Sun, 13 Oct 2019 11:44:15 -0600 Subject: [PATCH 286/366] replace debconf-apt-progress with apt-get in install_dependent_packages() Removes the need for conditional debconf-apt-progress dependency checking Signed-off-by: bcambl --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 91858f0b..9352fcb0 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1632,7 +1632,7 @@ install_dependent_packages() { # amount of download traffic. # NOTE: We may be able to use this installArray in the future to create a list of package that were # installed by us, and remove only the installed packages, and not the entire list. - if is_command debconf-apt-progress ; then + if is_command apt-get ; then # For each package, for i in "$@"; do printf " %b Checking for %s..." 
"${INFO}" "${i}" @@ -1645,7 +1645,7 @@ install_dependent_packages() { done if [[ "${#installArray[@]}" -gt 0 ]]; then test_dpkg_lock - debconf-apt-progress -- "${PKG_INSTALL[@]}" "${installArray[@]}" + "${PKG_INSTALL[@]}" "${installArray[@]}" return fi printf "\\n" From cbb146101021dc7109ee117c953febeec0d31317 Mon Sep 17 00:00:00 2001 From: bcambl Date: Wed, 1 Jan 2020 12:41:33 -0600 Subject: [PATCH 287/366] add stdout horizontal rule to install_dependent_packages() Signed-off-by: bcambl --- automated install/basic-install.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 9352fcb0..39f3a53c 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1645,7 +1645,10 @@ install_dependent_packages() { done if [[ "${#installArray[@]}" -gt 0 ]]; then test_dpkg_lock + printf " %b Processing %s install(s) for: %s, please wait...\\n" "${INFO}" "${PKG_MANAGER}" "${installArray[*]}" + printf '%*s\n' "$columns" '' | tr " " -; "${PKG_INSTALL[@]}" "${installArray[@]}" + printf '%*s\n' "$columns" '' | tr " " -; return fi printf "\\n" @@ -1663,7 +1666,10 @@ install_dependent_packages() { fi done if [[ "${#installArray[@]}" -gt 0 ]]; then - "${PKG_INSTALL[@]}" "${installArray[@]}" &> /dev/null + printf " %b Processing %s install(s) for: %s, please wait...\\n" "${INFO}" "${PKG_MANAGER}" "${installArray[*]}" + printf '%*s\n' "$columns" '' | tr " " -; + "${PKG_INSTALL[@]}" "${installArray[@]}" + printf '%*s\n' "$columns" '' | tr " " -; return fi printf "\\n" From 60c51886e07f2e1c235d43eba9c06642be7a0a8d Mon Sep 17 00:00:00 2001 From: bcambl Date: Wed, 1 Jan 2020 13:23:48 -0600 Subject: [PATCH 288/366] remove unused debian deps (apt-utils debconf) Signed-off-by: bcambl --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 39f3a53c..4ba5de06 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -242,7 +242,7 @@ if is_command apt-get ; then fi # Since our install script is so large, we need several other programs to successfully get a machine provisioned # These programs are stored in an array so they can be looped through later - INSTALLER_DEPS=(apt-utils debconf dhcpcd5 git "${iproute_pkg}" whiptail) + INSTALLER_DEPS=(dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed From ec8f4050d0625fa19b170c7e851cdee7a28f9b97 Mon Sep 17 00:00:00 2001 From: bcambl Date: Wed, 1 Jan 2020 17:29:10 -0600 Subject: [PATCH 289/366] Update installer to support CentOS 8 PHP dependency php-json is now required for both the latest Fedora and CentOS. Package php-json will now be a default web dependency and removed from PIHOLE_WEB_DEPS when installing on CentOS7. 
Signed-off-by: bcambl --- automated install/basic-install.sh | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 30beda91..b7326df5 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -287,15 +287,14 @@ elif is_command rpm ; then PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(dialog git iproute newt procps-ng which chkconfig) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) - PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml) + PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json) LIGHTTPD_USER="lighttpd" LIGHTTPD_GROUP="lighttpd" LIGHTTPD_CFG="lighttpd.conf.fedora" # If the host OS is Fedora, if grep -qiE 'fedora|fedberry' /etc/redhat-release; then # all required packages should be available by default with the latest fedora release - # ensure 'php-json' is installed on Fedora (installed as dependency on CentOS7 + Remi repository) - PIHOLE_WEB_DEPS+=('php-json') + : # continue # or if host OS is CentOS, elif grep -qiE 'centos|scientific' /etc/redhat-release; then # Pi-Hole currently supports CentOS 7+ with PHP7+ @@ -310,7 +309,21 @@ elif is_command rpm ; then # exit the installer exit fi - # on CentOS we need to add the EPEL repository to gain access to Fedora packages + # php-json is not required on CentOS 7 as it is already compiled into php + # verifiy via `php -m | grep json` + if [[ $CURRENT_CENTOS_VERSION -eq 7 ]]; then + # create a temporary array as arrays are not designed for use as mutable data structures + CENTOS7_PIHOLE_WEB_DEPS=() + for i in "${!PIHOLE_WEB_DEPS[@]}"; do + if [[ ${PIHOLE_WEB_DEPS[i]} != "php-json" ]]; then + CENTOS7_PIHOLE_WEB_DEPS+=( "${PIHOLE_WEB_DEPS[i]}" ) + fi + done + # re-assign the clean dependency array back to PIHOLE_WEB_DEPS + PIHOLE_WEB_DEPS=("${CENTOS7_PIHOLE_WEB_DEPS[@]}") + unset CENTOS7_PIHOLE_WEB_DEPS + fi + # CentOS requires the EPEL repository to gain access to Fedora packages EPEL_PKG="epel-release" rpm -q ${EPEL_PKG} &> /dev/null || rc=$? if [[ $rc -ne 0 ]]; then From cfa909a93d4ec566fa8375e700b6d87d448eb117 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 12 Jan 2020 14:09:14 +0100 Subject: [PATCH 290/366] Add package php-intl for AdminLTE#1130. 
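The module is added to both dependency arrays, i.e. "${phpVer}-intl" on Debian/Ubuntu and php-intl on Fedora/CentOS; presence of the extension can be checked analogously to the other PHP modules, e.g. with php -m | grep intl.
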
Signed-off-by: DL6ER --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index df6c532d..6b0927de 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -247,7 +247,7 @@ if is_command apt-get ; then PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml") + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "${phpVer}-intl") # The Web server user, LIGHTTPD_USER="www-data" # group, @@ -287,7 +287,7 @@ elif is_command rpm ; then PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(git iproute newt procps-ng which chkconfig) PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) - PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json) + PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json php-intl) LIGHTTPD_USER="lighttpd" LIGHTTPD_GROUP="lighttpd" LIGHTTPD_CFG="lighttpd.conf.fedora" From 8f22203d248ba1a80ec860dbb3003b1413139bd2 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 14 Jan 2020 19:57:45 +0100 Subject: [PATCH 291/366] Wait 30 seconds for obtaining a database lock instead of immediately failing if the database is busy. Signed-off-by: DL6ER --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index 659263b5..105febb7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -85,7 +85,7 @@ generate_gravity_database() { update_gravity_timestamp() { # Update timestamp when the gravity table was last updated successfully - output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%s', 'now') as int));"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -99,7 +99,7 @@ database_truncate_table() { local table table="${1}" - output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nDELETE FROM %s;" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -164,7 +164,7 @@ database_table_from_file() { # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 10000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then From 276b19184500b65eb34c9d480fe641373dba28f7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 19 Jan 2020 21:39:49 +0100 Subject: [PATCH 292/366] Remove dead code causing failure from the blocking page. 
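The 30-second lock handling introduced in the patch above relies on sqlite3's .timeout dot-command, which has to arrive on the same input stream as the SQL statement; that is why the here-strings were replaced with a printf pipe. A self-contained sketch against a throwaway database (path is illustrative):

    db="/tmp/timeout-example.db"
    sqlite3 "${db}" "CREATE TABLE IF NOT EXISTS info (property TEXT PRIMARY KEY, value TEXT NOT NULL);"
    # .timeout makes sqlite3 retry for up to 30 s instead of failing with "database is locked";
    # %% collapses to a single % so strftime('%s', ...) reaches sqlite3 intact.
    printf ".timeout 30000\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" \
        | sqlite3 "${db}"
    sqlite3 "${db}" "SELECT * FROM info;"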
Signed-off-by: DL6ER --- advanced/index.php | 6 ------ 1 file changed, 6 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index 62e45091..b0c4a7c3 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -96,12 +96,6 @@ if ($serverName === "pi.hole") { // Define admin email address text based off $svEmail presence $bpAskAdmin = !empty($svEmail) ? '' : ""; -// Determine if at least one block list has been generated -$blocklistglob = glob("/etc/pihole/list.0.*.domains"); -if ($blocklistglob === array()) { - die("[ERROR] There are no domain lists generated lists within /etc/pihole/! Please update gravity by running pihole -g, or repair Pi-hole using pihole -r."); -} - // Get possible non-standard location of FTL's database $FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf"); if (isset($FTLsettings["GRAVITYDB"])) { From 633e56e8a99f54970d8ed7f3bf3a25c53430003d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 20 Jan 2020 17:59:24 +0100 Subject: [PATCH 293/366] Add gravity database 9->10 update script. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 7 +++++ .../database_migration/gravity/9_to_10.sql | 29 +++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 advanced/Scripts/database_migration/gravity/9_to_10.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 184b3a4a..6a51e353 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -87,4 +87,11 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" version=9 fi + if [[ "$version" == "9" ]]; then + # This migration drops unused tables and creates triggers to remove + # obsolete groups assignments when the linked items are deleted + echo -e " ${INFO} Upgrading gravity database from version 9 to 10" + sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" + version=10 + fi } diff --git a/advanced/Scripts/database_migration/gravity/9_to_10.sql b/advanced/Scripts/database_migration/gravity/9_to_10.sql new file mode 100644 index 00000000..a5636a23 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/9_to_10.sql @@ -0,0 +1,29 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +DROP TABLE IF EXISTS whitelist; +DROP TABLE IF EXISTS blacklist; +DROP TABLE IF EXISTS regex_whitelist; +DROP TABLE IF EXISTS regex_blacklist; + +CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist + BEGIN + DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id; + END; + +CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist + BEGIN + DELETE FROM adlist_by_group WHERE adlist_id = OLD.id; + END; + +CREATE TRIGGER tr_client_delete AFTER DELETE ON client + BEGIN + DELETE FROM client_by_group WHERE client_id = OLD.id; + END; + +UPDATE info SET value = 10 WHERE property = 'version'; + +COMMIT; From 3f9e79f152b404e9d92d786977e796d510a5e63a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 20 Jan 2020 20:07:25 +0100 Subject: [PATCH 294/366] Print human-readable timestamps in the debugger's gravity output Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 84e34416..76a409f9 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1105,7 +1105,7 @@ show_db_entries() { } show_groups() { - show_db_entries "Groups" "SELECT * FROM \"group\"" "4 4 30 
50" + show_db_entries "Groups" "SELECT id,name,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,description FROM \"group\"" "4 50 7 19 19 50" } show_adlists() { From a8096243569d3bb91e1c68204d65f569e9833ddb Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 23 Jan 2020 19:18:22 +0100 Subject: [PATCH 295/366] Update blocked strings for pihole -t. Signed-off-by: DL6ER --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index cc7e1b7c..e1758645 100755 --- a/pihole +++ b/pihole @@ -307,7 +307,7 @@ tailFunc() { # Colour everything else as gray tail -f /var/log/pihole.log | sed -E \ -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ - -e "s,(.*(gravity |black |regex | config ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ + -e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ -e "s,.*,${COL_GRAY}&${COL_NC}," exit 0 From 10c2dad48ad2e600d016004166ab5d88bec16424 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 24 Jan 2020 18:39:13 +0100 Subject: [PATCH 296/366] Improve gravity performance (#3100) * Gravity performance improvements. Signed-off-by: DL6ER * Do not move downloaded lists into migration_backup directory. Signed-off-by: DL6ER * Do not (strictly) sort domains. Random-leaf access is faster than always-last-leaf access (on average). Signed-off-by: DL6ER * Append instead of overwrite gravity_new collection list. Signed-off-by: DL6ER * Rename table gravity_new to gravity_temp to clarify that this is only an intermediate table. Signed-off-by: DL6ER * Add timers for each of the calls to compute intense parts. They are to be removed before this finally hits the release/v5.0 branch. Signed-off-by: DL6ER * Fix legacy list files import. It currently doesn't work when the gravity database has already been updated to using the single domainlist table. Signed-off-by: DL6ER * Simplify database_table_from_file(), remove all to this function for gravity lost downloads. Signed-off-by: DL6ER * Update gravity.db.sql to version 10 to have newle created databases already reflect the most recent state. Signed-off-by: DL6ER * Create second gravity database and swap them on success. This has a number of advantages such as instantaneous gravity updates (as seen from FTL) and always available gravity blocking. Furthermore, this saves disk space as the old database is removed on completion. * Add timing output for the database swapping SQLite3 call. Signed-off-by: DL6ER * Explicitly generate index as a separate process. Signed-off-by: DL6ER * Remove time measurements. 
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 190 ++++++++++++++---------- advanced/Templates/gravity_copy.sql | 21 +++ gravity.sh | 219 +++++++++++++++++----------- 3 files changed, 272 insertions(+), 158 deletions(-) create mode 100644 advanced/Templates/gravity_copy.sql diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index d0c744f4..a7dc12df 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,16 +1,21 @@ -PRAGMA FOREIGN_KEYS=ON; +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; CREATE TABLE "group" ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, - name TEXT NOT NULL, + name TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), description TEXT ); +INSERT INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); -CREATE TABLE whitelist +CREATE TABLE domainlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, + type INTEGER NOT NULL DEFAULT 0, domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), @@ -18,125 +23,158 @@ CREATE TABLE whitelist comment TEXT ); -CREATE TABLE whitelist_by_group -( - whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), - group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (whitelist_id, group_id) -); - -CREATE TABLE blacklist +CREATE TABLE adlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, + address TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), comment TEXT ); -CREATE TABLE blacklist_by_group +CREATE TABLE adlist_by_group ( - blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), + adlist_id INTEGER NOT NULL REFERENCES adlist (id), group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (blacklist_id, group_id) + PRIMARY KEY (adlist_id, group_id) ); -CREATE TABLE regex +CREATE TABLE gravity ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + domain TEXT NOT NULL, + adlist_id INTEGER NOT NULL REFERENCES adlist (id) ); -CREATE TABLE regex_by_group +CREATE TABLE info ( - regex_id INTEGER NOT NULL REFERENCES regex (id), - group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (regex_id, group_id) + property TEXT PRIMARY KEY, + value TEXT NOT NULL ); -CREATE TABLE adlist +INSERT INTO "info" VALUES('version','10'); + +CREATE TABLE domain_audit ( id INTEGER PRIMARY KEY AUTOINCREMENT, - address TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT + domain TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)) ); -CREATE TABLE adlist_by_group +CREATE TABLE domainlist_by_group ( - adlist_id INTEGER NOT NULL REFERENCES adlist (id), + domainlist_id INTEGER NOT NULL REFERENCES domainlist (id), group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (adlist_id, group_id) + PRIMARY KEY 
(domainlist_id, group_id) ); -CREATE TABLE gravity +CREATE TABLE client ( - domain TEXT PRIMARY KEY + id INTEGER PRIMARY KEY AUTOINCREMENT, + ip TEXT NOL NULL UNIQUE ); -CREATE TABLE info +CREATE TABLE client_by_group ( - property TEXT PRIMARY KEY, - value TEXT NOT NULL + client_id INTEGER NOT NULL REFERENCES client (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (client_id, group_id) ); -INSERT INTO info VALUES("version","1"); +CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist + BEGIN + UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; + END; + +CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist + BEGIN + UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; + +CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 0 + ORDER BY domainlist.id; + +CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 1 + ORDER BY domainlist.id; + +CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 2 + ORDER BY domainlist.id; + +CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 3 + ORDER BY domainlist.id; + +CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id + FROM gravity + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id + LEFT JOIN adlist ON adlist.id = gravity.adlist_id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1); + +CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id + FROM adlist + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY adlist.id; -CREATE VIEW vw_whitelist AS SELECT DISTINCT domain - FROM whitelist - LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id - LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND (whitelist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY 
whitelist.id; +CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist + BEGIN + INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); + END; -CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist +CREATE TRIGGER tr_client_add AFTER INSERT ON client BEGIN - UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0); END; -CREATE VIEW vw_blacklist AS SELECT DISTINCT domain - FROM blacklist - LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id - LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id - WHERE blacklist.enabled = 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY blacklist.id; +CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist + BEGIN + INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); + END; -CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist +CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" BEGIN - UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; END; -CREATE VIEW vw_regex AS SELECT DISTINCT domain - FROM regex - LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id - LEFT JOIN "group" ON "group".id = regex_by_group.group_id - WHERE regex.enabled = 1 AND (regex_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY regex.id; +CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" + BEGIN + INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + END; -CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex +CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist BEGIN - UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id; END; -CREATE VIEW vw_adlist AS SELECT DISTINCT address - FROM adlist - LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id - LEFT JOIN "group" ON "group".id = adlist_by_group.group_id - WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY adlist.id; +CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist + BEGIN + DELETE FROM adlist_by_group WHERE adlist_id = OLD.id; + END; -CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist +CREATE TRIGGER tr_client_delete AFTER DELETE ON client BEGIN - UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; + DELETE FROM client_by_group WHERE client_id = OLD.id; END; -CREATE VIEW vw_gravity AS SELECT domain - FROM gravity - WHERE domain NOT IN (SELECT domain from vw_whitelist); +COMMIT; diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql new file mode 100644 index 00000000..e14d9d8c --- /dev/null +++ b/advanced/Templates/gravity_copy.sql @@ -0,0 +1,21 @@ +.timeout 30000 + +ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD; + +BEGIN TRANSACTION; + +INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; +INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; + +INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist; +INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group; + +INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist; +INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group; + 
+INSERT OR REPLACE INTO info SELECT * FROM OLD.info; + +INSERT OR REPLACE INTO client SELECT * FROM OLD.client; +INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; + +COMMIT; diff --git a/gravity.sh b/gravity.sh index 105febb7..26bedae7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -36,7 +36,9 @@ VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" gravityDBfile="${piholeDir}/gravity.db" +gravityTEMPfile="${piholeDir}/gravity_temp.db" gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql" +gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql" optimize_database=false domainsExtension="domains" @@ -80,31 +82,49 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { - sqlite3 "${gravityDBfile}" < "${gravityDBschema}" + sqlite3 "${1}" < "${gravityDBschema}" } -update_gravity_timestamp() { - # Update timestamp when the gravity table was last updated successfully - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) +# Copy data from old to new database file and swap them +gravity_swap_databases() { + local str + str="Building tree" + echo -ne " ${INFO} ${str}..." + + # The index is intentionally not UNIQUE as prro quality adlists may contain domains more than once + output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n ${output}" return 1 fi - return 0 -} + echo -e "${OVER} ${TICK} ${str}" -database_truncate_table() { - local table - table="${1}" + str="Swapping databases" + echo -ne " ${INFO} ${str}..." - output=$( { printf ".timeout 30000\\nDELETE FROM %s;" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" + echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}" + return 1 + fi + echo -e "${OVER} ${TICK} ${str}" + + # Swap databases and remove old database + rm "${gravityDBfile}" + mv "${gravityTEMPfile}" "${gravityDBfile}" +} + +# Update timestamp when the gravity table was last updated successfully +update_gravity_timestamp() { + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + status="$?" 
+ + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n ${output}" return 1 fi return 0 @@ -113,73 +133,80 @@ database_truncate_table() { # Import domains from file and store them in the specified database table database_table_from_file() { # Define locals - local table source backup_path backup_file arg + local table source backup_path backup_file tmpFile type table="${1}" source="${2}" - arg="${3}" backup_path="${piholeDir}/migration_backup" backup_file="${backup_path}/$(basename "${2}")" - - # Truncate table only if not gravity (we add multiple times to this table) - if [[ "${table}" != "gravity" ]]; then - database_truncate_table "${table}" - fi - - local tmpFile tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")" + local timestamp timestamp="$(date --utc +'%s')" - local inputfile - # Apply format for white-, blacklist, regex, and adlist tables - # Read file line by line + local rowid declare -i rowid rowid=1 - if [[ "${table}" == "gravity" ]]; then - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${arg}/" "${source}" > "${tmpFile}" - sed -i '/^$/d' "${tmpFile}" - else - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then - if [[ "${table}" == "domain_audit" ]]; then - # domain_audit table format (no enable or modified fields) - echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" - else - # White-, black-, and regexlist format - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" - fi - rowid+=1 - fi - done + # Special handling for domains to be imported into the common domainlist table + if [[ "${table}" == "whitelist" ]]; then + type="0" + table="domainlist" + elif [[ "${table}" == "blacklist" ]]; then + type="1" + table="domainlist" + elif [[ "${table}" == "regex" ]]; then + type="3" + table="domainlist" fi - inputfile="${tmpFile}" - # Remove possible duplicates found in lower-quality adlists - sort -u -o "${inputfile}" "${inputfile}" + # Get MAX(id) from domainlist when INSERTing into this table + if [[ "${table}" == "domainlist" ]]; then + rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" + if [[ -z "$rowid" ]]; then + rowid=0 + fi + rowid+=1 + fi + + # Loop over all domains in ${source} file + # Read file line by line + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ -n "${domain}" ]]; then + if [[ "${table}" == "domain_audit" ]]; then + # domain_audit table format (no enable or modified fields) + echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" + elif [[ "${table}" == "adlist" ]]; then + # Adlist table format + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + else + # White-, black-, and regexlist table format + echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + fi + rowid+=1 + fi + done # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to fill table ${table} in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" fi - # Delete tmpfile - rm "${tmpFile}" > /dev/null 2>&1 || \ - echo -e " ${CROSS} Unable to remove ${tmpFile}" - # Move source file to backup directory, create directory if not existing mkdir -p "${backup_path}" mv "${source}" "${backup_file}" 2> /dev/null || \ echo -e " ${CROSS} Unable to backup ${source} to ${backup_path}" + + # Delete tmpFile + rm "${tmpFile}" > /dev/null 2>&1 || \ + echo -e " ${CROSS} Unable to remove ${tmpFile}" } # Migrate pre-v5.0 list files to database-based Pi-hole versions @@ -188,7 +215,10 @@ migrate_to_database() { if [ ! -e "${gravityDBfile}" ]; then # Create new database file - note that this will be created in version 1 echo -e " ${INFO} Creating new gravity database" - generate_gravity_database + generate_gravity_database "${gravityDBfile}" + + # Check if gravity database needs to be updated + upgrade_gravityDB "${gravityDBfile}" "${piholeDir}" # Migrate list files to new database if [ -e "${adListFile}" ]; then @@ -306,16 +336,25 @@ gravity_DownloadBlocklists() { return 1 fi - local url domain agent cmd_ext str + local url domain agent cmd_ext str target echo "" - # Flush gravity table once before looping over sources - str="Flushing gravity table" + # Prepare new gravity database + str="Preparing new gravity database" echo -ne " ${INFO} ${str}..." - if database_truncate_table "gravity"; then + rm "${gravityTEMPfile}" > /dev/null 2>&1 + output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create new database ${gravityTEMPfile}\\n ${output}" + gravity_Cleanup "error" + else echo -e "${OVER} ${TICK} ${str}" fi + target="$(mktemp -p "/tmp" --suffix=".gravity")" + # Loop through $sources and download each one for ((i = 0; i < "${#sources[@]}"; i++)); do url="${sources[$i]}" @@ -335,15 +374,32 @@ gravity_DownloadBlocklists() { esac echo -e " ${INFO} Target: ${url}" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" echo "" done + + str="Storing downloaded domains in new gravity database" + echo -ne " ${INFO} ${str}..." + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + status="$?" 
+ + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n ${output}" + gravity_Cleanup "error" + else + echo -e "${OVER} ${TICK} ${str}" + fi + + rm "${target}" > /dev/null 2>&1 || \ + echo -e " ${CROSS} Unable to remove ${target}" + gravity_Blackbody=true } # Download specified URL and perform checks on HTTP status and file content gravity_DownloadBlocklistFromUrl() { - local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" heisenbergCompensator="" patternBuffer str httpCode success="" + local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" + local heisenbergCompensator="" patternBuffer str httpCode success="" # Create temp file to store content on disk instead of RAM patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb") @@ -424,20 +480,15 @@ gravity_DownloadBlocklistFromUrl() { # Determine if the blocklist was downloaded and saved correctly if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then - # Add domains to database table - str="Adding adlist with ID ${adlistID} to database table" - echo -ne " ${INFO} ${str}..." - database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + # Add domains to database table file + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" # Check if $patternbuffer is a non-zero length file elif [[ -s "${patternBuffer}" ]]; then # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" - # Add domains to database table - str="Adding adlist with ID ${adlistID} to database table" - echo -ne " ${INFO} ${str}..." - database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" else # Fall back to previously cached list if $patternBuffer is empty echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" @@ -446,11 +497,8 @@ gravity_DownloadBlocklistFromUrl() { # Determine if cached list has read permission if [[ -r "${saveLocation}" ]]; then echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" - # Add domains to database table - str="Adding to database table" - echo -ne " ${INFO} ${str}..." - database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" else echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" fi @@ -686,10 +734,6 @@ fi # Move possibly existing legacy files to the gravity database migrate_to_database -# Ensure proper permissions are set for the newly created database -chown pihole:pihole "${gravityDBfile}" -chmod g+w "${piholeDir}" "${gravityDBfile}" - if [[ "${forceDelete:-}" == true ]]; then str="Deleting existing list cache" echo -ne "${INFO} ${str}..." 
@@ -704,15 +748,26 @@ gravity_DownloadBlocklists # Create local.list gravity_generateLocalList -gravity_ShowCount +# Update gravity timestamp update_gravity_timestamp -gravity_Cleanup -echo "" +# Migrate rest of the data from old to new database +gravity_swap_databases + +# Ensure proper permissions are set for the database +chown pihole:pihole "${gravityDBfile}" +chmod g+w "${piholeDir}" "${gravityDBfile}" # Determine if DNS has been restarted by this instance of gravity if [[ -z "${dnsWasOffline:-}" ]]; then "${PIHOLE_COMMAND}" restartdns reload fi + +# Compute numbers to be displayed +gravity_ShowCount + +gravity_Cleanup +echo "" + "${PIHOLE_COMMAND}" status From 6b04997fc3d182ef1463d1c2955deed5a505c90e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 27 Jan 2020 10:12:05 +0000 Subject: [PATCH 297/366] DROP and reCREATE TRIGGERs during gravity swapping. Signed-off-by: DL6ER --- advanced/Templates/gravity_copy.sql | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql index e14d9d8c..4a2a9b22 100644 --- a/advanced/Templates/gravity_copy.sql +++ b/advanced/Templates/gravity_copy.sql @@ -4,6 +4,10 @@ ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD; BEGIN TRANSACTION; +DROP TRIGGER tr_domainlist_add; +DROP TRIGGER tr_client_add; +DROP TRIGGER tr_adlist_add; + INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; @@ -18,4 +22,21 @@ INSERT OR REPLACE INTO info SELECT * FROM OLD.info; INSERT OR REPLACE INTO client SELECT * FROM OLD.client; INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; + +CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist + BEGIN + INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_client_add AFTER INSERT ON client + BEGIN + INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist + BEGIN + INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); + END; + + COMMIT; From 92aa510bdabe457598e40df983c01863195a2aa1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 27 Jan 2020 10:36:16 +0000 Subject: [PATCH 298/366] Add timestamps and comment fields to clients. This updates the gravity database to version 11. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 10 ++++++++++ .../database_migration/gravity/10_to_11.sql | 16 ++++++++++++++++ advanced/Templates/gravity.db.sql | 12 ++++++++++-- 3 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 advanced/Scripts/database_migration/gravity/10_to_11.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 6a51e353..8a669429 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -94,4 +94,14 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" version=10 fi + if [[ "$version" == "10" ]]; then + # This adds timestamp and an optional comment field to the client table + # These fields are only temporary and will be replaces by the columns + # defined in gravity.db.sql during gravity swapping. We add them here + # to keep the copying process generic (needs the same columns in both the + # source and the destination databases). 
+ echo -e " ${INFO} Upgrading gravity database from version 10 to 11" + sqlite3 "${database}" < "${scriptPath}/10_to_11.sql" + version=11 + fi } diff --git a/advanced/Scripts/database_migration/gravity/10_to_11.sql b/advanced/Scripts/database_migration/gravity/10_to_11.sql new file mode 100644 index 00000000..b073f83b --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/10_to_11.sql @@ -0,0 +1,16 @@ +.timeout 30000 + +BEGIN TRANSACTION; + +ALTER TABLE client ADD COLUMN date_added INTEGER; +ALTER TABLE client ADD COLUMN date_modified INTEGER; +ALTER TABLE client ADD COLUMN comment TEXT; + +CREATE TRIGGER tr_client_update AFTER UPDATE ON client + BEGIN + UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; + END; + +UPDATE info SET value = 11 WHERE property = 'version'; + +COMMIT; diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index a7dc12df..e543bd19 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -52,7 +52,7 @@ CREATE TABLE info value TEXT NOT NULL ); -INSERT INTO "info" VALUES('version','10'); +INSERT INTO "info" VALUES('version','11'); CREATE TABLE domain_audit ( @@ -71,7 +71,10 @@ CREATE TABLE domainlist_by_group CREATE TABLE client ( id INTEGER PRIMARY KEY AUTOINCREMENT, - ip TEXT NOL NULL UNIQUE + ip TEXT NOL NULL UNIQUE, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT ); CREATE TABLE client_by_group @@ -86,6 +89,11 @@ CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; END; +CREATE TRIGGER tr_client_update AFTER UPDATE ON client + BEGIN + UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE ip = NEW.ip; + END; + CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist BEGIN UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; From 2a5cf221fa54609472f29f5294d3d716db50e268 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 2 Feb 2020 23:46:33 +0100 Subject: [PATCH 299/366] Store number of distinct gravity domains in database after counting. Signed-off-by: DL6ER --- gravity.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/gravity.sh b/gravity.sh index 26bedae7..f4cbe78d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -583,6 +583,7 @@ gravity_Table_Count() { local unique unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")" echo -e " ${INFO} Number of ${str}: ${num} (${unique} unique domains)" + sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" else echo -e " ${INFO} Number of ${str}: ${num}" fi From c91d9cc0b653adf82ef12e6356e6706d12e17f07 Mon Sep 17 00:00:00 2001 From: Willem Date: Sat, 8 Feb 2020 17:00:22 +0100 Subject: [PATCH 300/366] Update Cameleon blacklist url to use https Switches from http to https for the Cameleon (sysctl.org) blacklist. 
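The gravity_count bookkeeping added a few hunks above trades one expensive COUNT(DISTINCT ...) per gravity run for a cached value in the info table, so later readers such as the debugger do not have to repeat the query. A minimal sketch of that pattern against a throwaway database:

    db="/tmp/gravity-count.db"
    sqlite3 "${db}" "CREATE TABLE IF NOT EXISTS gravity (domain TEXT NOT NULL, adlist_id INTEGER NOT NULL);
                     CREATE TABLE IF NOT EXISTS info (property TEXT PRIMARY KEY, value TEXT NOT NULL);
                     INSERT INTO gravity VALUES ('ads.example',1),('ads.example',2),('tracker.example',1);"
    # Count once, then cache the figure alongside the other gravity metadata
    unique="$(sqlite3 "${db}" "SELECT COUNT(DISTINCT domain) FROM gravity;")"
    sqlite3 "${db}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
    echo "Number of gravity domains: ${unique} unique"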
Signed-off-by: canihavesomecoffee --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f99d02ab..b83f8585 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1199,7 +1199,7 @@ appendToListsFile() { case $1 in StevenBlack ) echo "https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts" >> "${adlistFile}";; MalwareDom ) echo "https://mirror1.malwaredomains.com/files/justdomains" >> "${adlistFile}";; - Cameleon ) echo "http://sysctl.org/cameleon/hosts" >> "${adlistFile}";; + Cameleon ) echo "https://sysctl.org/cameleon/hosts" >> "${adlistFile}";; DisconTrack ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt" >> "${adlistFile}";; DisconAd ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt" >> "${adlistFile}";; HostsFile ) echo "https://hosts-file.net/ad_servers.txt" >> "${adlistFile}";; From c4005c4a313608e42dadc722f00459a97197bf0a Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Tue, 11 Feb 2020 09:56:28 -0800 Subject: [PATCH 301/366] Delete FUNDING.yml Organization-wide FUNDING now set up. --- .github/FUNDING.yml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index 3a75dc12..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,4 +0,0 @@ -# These are supported funding model platforms - -patreon: pihole -custom: https://pi-hole.net/donate From dc2fce8e1d48d9013d18eac578f8e5e52670d4c8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:26:25 +0100 Subject: [PATCH 302/366] Store gravity update timestamp only after database swapping. Signed-off-by: DL6ER --- gravity.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 26bedae7..e6e53405 100755 --- a/gravity.sh +++ b/gravity.sh @@ -120,11 +120,11 @@ gravity_swap_databases() { # Update timestamp when the gravity table was last updated successfully update_gravity_timestamp() { - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" return 1 fi return 0 @@ -749,12 +749,12 @@ gravity_DownloadBlocklists # Create local.list gravity_generateLocalList -# Update gravity timestamp -update_gravity_timestamp - # Migrate rest of the data from old to new database gravity_swap_databases +# Update gravity timestamp +update_gravity_timestamp + # Ensure proper permissions are set for the database chown pihole:pihole "${gravityDBfile}" chmod g+w "${piholeDir}" "${gravityDBfile}" From baf5340dc0aa9344216c1c8eba589ca15888a10d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:39:12 +0100 Subject: [PATCH 303/366] Show info table instead of counting domains to speed up the debugging process on low-end hardware drastically. 
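With the counts cached, the debugger can simply dump the small info table and render the stored 'updated' epoch with date(1) instead of scanning vw_gravity. A sketch of both steps (throwaway database, GNU date assumed):

    db="/tmp/gravity-info.db"
    sqlite3 "${db}" "CREATE TABLE IF NOT EXISTS info (property TEXT PRIMARY KEY, value TEXT NOT NULL);
                     INSERT OR REPLACE INTO info VALUES ('version','11'),('updated',cast(strftime('%s','now') as int));"
    # Dumping the tiny info table is effectively free compared to COUNT(*) over vw_gravity
    sqlite3 -separator ' | ' "${db}" "SELECT property, value FROM info;"
    # 'updated' holds a Unix timestamp; convert it for the debug log
    gravity_updated_raw="$(sqlite3 "${db}" "SELECT value FROM info WHERE property = 'updated';")"
    date -d @"${gravity_updated_raw}"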
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 76a409f9..0a256a97 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1134,16 +1134,14 @@ analyze_gravity_list() { gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}") log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" - local gravity_size - gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") - log_write " Size (excluding blacklist): ${COL_CYAN}${gravity_size}${COL_NC} entries" + show_db_entries "Info table" "SELECT property,value FROM info" "20 40" log_write "" OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") - log_write " ${COL_CYAN}----- First 10 Domains -----${COL_NC}" + log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do log_write " ${line}" From 50f6fffbdc31252ae25e494398134d02a79bb57d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:43:55 +0100 Subject: [PATCH 304/366] Migrate debugger to domainlist and add printing of client table. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 0a256a97..c778995d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1113,18 +1113,14 @@ show_adlists() { show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } -show_whitelist() { - show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM whitelist" "4 100 7 19 19 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" +show_domainlist() { + show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } -show_blacklist() { - show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM blacklist" "4 100 7 19 19 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_blacklist" "4 100 7 19 19 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" +show_clients() { + show_db_entries "Clients" "SELECT id,ip,datetime(date_added,'unixepoch','localtime') 
date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM client" "4 100 19 19 50" + show_db_entries "Client groups" "SELECT * FROM client_by_group" "10 10" } analyze_gravity_list() { @@ -1299,9 +1295,9 @@ parse_setup_vars check_x_headers analyze_gravity_list show_groups +show_domainlist +show_clients show_adlists -show_whitelist -show_blacklist show_content_of_pihole_files parse_locale analyze_pihole_log From eadd82761c97c6e10ec327871d557dc76f67be59 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:51:40 +0100 Subject: [PATCH 305/366] Add human-readable output of time of the last gravity run. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c778995d..b0af3a40 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1131,6 +1131,9 @@ analyze_gravity_list() { log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" show_db_entries "Info table" "SELECT property,value FROM info" "20 40" + gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated="$(date -d @${gravity_updated_raw})" + log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" OLD_IFS="$IFS" From f10a15146914ce02344f4cb27cb7863f681d1932 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 21:05:02 +0100 Subject: [PATCH 306/366] Fix pihole -t sed instructions. Signed-off-by: DL6ER --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index e1758645..6e72b4a3 100755 --- a/pihole +++ b/pihole @@ -306,7 +306,7 @@ tailFunc() { # Colour A/AAAA/DHCP strings as white # Colour everything else as gray tail -f /var/log/pihole.log | sed -E \ - -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ + -e "s,($(date +'%b %d ')| dnsmasq\[[0-9]*\]),,g" \ -e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ -e "s,.*,${COL_GRAY}&${COL_NC}," From 1072078e26d2e71a984d85744d59ff23c8f33110 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 16 Feb 2020 11:47:42 +0000 Subject: [PATCH 307/366] Change to use API instead of the Location Header (some trailing whitespace removed) Signed-off-by: Adam Warner --- automated install/basic-install.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 7b43f2a3..66f1ddc9 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -427,11 +427,11 @@ make_repo() { # Clone the repo and return the return code from this command git clone -q --depth 20 "${remoteRepo}" "${directory}" &> /dev/null || return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+rX "${directory}" + chmod -R a+rX "${directory}" # Move into the directory that was passed as an argument pushd "${directory}" &> /dev/null || return 1 # Check current branch. If it is master, then reset to the latest availible tag. 
- # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. git reset --hard "$(git describe --abbrev=0 --tags)" || return $? @@ -457,7 +457,7 @@ update_repo() { # Again, it's useful to store these in variables in case we need to reuse or change the message; # we only need to make one change here local str="Update repo in ${1}" - # Move into the directory that was passed as an argument + # Move into the directory that was passed as an argument pushd "${directory}" &> /dev/null || return 1 # Let the user know what's happening printf " %b %s..." "${INFO}" "${str}" @@ -467,7 +467,7 @@ update_repo() { # Pull the latest commits git pull --quiet &> /dev/null || return $? # Check current branch. If it is master, then reset to the latest availible tag. - # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then git reset --hard "$(git describe --abbrev=0 --tags)" || return $? @@ -529,7 +529,7 @@ resetRepo() { printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Return to where we came from popd &> /dev/null || return 1 - # Returning success anyway? + # Returning success anyway? return 0 } @@ -2229,7 +2229,7 @@ FTLinstall() { printf " %b %s..." "${INFO}" "${str}" # Find the latest version tag for FTL - latesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep "Location" | awk -F '/' '{print $NF}') + latesttag=$(curl --silent "https://api.github.com/repos/pi-hole/ftl/releases/latest" | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') # Tags should always start with v, check for that. if [[ ! "${latesttag}" == v* ]]; then printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" From 75633f09509f3b1bfe223069bde34696be068e65 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Feb 2020 21:24:32 +0100 Subject: [PATCH 308/366] Install php-intl and trust the system to install the right extension. We've seen reports that just installing php5-intl or php7-intl isn't sufficient and that we need the meta package as well. 
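The release lookup in the FTLinstall() hunk above extracts tag_name from the GitHub API response with grep and sed so the installer does not need jq, and the v-prefix check then guards against an unexpected payload. The same logic in isolation (assumes GitHub's current JSON layout and working network access):

    latesttag=$(curl --silent "https://api.github.com/repos/pi-hole/ftl/releases/latest" \
        | grep '"tag_name":' \
        | sed -E 's/.*"([^"]+)".*/\1/')
    # Release tags are expected to start with "v"; treat anything else as a failed lookup
    if [[ "${latesttag}" == v* ]]; then
        echo "Latest FTL release: ${latesttag}"
    else
        echo "Unable to get latest release from the GitHub API" >&2
    fi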
Signed-off-by: DL6ER --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 66f1ddc9..369e49e0 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -247,7 +247,7 @@ if is_command apt-get ; then PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "${phpVer}-intl") + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "php-intl") # The Web server user, LIGHTTPD_USER="www-data" # group, From cd3ad0bdc7b6758183252667d419e5881f7c8f51 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:39:12 +0100 Subject: [PATCH 309/366] Show info table instead of counting domains to speed up the debugging process on low-end hardware drastically. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 76a409f9..0a256a97 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1134,16 +1134,14 @@ analyze_gravity_list() { gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}") log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" - local gravity_size - gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") - log_write " Size (excluding blacklist): ${COL_CYAN}${gravity_size}${COL_NC} entries" + show_db_entries "Info table" "SELECT property,value FROM info" "20 40" log_write "" OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") - log_write " ${COL_CYAN}----- First 10 Domains -----${COL_NC}" + log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do log_write " ${line}" From 714a79ffced5867fdfd6b43e14266a7850b07b11 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:43:55 +0100 Subject: [PATCH 310/366] Migrate debugger to domainlist and add printing of client table. 
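In the consolidated schema the four former lists live in a single domainlist table and are told apart by the type column; the vw_whitelist, vw_blacklist, vw_regex_whitelist and vw_regex_blacklist views defined earlier in this series select types 0, 1, 2 and 3 respectively. A toy query showing the kind of table the debugger's domainlist dump operates on (sample rows only):

    db="/tmp/domainlist-example.db"
    sqlite3 "${db}" "CREATE TABLE IF NOT EXISTS domainlist (
                         id INTEGER PRIMARY KEY AUTOINCREMENT,
                         type INTEGER NOT NULL DEFAULT 0,
                         domain TEXT UNIQUE NOT NULL,
                         enabled BOOLEAN NOT NULL DEFAULT 1);
                     INSERT OR IGNORE INTO domainlist (type,domain) VALUES
                         (0,'allowed.example'),
                         (1,'blocked.example'),
                         (3,'(^|\.)doubleclick\.');
                     SELECT id, type, domain, enabled FROM domainlist ORDER BY type, id;"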
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 0a256a97..c778995d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1113,18 +1113,14 @@ show_adlists() { show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } -show_whitelist() { - show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM whitelist" "4 100 7 19 19 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" +show_domainlist() { + show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } -show_blacklist() { - show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM blacklist" "4 100 7 19 19 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_blacklist" "4 100 7 19 19 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" +show_clients() { + show_db_entries "Clients" "SELECT id,ip,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM client" "4 100 19 19 50" + show_db_entries "Client groups" "SELECT * FROM client_by_group" "10 10" } analyze_gravity_list() { @@ -1299,9 +1295,9 @@ parse_setup_vars check_x_headers analyze_gravity_list show_groups +show_domainlist +show_clients show_adlists -show_whitelist -show_blacklist show_content_of_pihole_files parse_locale analyze_pihole_log From d0e29ab7b0d08e16b8d7e6f69d3e5b8a544a3c32 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:51:40 +0100 Subject: [PATCH 311/366] Add human-readable output of time of the last gravity run. 
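The client and per-client group listings added to the debugger lean on the schema's default-group plumbing: the tr_client_add trigger links every newly inserted client to group 0 ('Unassociated'), so client_by_group always has at least one row per client. A self-contained sketch of that behaviour against a throwaway database:

    db="/tmp/client-group-example.db"
    sqlite3 "${db}" "CREATE TABLE IF NOT EXISTS \"group\"
                         (id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, name TEXT UNIQUE NOT NULL);
                     INSERT OR IGNORE INTO \"group\" (id,enabled,name) VALUES (0,1,'Unassociated');
                     CREATE TABLE IF NOT EXISTS client (id INTEGER PRIMARY KEY AUTOINCREMENT, ip TEXT NOT NULL UNIQUE);
                     CREATE TABLE IF NOT EXISTS client_by_group
                         (client_id INTEGER NOT NULL, group_id INTEGER NOT NULL, PRIMARY KEY (client_id, group_id));
                     CREATE TRIGGER IF NOT EXISTS tr_client_add AFTER INSERT ON client
                         BEGIN
                             INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
                         END;
                     INSERT OR IGNORE INTO client (ip) VALUES ('192.168.0.42');
                     SELECT * FROM client_by_group;"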
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c778995d..b0af3a40 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1131,6 +1131,9 @@ analyze_gravity_list() { log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" show_db_entries "Info table" "SELECT property,value FROM info" "20 40" + gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated="$(date -d @${gravity_updated_raw})" + log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" OLD_IFS="$IFS" From c5c414a7a24789a41fd64fdbb1114d037ecf19b0 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Sun, 16 Feb 2020 19:24:05 -0800 Subject: [PATCH 312/366] Stickler Lint - quote to prevent splitting Signed-off-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index b0af3a40..7f2b60c4 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1132,7 +1132,7 @@ analyze_gravity_list() { show_db_entries "Info table" "SELECT property,value FROM info" "20 40" gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" - gravity_updated="$(date -d @${gravity_updated_raw})" + gravity_updated="$(date -d @"${gravity_updated_raw}")" log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" From 52398052e911edfe73fb15b5a18f601cfd636ed5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 17 Feb 2020 21:07:48 +0100 Subject: [PATCH 313/366] Compute number of domains (and store it in the database) BEFORE calling FTL to re-read said value. Signed-off-by: DL6ER --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index f4cbe78d..50ae3883 100755 --- a/gravity.sh +++ b/gravity.sh @@ -760,14 +760,14 @@ gravity_swap_databases chown pihole:pihole "${gravityDBfile}" chmod g+w "${piholeDir}" "${gravityDBfile}" +# Compute numbers to be displayed +gravity_ShowCount + # Determine if DNS has been restarted by this instance of gravity if [[ -z "${dnsWasOffline:-}" ]]; then "${PIHOLE_COMMAND}" restartdns reload fi -# Compute numbers to be displayed -gravity_ShowCount - gravity_Cleanup echo "" From 7be019ff522e78a26e65d2b6cce2d2fba1598c1a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 17 Feb 2020 21:29:25 +0000 Subject: [PATCH 314/366] No need to determine the latest tag, we can just go direct Co-authored-by: Dan Schaper Signed-off-by: Adam Warner --- automated install/basic-install.sh | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 369e49e0..e15ce0f5 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2228,15 +2228,6 @@ FTLinstall() { local str="Downloading and Installing FTL" printf " %b %s..." "${INFO}" "${str}" - # Find the latest version tag for FTL - latesttag=$(curl --silent "https://api.github.com/repos/pi-hole/ftl/releases/latest" | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') - # Tags should always start with v, check for that. - if [[ ! 
"${latesttag}" == v* ]]; then - printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" - printf " %bError: Unable to get latest release location from GitHub%b\\n" "${COL_LIGHT_RED}" "${COL_NC}" - return 1 - fi - # Move into the temp ftl directory pushd "$(mktemp -d)" > /dev/null || { printf "Unable to make temporary directory for FTL binary download\\n"; return 1; } @@ -2257,7 +2248,7 @@ FTLinstall() { # Determine which version of FTL to download if [[ "${ftlBranch}" == "master" ]];then - url="https://github.com/pi-hole/FTL/releases/download/${latesttag%$'\r'}" + url="https://github.com/pi-hole/ftl/releases/latest/download" else url="https://ftl.pi-hole.net/${ftlBranch}" fi From af95e8c2500bc5ac0f6cd78e9ca9ee533f66dc76 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 19 Feb 2020 17:41:53 +0000 Subject: [PATCH 315/366] force `updatchecker.sh` run if any of the three components are updated Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index e45be5cf..1b98eafb 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -198,6 +198,14 @@ main() { ${PI_HOLE_FILES_DIR}/automated\ install/basic-install.sh --reconfigure --unattended || \ echo -e "${basicError}" && exit 1 fi + + if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]] + # Force an update of the updatechecker + /opt/pihole/updatecheck.sh + /opt/pihole/updatecheck.sh x remote + echo -e " ${INFO} Local version file information updated." + fi + echo "" exit 0 } From 4a5f344b099cd1a281a9a38a795cb9bc39eb0857 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 19 Feb 2020 17:46:45 +0000 Subject: [PATCH 316/366] then Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index 1b98eafb..f833fc2f 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -199,7 +199,7 @@ main() { echo -e "${basicError}" && exit 1 fi - if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]] + if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]]; then # Force an update of the updatechecker /opt/pihole/updatecheck.sh /opt/pihole/updatecheck.sh x remote From 839fe32042982b1d7109b2c8e3b818da0f9b4118 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 19:56:48 +0100 Subject: [PATCH 317/366] Fix issue with missing newline at the end of adlists (#3144) * Also display non-fatal warnings during the database importing. Previously, we have only show warnings when there were also errors (errors are always fatal). Signed-off-by: DL6ER * Ensure there is always a newline on the last line. Signed-off-by: DL6ER * Stickler linting Signed-off-by: Dan Schaper * Move sed command into subroutine to avoid code duplication. Signed-off-by: DL6ER * Also unify comments. Signed-off-by: DL6ER * Also unify comments. Signed-off-by: DL6ER Co-authored-by: Dan Schaper --- gravity.sh | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/gravity.sh b/gravity.sh index bc2cace3..b9abd83d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -390,12 +390,23 @@ gravity_DownloadBlocklists() { echo -e "${OVER} ${TICK} ${str}" fi + if [[ "${status}" -eq 0 && -n "${output}" ]]; then + echo -e " Encountered non-critical SQL warnings. 
Please check the suitability of the list you're using!\\nSQL warnings:\\n${output}\\n" + fi + rm "${target}" > /dev/null 2>&1 || \ echo -e " ${CROSS} Unable to remove ${target}" gravity_Blackbody=true } +parseList() { + local adlistID="${1}" src="${2}" target="${3}" + #Append ,${arg} to every line and then remove blank lines before import + # /.$/a\\ ensures there is a newline on the last line + sed -e "s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" +} + # Download specified URL and perform checks on HTTP status and file content gravity_DownloadBlocklistFromUrl() { local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" @@ -481,14 +492,13 @@ gravity_DownloadBlocklistFromUrl() { if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then # Add domains to database table file - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + parseList "${adlistID}" "${saveLocation}" "${target}" # Check if $patternbuffer is a non-zero length file elif [[ -s "${patternBuffer}" ]]; then # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + # Add domains to database table file + parseList "${adlistID}" "${saveLocation}" "${target}" else # Fall back to previously cached list if $patternBuffer is empty echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" @@ -497,8 +507,8 @@ gravity_DownloadBlocklistFromUrl() { # Determine if cached list has read permission if [[ -r "${saveLocation}" ]]; then echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + # Add domains to database table file + parseList "${adlistID}" "${saveLocation}" "${target}" else echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" fi From 3c09cd4a3ae296a6ed2d061f30cb433aa044bf45 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 21:41:28 +0100 Subject: [PATCH 318/366] Experimental output of matching line from shown warnings. Signed-off-by: DL6ER --- gravity.sh | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index b9abd83d..eac0acfe 100755 --- a/gravity.sh +++ b/gravity.sh @@ -391,7 +391,29 @@ gravity_DownloadBlocklists() { fi if [[ "${status}" -eq 0 && -n "${output}" ]]; then - echo -e " Encountered non-critical SQL warnings. Please check the suitability of the list you're using!\\nSQL warnings:\\n${output}\\n" + echo -e " Encountered non-critical SQL warnings. 
Please check the suitability of the lists you're using!\\n\\n SQL warnings:" + local warning file line lineno + while IFS= read -r line; do + echo " - ${line}" + warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" + file="${warning%:*}" + lineno="${warning#*:}" + if [[ -n "${file}" && -n "${lineno}" ]]; then + echo -n " Line contains: " + awk "NR==${lineno}" < ${file} + fi + done <<< "${output}" + echo "" + local file line + while IFS= read -r line; do + warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" + file="${warning%:*}" + lineno="${warning#*:}" + if [[ -n "${file}" && -n "${lineno}" ]]; then + echo -n "Line contains: " + awk "NR==${lineno}" < ${file} + fi + done <<< "${output}" fi rm "${target}" > /dev/null 2>&1 || \ From 050e2963c754cab03262bcc53948b620ca92cd01 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 22:28:53 +0100 Subject: [PATCH 319/366] Remove redundant code. Signed-off-by: DL6ER --- gravity.sh | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/gravity.sh b/gravity.sh index eac0acfe..4860339f 100755 --- a/gravity.sh +++ b/gravity.sh @@ -404,16 +404,6 @@ gravity_DownloadBlocklists() { fi done <<< "${output}" echo "" - local file line - while IFS= read -r line; do - warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" - file="${warning%:*}" - lineno="${warning#*:}" - if [[ -n "${file}" && -n "${lineno}" ]]; then - echo -n "Line contains: " - awk "NR==${lineno}" < ${file} - fi - done <<< "${output}" fi rm "${target}" > /dev/null 2>&1 || \ From 81d4531e10b07e96b863b23920d32de189b7616d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 22 Feb 2020 13:00:38 +0100 Subject: [PATCH 320/366] Implement performant list checking routine. Signed-off-by: DL6ER --- gravity.sh | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index 4860339f..30e43135 100755 --- a/gravity.sh +++ b/gravity.sh @@ -412,11 +412,36 @@ gravity_DownloadBlocklists() { gravity_Blackbody=true } +total_num=0 parseList() { - local adlistID="${1}" src="${2}" target="${3}" + local adlistID="${1}" src="${2}" target="${3}" incorrect_lines #Append ,${arg} to every line and then remove blank lines before import # /.$/a\\ ensures there is a newline on the last line - sed -e "s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" + + local num_lines num_target_lines num_correct_lines percentage percentage_fraction + num_lines="$(grep -c "^" "${src}")" + #num_correct_lines="$(grep -c "^[a-zA-Z0-9.-]*$" "${src}")" + num_target_lines="$(grep -c "^" "${target}")" + num_correct_lines="$(( num_target_lines-total_num ))" + total_num="$num_target_lines" + percentage=100 + percentage_fraction=0 + if [[ "${num_lines}" -gt 0 ]]; then + percentage="$(( 1000*num_correct_lines/num_lines ))" + percentage_fraction="$(( percentage%10 ))" + percentage="$(( percentage/10 ))" + fi + echo " ${INFO} List quality: ${num_correct_lines} of ${num_lines} lines importable (${percentage}.${percentage_fraction}%)" + + if [[ -n "${incorrect_lines}" ]]; then + echo " Example for invalid domains (showing only the first five):" + while IFS= read -r line; do + echo " - ${line}" + done <<< "${incorrect_lines}" + fi + } # Download specified URL and perform checks on HTTP status and file content From 8131b5961cdaf0df890408d1400f11e67b2d56e9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 22 Feb 2020 15:17:24 +0100 
Subject: [PATCH 321/366] Add comments to the code describing the changes. Signed-off-by: DL6ER --- gravity.sh | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/gravity.sh b/gravity.sh index 30e43135..b3a70f74 100755 --- a/gravity.sh +++ b/gravity.sh @@ -400,7 +400,7 @@ gravity_DownloadBlocklists() { lineno="${warning#*:}" if [[ -n "${file}" && -n "${lineno}" ]]; then echo -n " Line contains: " - awk "NR==${lineno}" < ${file} + awk "NR==${lineno}" < "${file}" fi done <<< "${output}" echo "" @@ -415,17 +415,22 @@ gravity_DownloadBlocklists() { total_num=0 parseList() { local adlistID="${1}" src="${2}" target="${3}" incorrect_lines - #Append ,${arg} to every line and then remove blank lines before import - # /.$/a\\ ensures there is a newline on the last line - sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + # This sed does the following things: + # 1. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_) + # 2. Append ,adlistID to every line + # 3. Ensures there is a newline on the last line + sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/.$/a\\" "${src}" >> "${target}" + # Find (up to) five domains containing invalid characters (see above) incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" local num_lines num_target_lines num_correct_lines percentage percentage_fraction + # Get number of lines in source file num_lines="$(grep -c "^" "${src}")" - #num_correct_lines="$(grep -c "^[a-zA-Z0-9.-]*$" "${src}")" + # Get number of lines in destination file num_target_lines="$(grep -c "^" "${target}")" num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" + # Compute percentage of valid lines percentage=100 percentage_fraction=0 if [[ "${num_lines}" -gt 0 ]]; then @@ -433,15 +438,15 @@ parseList() { percentage_fraction="$(( percentage%10 ))" percentage="$(( percentage/10 ))" fi - echo " ${INFO} List quality: ${num_correct_lines} of ${num_lines} lines importable (${percentage}.${percentage_fraction}%)" + echo " ${INFO} ${num_correct_lines} of ${num_lines} domains imported (${percentage}.${percentage_fraction}%)" + # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then - echo " Example for invalid domains (showing only the first five):" + echo " Sample of invalid domains (showing up to five):" while IFS= read -r line; do echo " - ${line}" done <<< "${incorrect_lines}" fi - } # Download specified URL and perform checks on HTTP status and file content From 1e8bfd33f5375bf4e2d2967be08bd544dad86d71 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 23 Feb 2020 22:50:06 +0100 Subject: [PATCH 322/366] Improve output Signed-off-by: DL6ER --- gravity.sh | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/gravity.sh b/gravity.sh index b3a70f74..d09211c8 100755 --- a/gravity.sh +++ b/gravity.sh @@ -423,26 +423,19 @@ parseList() { # Find (up to) five domains containing invalid characters (see above) incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" - local num_lines num_target_lines num_correct_lines percentage percentage_fraction + local num_lines num_target_lines num_correct_lines num_invalid # Get number of lines in source file num_lines="$(grep -c "^" "${src}")" # Get number of lines in destination file num_target_lines="$(grep -c "^" "${target}")" num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" - # Compute 
percentage of valid lines - percentage=100 - percentage_fraction=0 - if [[ "${num_lines}" -gt 0 ]]; then - percentage="$(( 1000*num_correct_lines/num_lines ))" - percentage_fraction="$(( percentage%10 ))" - percentage="$(( percentage/10 ))" - fi - echo " ${INFO} ${num_correct_lines} of ${num_lines} domains imported (${percentage}.${percentage_fraction}%)" + num_invalid="$(( num_lines-num_correct_lines ))" + echo " ${INFO} Imported ${num_correct_lines} of ${num_lines} domains, ${num_invalid} domains invalid" # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then - echo " Sample of invalid domains (showing up to five):" + echo " Sample of invalid domains:" while IFS= read -r line; do echo " - ${line}" done <<< "${incorrect_lines}" From 3dd05606ca4714023124b579c648b991fc57af77 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 24 Feb 2020 07:06:15 +0100 Subject: [PATCH 323/366] Call it the received number of domains instead of the imported number as importing does only happen a bit later. Only show the number of invalid domains if there are invalid domains. Signed-off-by: DL6ER --- gravity.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index d09211c8..c421e832 100755 --- a/gravity.sh +++ b/gravity.sh @@ -431,7 +431,11 @@ parseList() { num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" num_invalid="$(( num_lines-num_correct_lines ))" - echo " ${INFO} Imported ${num_correct_lines} of ${num_lines} domains, ${num_invalid} domains invalid" + if [[ "${num_invalid}" -eq 0 ]]; then + echo " ${INFO} Received ${num_lines} domains" + else + echo " ${INFO} Received ${num_lines} domains, ${num_invalid} domains invalid!" + fi # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then From 8ecaaba2479492a9df5a9e84377dcdeec56d158f Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 24 Feb 2020 18:00:19 +0000 Subject: [PATCH 324/366] Compare daemons to expected results. 
(#3158) (#3159) Signed-off-by: Dan Schaper Co-authored-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 7f2b60c4..28d34ab6 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -662,19 +662,21 @@ ping_internet() { } compare_port_to_service_assigned() { - local service_name="${1}" - # The programs we use may change at some point, so they are in a varible here - local resolver="pihole-FTL" - local web_server="lighttpd" - local ftl="pihole-FTL" + local service_name + local expected_service + local port + + service_name="${2}" + expected_service="${1}" + port="${3}" # If the service is a Pi-hole service, highlight it in green - if [[ "${service_name}" == "${resolver}" ]] || [[ "${service_name}" == "${web_server}" ]] || [[ "${service_name}" == "${ftl}" ]]; then - log_write "[${COL_GREEN}${port_number}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" + if [[ "${service_name}" == "${expected_service}" ]]; then + log_write "[${COL_GREEN}${port}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" # Otherwise, else # Show the service name in red since it's non-standard - log_write "[${COL_RED}${port_number}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})" + log_write "[${COL_RED}${port}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})" fi } @@ -708,11 +710,11 @@ check_required_ports() { fi # Use a case statement to determine if the right services are using the right ports case "$(echo "$port_number" | rev | cut -d: -f1 | rev)" in - 53) compare_port_to_service_assigned "${resolver}" + 53) compare_port_to_service_assigned "${resolver}" "${service_name}" 53 ;; - 80) compare_port_to_service_assigned "${web_server}" + 80) compare_port_to_service_assigned "${web_server}" "${service_name}" 80 ;; - 4711) compare_port_to_service_assigned "${ftl}" + 4711) compare_port_to_service_assigned "${ftl}" "${service_name}" 4711 ;; # If it's not a default port that Pi-hole needs, just print it out for the user to see *) log_write "${port_number} ${service_name} (${protocol_type})"; From 6104d816226761ffc47279fa9b0cdcf54af472f7 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 24 Feb 2020 20:02:48 +0000 Subject: [PATCH 325/366] Safeguard against colour output in grep commandadd -i to grep to make search for "Location" case-insensitive Signed-off-by: Adam Warner --- automated install/basic-install.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index e15ce0f5..65c72b40 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2459,17 +2459,14 @@ FTLcheckUpdate() { if [[ ${ftlLoc} ]]; then local FTLversion FTLversion=$(/usr/bin/pihole-FTL tag) - local FTLreleaseData local FTLlatesttag - if ! FTLreleaseData=$(curl -sI https://github.com/pi-hole/FTL/releases/latest); then + if ! 
FTLlatesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep --color=never -i Location | awk -F / '{print $NF}' | tr -d '[:cntrl:]'); then # There was an issue while retrieving the latest version printf " %b Failed to retrieve latest FTL release metadata" "${CROSS}" return 3 fi - FTLlatesttag=$(grep 'Location' <<< "${FTLreleaseData}" | awk -F '/' '{print $NF}' | tr -d '\r\n') - if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then return 0 else From 4f390ce801c739ea87cfcae131855ecbd9161818 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 05:39:21 -0800 Subject: [PATCH 326/366] Use bash regex instead of awk. Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a96129e0..73650400 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -33,15 +33,13 @@ scanList(){ export LC_CTYPE=C # /dev/null forces filename to be printed when only one list has been generated - # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; # Create array of regexps # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ - <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; + "regex" ) if [[ "${domain}" =~ ${lists} ]]; then printf "%b\n" "${lists}"; fi;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } From 360d0e4e6bfb5c71e078be41d21132422fb20323 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 08:07:10 -0800 Subject: [PATCH 327/366] Loop through array of lists. Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 73650400..4dc9429d 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -35,11 +35,15 @@ scanList(){ # /dev/null forces filename to be printed when only one list has been generated case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - # Create array of regexps # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) if [[ "${domain}" =~ ${lists} ]]; then printf "%b\n" "${lists}"; fi;; + "regex" ) + for list in `echo "${lists}"`; do + if [[ "${domain}" =~ ${list} ]]; then + printf "%b\n" "${list}"; + fi + done;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } From bf4fada3b7188630ecb4656b123a94110998cacd Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 09:52:06 -0800 Subject: [PATCH 328/366] Don't quote inside backticks, use unquoted variable. 
Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 4dc9429d..7518e6c4 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -39,7 +39,7 @@ scanList(){ # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery "regex" ) - for list in `echo "${lists}"`; do + for list in ${lists}; do if [[ "${domain}" =~ ${list} ]]; then printf "%b\n" "${list}"; fi From 22ce5c0d70e48f8e806dd79758359ccb623efb20 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Mar 2020 00:32:37 +0100 Subject: [PATCH 329/366] Fix incorrect type description. (#3201) Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 28d34ab6..304dc666 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1116,7 +1116,7 @@ show_adlists() { } show_domainlist() { - show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist (0/1 = exact white-/blacklist, 2/3 = regex white-/blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } From dbc54b3063e6bfff302fdd95269c67ae03085e41 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:47:59 +0000 Subject: [PATCH 330/366] remove resolvconf dep Signed-off-by: Adam Warner --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 65c72b40..0d05db1a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -244,7 +244,7 @@ if is_command apt-get ; then # These programs are stored in an array so they can be looped through later INSTALLER_DEPS=(dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed - PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) + PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "php-intl") From 1481cc583fe6425a9be74720f1c45a8bfc389ab5 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:48:40 +0000 Subject: [PATCH 331/366] Don't set nameserver in dhcpcd.conf Signed-off-by: Adam Warner --- automated install/basic-install.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 0d05db1a..b896eb35 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -854,8 +854,7 
@@ setDHCPCD() { # we can append these lines to dhcpcd.conf to enable a static IP echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} - static routers=${IPv4gw} - static domain_name_servers=127.0.0.1" | tee -a /etc/dhcpcd.conf >/dev/null + static routers=${IPv4gw}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 175d32c5f660a03368be40bb931a3752bb24643c Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:55:43 +0000 Subject: [PATCH 332/366] Set nameservers to be that which have been chosen by the user in the whiptail Signed-off-by: Adam Warner --- automated install/basic-install.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index b896eb35..35d4df9f 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -854,7 +854,8 @@ setDHCPCD() { # we can append these lines to dhcpcd.conf to enable a static IP echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} - static routers=${IPv4gw}" | tee -a /etc/dhcpcd.conf >/dev/null + static routers=${IPv4gw} + static domain_name_servers=${PIHOLE_DNS_1},${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 4994da5170300cceaba8f1eca143daabe89df357 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Thu, 12 Mar 2020 18:48:40 +0000 Subject: [PATCH 333/366] Update automated install/basic-install.sh --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 35d4df9f..f5043ded 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -855,7 +855,7 @@ setDHCPCD() { echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} static routers=${IPv4gw} - static domain_name_servers=${PIHOLE_DNS_1},${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null + static domain_name_servers=${PIHOLE_DNS_1} ${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 15a9d662ac5e1aab12a7c025a48d75971b3687f0 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 14 Mar 2020 11:18:43 +0000 Subject: [PATCH 334/366] Add option --comment "whatever" for adding comments for new domains through the CLI interface. Signed-off-by: DL6ER --- advanced/Scripts/list.sh | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 4f2e046f..77a5dece 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -22,6 +22,9 @@ web=false domList=() typeId="" +comment="" +declare -i domaincount +domaincount=0 colfile="/opt/pihole/COL_TABLE" source ${colfile} @@ -97,10 +100,12 @@ ValidateDomain() { fi if [[ -n "${validDomain}" ]]; then - domList=("${domList[@]}" ${validDomain}) + domList=("${domList[@]}" "${validDomain}") else echo -e " ${CROSS} ${domain} is not a valid argument or domain name!" 
fi + + domaincount=$((domaincount+1)) } ProcessDomainList() { @@ -151,7 +156,12 @@ AddDomain() { reload=true # Insert only the domain here. The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) - sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" + if [[ -z "${comment}" ]]; then + sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" + else + # also add comment when variable has been set through the "--comment" option + sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');" + fi } RemoveDomain() { @@ -224,8 +234,16 @@ NukeList() { sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" } -for var in "$@"; do - case "${var}" in +GetComment() { + comment="$1" + if [[ "${comment}" =~ [^a-zA-Z0-9_\#:/\.,\ -] ]]; then + echo " ${CROSS} Found invalid characters in domain comment!" + exit + fi +} + +while (( "$#" )); do + case "${1}" in "-w" | "whitelist" ) typeId=0;; "-b" | "blacklist" ) typeId=1;; "--white-regex" | "white-regex" ) typeId=2;; @@ -239,13 +257,15 @@ for var in "$@"; do "-l" | "--list" ) Displaylist;; "--nuke" ) NukeList;; "--web" ) web=true;; - * ) ValidateDomain "${var}";; + "--comment" ) GetComment "${2}"; shift;; + * ) ValidateDomain "${1}";; esac + shift done shift -if [[ $# = 0 ]]; then +if [[ ${domaincount} == 0 ]]; then helpFunc fi From 277179f150692e6c0968912a02341959069a9242 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 27 Mar 2020 19:34:41 +0100 Subject: [PATCH 335/366] Remove 19036 trust anchor, now expired: https://www.icann.org/resources/pages/ksk-rollover Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 829ba57b..aab90c35 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -179,7 +179,6 @@ ProcessDNSSettings() { if [[ "${DNSSEC}" == true ]]; then echo "dnssec -trust-anchor=.,19036,8,2,49AAC11D7B6F6446702E54A1607371607A1A41855200FD2CE1CDDE32F24E8FB5 trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC683457104237C7F8EC8D " >> "${dnsmasqconfig}" fi From dc35709a1b3a60cf48bcd78d1a7ffae00c81cb69 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Tue, 31 Mar 2020 17:39:21 +0100 Subject: [PATCH 336/366] Remove hosts-file.net from default lists --- automated install/basic-install.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f5043ded..a8ac91f3 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1212,8 +1212,7 @@ chooseBlocklists() { MalwareDom "MalwareDomains" on Cameleon "Cameleon" on DisconTrack "Disconnect.me Tracking" on - DisconAd "Disconnect.me Ads" on - HostsFile "Hosts-file.net Ads" on) + DisconAd "Disconnect.me Ads" on) # In a variable, show the choices available; exit if Cancel is selected choices=$("${cmd[@]}" "${options[@]}" 2>&1 >/dev/tty) || { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; rm "${adlistFile}" ;exit 1; } @@ -1235,7 +1234,6 @@ appendToListsFile() { Cameleon ) echo "https://sysctl.org/cameleon/hosts" >> "${adlistFile}";; DisconTrack ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt" >> "${adlistFile}";; DisconAd ) echo 
"https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt" >> "${adlistFile}";; - HostsFile ) echo "https://hosts-file.net/ad_servers.txt" >> "${adlistFile}";; esac } From 7b15a88dc45e4bda8e52a9ed6c64c1c7c44a9882 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 31 Mar 2020 18:36:40 +0000 Subject: [PATCH 337/366] Strip comments from downloaded lists instead of discarding lines with comments altogether Signed-off-by: DL6ER --- gravity.sh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/gravity.sh b/gravity.sh index c421e832..2afbb17b 100755 --- a/gravity.sh +++ b/gravity.sh @@ -566,12 +566,14 @@ gravity_ParseFileIntoDomains() { # It also helps with debugging so each stage of the script can be researched more in depth # 1) Remove carriage returns # 2) Convert all characters to lowercase - # 3) Remove lines containing "#" or "/" - # 4) Remove leading tabs, spaces, etc. - # 5) Delete lines not matching domain names + # 3) Remove comments (text starting with "#", include possible spaces before the hash sign) + # 4) Remove lines containing "/" + # 5) Remove leading tabs, spaces, etc. + # 6) Delete lines not matching domain names < "${source}" tr -d '\r' | \ tr '[:upper:]' '[:lower:]' | \ - sed -r '/(\/|#).*$/d' | \ + sed 's/\s*#.*//g' | \ + sed -r '/(\/).*$/d' | \ sed -r 's/^.*\s+//g' | \ sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}" chmod 644 "${destination}" From 7d19ee1b2575f90b7a42ee390b5561fe6908250a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Tue, 31 Mar 2020 21:48:10 +0100 Subject: [PATCH 338/366] validate blocklist URL before adding to the database (#3237) Signed-off-by: Adam Warner Co-authored-by: DL6ER --- advanced/Scripts/webpage.sh | 34 +++++++++++++++++++++++++--------- gravity.sh | 9 ++++++++- 2 files changed, 33 insertions(+), 10 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index aab90c35..2b70249e 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -401,22 +401,38 @@ SetWebUILayout() { change_setting "WEBUIBOXEDLAYOUT" "${args[2]}" } +CheckUrl(){ + local regex + # Check for characters NOT allowed in URLs + regex="[^a-zA-Z0-9:/?&%=~._-]" + if [[ "${1}" =~ ${regex} ]]; then + return 1 + else + return 0 + fi +} + CustomizeAdLists() { local address address="${args[3]}" local comment comment="${args[4]}" - if [[ "${args[2]}" == "enable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" - elif [[ "${args[2]}" == "disable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" - elif [[ "${args[2]}" == "add" ]]; then - sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" - elif [[ "${args[2]}" == "del" ]]; then - sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + if CheckUrl "${address}"; then + if [[ "${args[2]}" == "enable" ]]; then + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" + elif [[ "${args[2]}" == "disable" ]]; then + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" + elif [[ "${args[2]}" == "add" ]]; then + sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" + elif [[ "${args[2]}" == "del" ]]; then + sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + else + echo "Not permitted" + return 1 + fi else - echo "Not permitted" + echo 
"Invalid Url" return 1 fi } diff --git a/gravity.sh b/gravity.sh index c421e832..cf3f9299 100755 --- a/gravity.sh +++ b/gravity.sh @@ -374,7 +374,14 @@ gravity_DownloadBlocklists() { esac echo -e " ${INFO} Target: ${url}" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" + local regex + # Check for characters NOT allowed in URLs + regex="[^a-zA-Z0-9:/?&%=~._-]" + if [[ "${url}" =~ ${regex} ]]; then + echo -e " ${CROSS} Invalid Target" + else + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" + fi echo "" done From d1caad76d832eca713352826392917fa3f4a23dc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 Apr 2020 17:19:32 +0000 Subject: [PATCH 339/366] Do not flush neigh cache as this is known to create a number of issues. The better aproach to this is to manually flush the ARP cache by either restarting or calling "ip neigh flush all". Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 7 ------- 1 file changed, 7 deletions(-) diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index aa45f9ad..b6b552c9 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -36,13 +36,6 @@ flushARP(){ echo -ne " ${INFO} Flushing network table ..." fi - # Flush ARP cache to avoid re-adding of dead entries - if ! output=$(ip neigh flush all 2>&1); then - echo -e "${OVER} ${CROSS} Failed to clear ARP cache" - echo " Output: ${output}" - return 1 - fi - # Truncate network_addresses table in pihole-FTL.db # This needs to be done before we can truncate the network table due to # foreign key contraints From 16f664cdb431070f716cf68833a715901e8c3677 Mon Sep 17 00:00:00 2001 From: Antoine Tenart Date: Wed, 1 Apr 2020 18:24:15 +0200 Subject: [PATCH 340/366] basic_install: remove remaining references to hosts-file.net Commit dc35709a1b3a ("Remove hosts-file.net from default lists") left a few references to hosts-file.net. Removes them. Signed-off-by: Antoine Tenart --- automated install/basic-install.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index a8ac91f3..e25f7389 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1206,7 +1206,7 @@ chooseBlocklists() { mv "${adlistFile}" "${adlistFile}.old" fi # Let user select (or not) blocklists via a checklist - cmd=(whiptail --separate-output --checklist "Pi-hole relies on third party lists in order to block ads.\\n\\nYou can use the suggestions below, and/or add your own after installation\\n\\nTo deselect any list, use the arrow keys and spacebar" "${r}" "${c}" 6) + cmd=(whiptail --separate-output --checklist "Pi-hole relies on third party lists in order to block ads.\\n\\nYou can use the suggestions below, and/or add your own after installation\\n\\nTo deselect any list, use the arrow keys and spacebar" "${r}" "${c}" 5) # In an array, show the options available (all off by default): options=(StevenBlack "StevenBlack's Unified Hosts List" on MalwareDom "MalwareDomains" on @@ -1250,7 +1250,6 @@ installDefaultBlocklists() { appendToListsFile Cameleon appendToListsFile DisconTrack appendToListsFile DisconAd - appendToListsFile HostsFile } # Check if /etc/dnsmasq.conf is from pi-hole. 
If so replace with an original and install new in .d directory From ebbb7168a4c288e61eac30da1c0fe3df71386b22 Mon Sep 17 00:00:00 2001 From: yubiuser Date: Sat, 4 Apr 2020 21:45:09 +0200 Subject: [PATCH 341/366] add [options] for pihole restartdns MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- manpages/pihole.8 | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/manpages/pihole.8 b/manpages/pihole.8 index ed012092..9ee50110 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -1,4 +1,4 @@ -.TH "Pi-hole" "8" "Pi-hole" "Pi-hole" "May 2018" +.TH "Pi-hole" "8" "Pi-hole" "Pi-hole" "April 2020" .SH "NAME" Pi-hole : A black-hole for internet advertisements @@ -43,7 +43,7 @@ pihole -g\fR .br pihole status .br -pihole restartdns\fR +pihole restartdns\fR [options] .br \fBpihole\fR (\fBenable\fR|\fBdisable\fR [time]) .br @@ -260,9 +260,16 @@ Available commands and options: #m Disable Pi-hole functionality for # minute(s) .br -\fBrestartdns\fR +\fBrestartdns\fR [options] .br - Restart Pi-hole subsystems + Full restart Pi-hole subsystems +.br + + (restart options): +.br + reload Updates the lists, flushes dnsmasq 's DNS cache +.br + reload-lists Only update the lists WITHOUT flushing dnsmasq 's DNS cache .br \fBcheckout\fR [repo] [branch] From 3095fd4dd68a02945b32ee1bd2910349216baa3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Sun, 5 Apr 2020 08:49:35 +0200 Subject: [PATCH 342/366] add restart [options] to cli help MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- pihole | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pihole b/pihole index 6e72b4a3..90227e46 100755 --- a/pihole +++ b/pihole @@ -413,7 +413,9 @@ Options: enable Enable Pi-hole subsystems disable Disable Pi-hole subsystems Add '-h' for more info on disable usage - restartdns Restart Pi-hole subsystems + restartdns Full restart Pi-hole subsystems + Add '-- reload' to only updates the lists and flushes dnsmasq 's DNS cache + Add '--reload-lists' to only update the lists WITHOUT flushing dnsmasq 's DNS cache checkout Switch Pi-hole subsystems to a different Github branch Add '-h' for more info on checkout usage arpflush Flush information stored in Pi-hole's network tables"; From de42669bb7fe5a1ef31033a90a778292dee99cba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6nig?= Date: Sun, 5 Apr 2020 08:56:10 +0200 Subject: [PATCH 343/366] fix typo in pihole help MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Christian König --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index 90227e46..e03e29c0 100755 --- a/pihole +++ b/pihole @@ -414,7 +414,7 @@ Options: disable Disable Pi-hole subsystems Add '-h' for more info on disable usage restartdns Full restart Pi-hole subsystems - Add '-- reload' to only updates the lists and flushes dnsmasq 's DNS cache + Add '--reload' to only updates the lists and flushes dnsmasq 's DNS cache Add '--reload-lists' to only update the lists WITHOUT flushing dnsmasq 's DNS cache checkout Switch Pi-hole subsystems to a different Github branch Add '-h' for more info on checkout usage From 2de5362adc2c1c780eac1ab39e466875143091d5 Mon Sep 17 00:00:00 2001 From: M4x Date: Sun, 5 Apr 2020 17:20:35 +0800 Subject: [PATCH 344/366] Sanitize email address in case of security issues 
(#3254) * Sanitize email address in case of security issues Signed-off-by: bash-c --- advanced/Scripts/webpage.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 2b70249e..f0f8bc31 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -517,6 +517,13 @@ Options: fi if [[ -n "${args[2]}" ]]; then + + # Sanitize email address in case of security issues + if [[ ! "${args[2]}" =~ ^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$ ]]; then + echo -e " ${CROSS} Invalid email address" + exit 0 + fi + change_setting "ADMIN_EMAIL" "${args[2]}" echo -e " ${TICK} Setting admin contact to ${args[2]}" else From d27a565d3950bf0fdb8011b1be7f98e3c8940ee0 Mon Sep 17 00:00:00 2001 From: yubiuser Date: Sun, 5 Apr 2020 11:44:44 +0200 Subject: [PATCH 345/366] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: DL6ER Signed-off-by: Christian König --- manpages/pihole.8 | 4 ++-- pihole | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/manpages/pihole.8 b/manpages/pihole.8 index 9ee50110..84449429 100644 --- a/manpages/pihole.8 +++ b/manpages/pihole.8 @@ -267,9 +267,9 @@ Available commands and options: (restart options): .br - reload Updates the lists, flushes dnsmasq 's DNS cache + reload Updates the lists and flushes DNS cache .br - reload-lists Only update the lists WITHOUT flushing dnsmasq 's DNS cache + reload-lists Updates the lists WITHOUT flushing the DNS cache .br \fBcheckout\fR [repo] [branch] diff --git a/pihole b/pihole index e03e29c0..c0f916b2 100755 --- a/pihole +++ b/pihole @@ -414,8 +414,8 @@ Options: disable Disable Pi-hole subsystems Add '-h' for more info on disable usage restartdns Full restart Pi-hole subsystems - Add '--reload' to only updates the lists and flushes dnsmasq 's DNS cache - Add '--reload-lists' to only update the lists WITHOUT flushing dnsmasq 's DNS cache + Add 'reload' to update the lists and flush the cache without restarting the DNS server + Add 'reload-lists' to only update the lists WITHOUT flushing the cache or restarting the DNS server checkout Switch Pi-hole subsystems to a different Github branch Add '-h' for more info on checkout usage arpflush Flush information stored in Pi-hole's network tables"; From a9b19df4ec2b99442704cf2b7656908e3e10bc14 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 5 Apr 2020 12:28:33 +0100 Subject: [PATCH 346/366] expand email validation regex to catch more valid emails see comments on PR #3254 Signed-off-by: Adam Warner --- advanced/Scripts/webpage.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index f0f8bc31..b60428f6 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -519,7 +519,9 @@ Options: if [[ -n "${args[2]}" ]]; then # Sanitize email address in case of security issues - if [[ ! "${args[2]}" =~ ^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$ ]]; then + local regex + regex="^[a-z0-9!#\$%&'*+/=?^_\`{|}~-]+(\.[a-z0-9!#$%&'*+/=?^_\`{|}~-]+)*@([a-z0-9]([a-z0-9-]*[a-z0-9])?\.)+[a-z0-9]([a-z0-9-]*[a-z0-9])?\$" + if [[ ! 
"${args[2]}" =~ ${regex} ]]; then echo -e " ${CROSS} Invalid email address" exit 0 fi From b6ac1585ec3c093c471b64485b1da36346a9a58a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 5 Apr 2020 12:29:45 +0100 Subject: [PATCH 347/366] add regex attribution Signed-off-by: Adam Warner --- advanced/Scripts/webpage.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index b60428f6..5279cab3 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -519,6 +519,7 @@ Options: if [[ -n "${args[2]}" ]]; then # Sanitize email address in case of security issues + # Regex from https://stackoverflow.com/a/2138832/4065967 local regex regex="^[a-z0-9!#\$%&'*+/=?^_\`{|}~-]+(\.[a-z0-9!#$%&'*+/=?^_\`{|}~-]+)*@([a-z0-9]([a-z0-9-]*[a-z0-9])?\.)+[a-z0-9]([a-z0-9-]*[a-z0-9])?\$" if [[ ! "${args[2]}" =~ ${regex} ]]; then From 26f71e4dbe08517ebd35cc972b70462bb1823e5e Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 5 Apr 2020 12:34:14 +0100 Subject: [PATCH 348/366] accidentally a space Signed-off-by: Adam Warner --- advanced/Scripts/webpage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 5279cab3..3a04bbd7 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -522,7 +522,7 @@ Options: # Regex from https://stackoverflow.com/a/2138832/4065967 local regex regex="^[a-z0-9!#\$%&'*+/=?^_\`{|}~-]+(\.[a-z0-9!#$%&'*+/=?^_\`{|}~-]+)*@([a-z0-9]([a-z0-9-]*[a-z0-9])?\.)+[a-z0-9]([a-z0-9-]*[a-z0-9])?\$" - if [[ ! "${args[2]}" =~ ${regex} ]]; then + if [[ ! "${args[2]}" =~ ${regex} ]]; then echo -e " ${CROSS} Invalid email address" exit 0 fi From 308eb5eda5c32fd262c09a81c5df7ac2433886d9 Mon Sep 17 00:00:00 2001 From: Matthias Schoettle Date: Tue, 10 Mar 2020 14:31:05 -0400 Subject: [PATCH 349/366] Fixes broken blocking page and landing page when changing server port and/or hostname. See issues #2195 and #2720. Signed-off-by: Matthias Schoettle --- advanced/index.php | 37 +++++++++++++++++++++++++------------ 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index b0c4a7c3..4f2a17f7 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -6,8 +6,8 @@ * This file is copyright under the latest version of the EUPL. * Please see LICENSE file for your rights under this license. */ -// Sanitise HTTP_HOST output -$serverName = htmlspecialchars($_SERVER["HTTP_HOST"]); +// Sanitize SERVER_NAME output +$serverName = htmlspecialchars($_SERVER["SERVER_NAME"]); // Remove external ipv6 brackets if any $serverName = preg_replace('/^\[(.*)\]$/', '${1}', $serverName); @@ -50,16 +50,24 @@ function setHeader($type = "x") { } // Determine block page type -if ($serverName === "pi.hole") { +if ($serverName === "pi.hole" + || (!empty($_SERVER["VIRTUAL_HOST"]) && $serverName === $_SERVER["VIRTUAL_HOST"])) { // Redirect to Web Interface exit(header("Location: /admin")); } elseif (filter_var($serverName, FILTER_VALIDATE_IP) || in_array($serverName, $authorizedHosts)) { // Set Splash Page output $splashPage = " - + + $viewPort - -
Pi-hole: Your black hole for Internet advertisements
Did you mean to go to the admin panel? + + + +
+ Pi-hole: Your black hole for Internet advertisements
+ Did you mean to go to the admin panel? + + "; // Set splash/landing page based off presence of $landPage @@ -68,7 +76,7 @@ if ($serverName === "pi.hole") { // Unset variables so as to not be included in $landPage unset($serverName, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort); - // Render splash/landing page when directly browsing via IP or authorised hostname + // Render splash/landing page when directly browsing via IP or authorized hostname exit($renderPage); } elseif ($currentUrlExt === "js") { // Serve Pi-hole Javascript for blocked domains requesting JS @@ -131,7 +139,12 @@ ini_set("default_socket_timeout", 3); function queryAds($serverName) { // Determine the time it takes while querying adlists $preQueryTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]; - $queryAds = file("http://127.0.0.1/admin/scripts/pi-hole/php/queryads.php?domain=$serverName&bp", FILE_IGNORE_NEW_LINES); + $queryAdsURL = sprintf( + "http://127.0.0.1:%s/admin/scripts/pi-hole/php/queryads.php?domain=%s&bp", + $_SERVER["SERVER_PORT"], + $serverName + ); + $queryAds = file($queryAdsURL, FILE_IGNORE_NEW_LINES); $queryAds = array_values(array_filter(preg_replace("/data:\s+/", "", $queryAds))); $queryTime = sprintf("%.0f", (microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]) - $preQueryTime); @@ -209,7 +222,7 @@ $phVersion = exec("cd /etc/.pihole/ && git describe --long --tags"); if (explode("-", $phVersion)[1] != "0") $execTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]; -// Please Note: Text is added via CSS to allow an admin to provide a localised +// Please Note: Text is added via CSS to allow an admin to provide a localized // language without the need to edit this file setHeader(); @@ -226,10 +239,10 @@ setHeader(); - - + + ● <?=$serverName ?> - +