#!/usr/bin/env bash
# shellcheck disable=SC1090
# Pi-hole: A black hole for Internet advertisements
# (c) 2018 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Query Domain Lists
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# Globals
piholeDir="/etc/pihole"
gravityDBfile="${piholeDir}/gravity.db"
wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf"
options="$*"
adlist=""
all=""
exact=""
blockpage=""
matchType="match"

colfile="/opt/pihole/COL_TABLE"
source "${colfile}"

# Print each subdomain
# e.g: foo.bar.baz.com = "foo.bar.baz.com bar.baz.com baz.com com"
processWildcards() {
    IFS="." read -r -a array <<< "${1}"
    for (( i=${#array[@]}-1; i>=0; i-- )); do
        ar=""
        for (( j=${#array[@]}-1; j>${#array[@]}-i-2; j-- )); do
            if [[ $j == $((${#array[@]}-1)) ]]; then
                ar="${array[$j]}"
            else
                ar="${array[$j]}.${ar}"
            fi
        done
        echo "${ar}"
    done
}

# Scan an array of files for matching strings
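# Usage (illustrative): scanList "example.com" "${lists[*]}" "exact"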
scanList(){
    # Escape full stops
    local domain="${1//./\\.}" lists="${2}" type="${3:-}"

    # Prevent grep from printing file path
    cd "$piholeDir" || exit 1

    # Prevent grep -i matching slowly: http://bit.ly/2xFXtUX
    export LC_CTYPE=C

    # /dev/null forces filename to be printed when only one list has been generated
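    # Note: ${lists} is intentionally left unquoted below so it word-splits into multiple filenames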
    # shellcheck disable=SC2086
    case "${type}" in
        "exact" ) grep -i -E -l "(^|(?<!#)\\s)${domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
        "wc"    ) grep -i -o -m 1 "/${domain}/" ${lists} 2>/dev/null;;
        *       ) grep -i "${domain}" ${lists} /dev/null 2>/dev/null;;
    esac
}

if [[ "${options}" == "-h" ]] || [[ "${options}" == "--help" ]]; then
    echo "Usage: pihole -q [option] <domain>
Example: 'pihole -q -exact domain.com'
Query the adlists for a specified domain

Options:
  -adlist             Print the name of the block list URL
  -exact              Search the block lists for exact domain matches
  -all                Return all query matches within a block list
  -h, --help          Show this help dialog"
    exit 0
fi

# Handle valid options
if [[ "${options}" == *"-bp"* ]]; then
    exact="exact"; blockpage=true
else
    [[ "${options}" == *"-adlist"* ]] && adlist=true
    [[ "${options}" == *"-all"* ]] && all=true
    if [[ "${options}" == *"-exact"* ]]; then
        exact="exact"; matchType="exact ${matchType}"
    fi
fi

# Strip valid options, leaving only the domain and invalid options
# This allows users to place the options before or after the domain
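# e.g. (illustrative) "-exact example.com" is reduced to "example.com"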
options=$(sed -E 's/ ?-(bp|adlists?|all|exact) ?//g' <<< "${options}")

# Handle remaining options
# If $options contains non-ASCII characters, convert to punycode
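# e.g. (illustrative) "bücher.example" becomes "xn--bcher-kva.example"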
case "${options}" in
    ""             ) str="No domain specified";;
    *" "*          ) str="Unknown query option specified";;
    *[![:ascii:]]* ) domainQuery=$(idn2 "${options}");;
    *              ) domainQuery="${options}";;
esac

if [[ -n "${str:-}" ]]; then
    echo -e "${str}${COL_NC}\\nTry 'pihole -q --help' for more information."
    exit 1
fi

scanDatabaseTable() {
    local domain table type querystr result
    domain="${1}"
    table="${2}"
    type="${3:-}"

    # As underscores are legitimate parts of domains, we escape them when using the LIKE operator.
    # Underscores are a SQLite wildcard matching exactly one character. We obviously want to suppress this
    # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by a '\' should be matched
    # as a literal underscore character.
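    # e.g. (illustrative) querying "foo_bar.example" produces the pattern '%foo\_bar.example%'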
    case "${type}" in
        "exact" ) querystr="SELECT domain FROM vw_${table} WHERE domain = '${domain}'";;
        *       ) querystr="SELECT domain FROM vw_${table} WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\'";;
    esac

    # Send prepared query to gravity database
    result="$(sqlite3 "${gravityDBfile}" "${querystr}" 2> /dev/null)"
    if [[ -n "${result}" ]]; then
        # Prepend listname (separated by a colon) if we found at least one result
        # and output result
        results="$(sed "s/^/${table}:/g;" <<< "${result}")"
    else
        # Output empty string as the database query didn't return any result
        return
    fi
    mapfile -t results <<< "${results}"
    if [[ -n "${results[*]}" ]]; then
        wbMatch=true
        # Loop through each result in order to print unique file title once
        for result in "${results[@]}"; do
            fileName="${result%%:*}"
            if [[ -n "${blockpage}" ]]; then
                echo "π ${result}"
                exit 0
            elif [[ -n "${exact}" ]]; then
                echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
            else
                # Only print filename title once per file
                if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
                    echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
                    fileName_prev="${fileName}"
                fi
                echo " ${result#*:}"
            fi
        done
    fi
}

# Scan Whitelist and Blacklist
scanDatabaseTable "${domainQuery}" "whitelist" "${exact}"
scanDatabaseTable "${domainQuery}" "blacklist" "${exact}"

# Scan Wildcards
if [[ -e "${wildcardlist}" ]]; then
    # Determine all subdomains, domain and TLDs
    mapfile -t wildcards <<< "$(processWildcards "${domainQuery}")"
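    # e.g. for "foo.bar.com" the loop below checks "foo.bar.com", "bar.com" and "com"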
    for match in "${wildcards[@]}"; do
        # Search wildcard list for matches
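        # (wildcard entries are dnsmasq address directives, e.g. "address=/example.com/", hence the /domain/ pattern)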
        mapfile -t results <<< "$(scanList "${match}" "${wildcardlist}" "wc")"
        if [[ -n "${results[*]}" ]]; then
            if [[ -z "${wcMatch:-}" ]] && [[ -z "${blockpage}" ]]; then
                wcMatch=true
                echo " ${matchType^} found in ${COL_BOLD}Wildcards${COL_NC}:"
            fi
            case "${blockpage}" in
                true ) echo "π ${wildcardlist##*/}"; exit 0;;
                *    ) echo " *.${match}";;
            esac
        fi
    done
fi

# Get version sorted *.domains filenames (without dir path)
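# (sort -V orders numerically, e.g. list.2.* comes before list.10.*)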
lists=("$(cd "$piholeDir" || exit 0; printf "%s\\n" -- *.domains | sort -V)")

# Query blocklists for occurrences of domain
mapfile -t results <<< "$(scanList "${domainQuery}" "${lists[*]}" "${exact}")"

# Handle notices
if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then
    echo -e " ${INFO} No ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC} within the block lists"
    exit 0
elif [[ -z "${results[*]}" ]]; then
    # Result found in WL/BL/Wildcards
    exit 0
elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then
    echo -e " ${INFO} Over 100 ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC}
        This can be overridden using the -all option"
    exit 0
fi

# Remove unwanted content from non-exact $results
if [[ -z "${exact}" ]]; then
    # Delete lines starting with #
    # Remove comments after domain
    # Remove hosts format IP address
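    # e.g. (illustrative) "list.0.example.domains:0.0.0.0 ads.example.com # comment"
    #      becomes        "list.0.example.domains:ads.example.com"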
    mapfile -t results <<< "$(IFS=$'\n'; sed \
        -e "/:#/d" \
        -e "s/[ \\t]#.*//g" \
        -e "s/:.*[ \\t]/:/g" \
        <<< "${results[*]}")"
    # Exit if result was in a comment
    [[ -z "${results[*]}" ]] && exit 0
fi

# Get adlist URLs as an array
if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
    # Retrieve source URLs from gravity database
    mapfile -t adlists <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlists;" 2> /dev/null)"
fi

# Print "Exact matches for" title
if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then
    plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es"
    echo " ${matchType^}${plural} for ${COL_BOLD}${domainQuery}${COL_NC} found in:"
fi

for result in "${results[@]}"; do
    fileName="${result/:*/}"

    # Determine *.domains URL using filename's number
    if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
        fileNum="${fileName/list./}"; fileNum="${fileNum%%.*}"
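        # e.g. (illustrative) fileName="list.4.example.com.domains" yields fileNum="4"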
        fileName="${adlists[$fileNum]}"

        # A discrepancy occurs when adlists have been modified but gravity has not been run
        if [[ -z "${fileName}" ]]; then
            fileName="${COL_LIGHT_RED}(no associated adlists URL found)${COL_NC}"
        fi
    fi

    if [[ -n "${blockpage}" ]]; then
        echo "${fileNum} ${fileName}"
    elif [[ -n "${exact}" ]]; then
        echo " ${fileName}"
    else
        if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
            count=""
            echo " ${matchType^} found in ${COL_BOLD}${fileName}${COL_NC}:"
            fileName_prev="${fileName}"
        fi
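        # ":" is the shell no-op; its argument still evaluates the arithmetic expansion, incrementing count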
        : $((count++))

        # Print matching domain if $max_count has not been reached
        [[ -z "${all}" ]] && max_count="50"
        if [[ -z "${all}" ]] && [[ "${count}" -ge "${max_count}" ]]; then
            [[ "${count}" -gt "${max_count}" ]] && continue
            echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}"
        else
            echo " ${result#*:}"
        fi
    fi
done

exit 0