#!/usr/bin/env sh

# shellcheck disable=SC1090

# Ignore warning about `local` being undefined in POSIX
# shellcheck disable=SC3043
# https://github.com/koalaman/shellcheck/wiki/SC3043#exceptions

# Pi-hole: A black hole for Internet advertisements
# (c) 2023 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Search Adlists
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.
# Globals
PI_HOLE_INSTALL_DIR="/opt/pihole"
max_results="20"
partial="false"
domain=""

# Source color table
colfile="/opt/pihole/COL_TABLE"
. "${colfile}"

# Source api functions
. "${PI_HOLE_INSTALL_DIR}/api.sh"
Help() {
    # Show usage information for `pihole -q` and exit successfully.
    printf '%s\n' "Usage: pihole -q [option] <domain>
Example: 'pihole -q --partial domain.com'
Query the adlists for a specified domain

Options:
  --partial            Search the adlists for partially matching domains
  --all                Return all query matches within the adlists
  -h, --help           Show this help dialog"
    exit 0
}
GenerateOutput() {
    # Pretty-print the JSON returned by FTL's /search endpoint.
    # $1 - raw JSON response; also reads the globals ${partial}, ${domain}
    # and the COL_* color variables from the sourced color table.
    local response list_json gravity_json gravity_count list_count match_word
    local gravity_csv list_csv row found_domain
    response="${1}"

    # Flatten the allow/deny list hits into objects of {domain, type}
    list_json=$(echo "${response}" | jq '.search.domains | [.[] | {domain: .domain, type: .type}]')

    # Group the gravity hits per adlist: {address, [domains...]}
    gravity_json=$(echo "${response}" | jq '.search.gravity | group_by(.address) | map({ address: (.[0].address), domains: [.[] | .domain] })')

    # Count the objects in each result set
    gravity_count=$(echo "${gravity_json}" | jq length )
    list_count=$(echo "${list_json}" | jq length )

    if [ "${partial}" = true ]; then
        match_word="partially"
    else
        match_word="exactly"
    fi

    # --- Results from the allow/deny lists ---
    printf "%s\n\n" "Found ${list_count} domains ${match_word} matching '${COL_BLUE}${domain}${COL_NC}'."
    if [ "${list_count}" -gt 0 ]; then
        # One "domain,type" line per hit; jq's @csv is avoided because it
        # would quote every field individually
        list_csv=$(echo "${list_json}" | jq --raw-output '.[] | [.domain, .type] | join(",")' )

        # Split each line into domain and type at the ','
        echo "${list_csv}" | while read -r row; do
            printf "%s\n\n" " - ${COL_GREEN}${row%,*}${COL_NC} (type: exact ${row#*,} domain)"
        done
    fi

    # --- Results from gravity ---
    printf "%s\n\n" "Found ${gravity_count} adlists ${match_word} matching '${COL_BLUE}${domain}${COL_NC}'."
    if [ "${gravity_count}" -gt 0 ]; then
        # One "URL,domain,domain,..." line per adlist; again no @csv to
        # avoid per-field quoting
        gravity_csv=$(echo "${gravity_json}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")' )

        echo "${gravity_csv}" | while read -r row; do
            # First field is the adlist URL
            printf "%s\n\n" " - ${COL_BLUE}${row%%,*}${COL_NC}"

            # Strip the URL, keeping "domain,domain,..."
            row=${row#*,}

            # Print and consume one domain at a time until the string is empty
            while [ ${#row} -gt 0 ]; do
                found_domain=${row%%,*}
                printf ' - %s\n' "${COL_GREEN}${found_domain}${COL_NC}"
                # Remove the domain and the comma in two steps: the last
                # domain has no trailing comma, so a one-step strip would
                # never empty the string and the loop would not terminate
                row=${row#"${found_domain}"}
                row=${row#,}
            done
            printf "\n\n"
        done
    fi
}
Main() {
    # Query FTL's /search endpoint for the global ${domain} and print
    # the formatted results. Exits with 1 if no domain was given.
    local response

    if [ -z "${domain}" ]; then
        echo "No domain specified"; exit 1
    fi
    # Domains are lowercased and converted to punycode by FTL since
    # https://github.com/pi-hole/FTL/pull/1715 — no need to do it here

    # Make sure the authentication endpoint is reachable before querying
    TestAPIAvailability

    # FTL can be configured so that either all endpoints
    # (webserver.api.localAPIauth) or just the /search endpoint
    # (webserver.api.searchAPIauth) require no authentication. Try the
    # query without authenticating first and only log in on a 401 reply.
    response=$(GetFTLData "search/${domain}?N=${max_results}&partial=${partial}")

    if [ "${response}" = 401 ]; then
        # Unauthenticated — the FTL server requires a session
        Authentication

        # Repeat the query, now authenticated
        response=$(GetFTLData "search/${domain}?N=${max_results}&partial=${partial}")
    fi

    GenerateOutput "${response}"
    DeleteSession
}
# Process all options (if present)
while [ "$#" -gt 0 ]; do
    case "$1" in
        "-h" | "--help")
            Help
            ;;
        "--partial")
            partial="true"
            ;;
        "--all")
            # hard-coded FTL limit
            max_results=10000
            ;;
        *)
            domain=$1
            ;;
    esac
    shift
done

Main "${domain}"