#!/usr/bin/env bash
# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Generates pihole_debug.log to be used for troubleshooting.
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# -e option instructs bash to immediately exit if any command [1] has a non-zero exit status
# -u a reference to any variable you haven't previously defined
# with the exceptions of $* and $@ - is an error, and causes the program to immediately exit
# -o pipefail prevents errors in a pipeline from being masked. If any command in a pipeline fails,
# that return code will be used as the return code of the whole pipeline. By default, the
# pipeline's return code is that of the last command - even if it succeeds
set -o pipefail
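
# For illustration only (not executed): with pipefail set, a pipeline such as
#   grep "pattern" /nonexistent/file | sort
# makes the whole pipeline return grep's failing exit status, whereas without
# pipefail the pipeline would report the exit status of 'sort' (which succeeds).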
#IFS=$'\n\t'
######## GLOBAL VARS ########
# These variables would normally be next to the other files
# but we need them to be first in order to get the colors needed for the script output
PIHOLE_SCRIPTS_DIRECTORY="/opt/pihole"
PIHOLE_COLTABLE_FILE="${PIHOLE_SCRIPTS_DIRECTORY}/COL_TABLE"

# These provide the colors we need for making the log more readable
if [[ -f ${PIHOLE_COLTABLE_FILE} ]]; then
    source ${PIHOLE_COLTABLE_FILE}
else
    COL_NC='\e[0m' # No Color
    COL_YELLOW='\e[1;33m'
    COL_LIGHT_PURPLE='\e[1;35m'
    COL_CYAN='\e[0;36m'
    # Green and red are used by TICK/CROSS below, so define them here as well
    COL_LIGHT_GREEN='\e[1;32m'
    COL_LIGHT_RED='\e[1;31m'
    TICK="[${COL_LIGHT_GREEN}✓${COL_NC}]"
    CROSS="[${COL_LIGHT_RED}✗${COL_NC}]"
    INFO="[i]"
    DONE="${COL_LIGHT_GREEN} done!${COL_NC}"
    OVER="\r\033[K"
fi
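
# For illustration only: '\e[1;32m' and '\e[1;31m' are the standard ANSI escape
# sequences for bright green and bright red; when COL_TABLE is present it is
# expected to supply these same color variables instead of the fallback above.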

# FAQ URLs for use in showing the debug log
FAQ_UPDATE_PI_HOLE="${COL_CYAN}https://discourse.pi-hole.net/t/how-do-i-update-pi-hole/249${COL_NC}"
FAQ_CHECKOUT_COMMAND="${COL_CYAN}https://discourse.pi-hole.net/t/the-pihole-command-with-examples/738#checkout${COL_NC}"
FAQ_HARDWARE_REQUIREMENTS="${COL_CYAN}https://discourse.pi-hole.net/t/hardware-software-requirements/273${COL_NC}"
FAQ_HARDWARE_REQUIREMENTS_PORTS="${COL_CYAN}https://discourse.pi-hole.net/t/hardware-software-requirements/273#ports${COL_NC}"
FAQ_GATEWAY="${COL_CYAN}https://discourse.pi-hole.net/t/why-is-a-default-gateway-important-for-pi-hole/3546${COL_NC}"
FAQ_ULA="${COL_CYAN}https://discourse.pi-hole.net/t/use-ipv6-ula-addresses-for-pi-hole/2127${COL_NC}"
FAQ_FTL_COMPATIBILITY="${COL_CYAN}https://github.com/pi-hole/FTL#compatibility-list${COL_NC}"

# Other URLs we may use
FORUMS_URL="${COL_CYAN}https://discourse.pi-hole.net${COL_NC}"
TRICORDER_CONTEST="${COL_CYAN}https://pi-hole.net/2016/11/07/crack-our-medical-tricorder-win-a-raspberry-pi-3/${COL_NC}"

# Port numbers used for uploading the debug log
TRICORDER_NC_PORT_NUMBER=9999
TRICORDER_SSL_PORT_NUMBER=9998

# Directories required by Pi-hole
# https://discourse.pi-hole.net/t/what-files-does-pi-hole-use/1684
CORE_GIT_DIRECTORY="/etc/.pihole"
CRON_D_DIRECTORY="/etc/cron.d"
DNSMASQ_D_DIRECTORY="/etc/dnsmasq.d"
PIHOLE_DIRECTORY="/etc/pihole"
PIHOLE_SCRIPTS_DIRECTORY="/opt/pihole"
BIN_DIRECTORY="/usr/local/bin"
RUN_DIRECTORY="/run"
LOG_DIRECTORY="/var/log"
WEB_SERVER_LOG_DIRECTORY="${LOG_DIRECTORY}/lighttpd"
WEB_SERVER_CONFIG_DIRECTORY="/etc/lighttpd"
HTML_DIRECTORY="/var/www/html"
WEB_GIT_DIRECTORY="${HTML_DIRECTORY}/admin"
BLOCK_PAGE_DIRECTORY="${HTML_DIRECTORY}/pihole"

# Files required by Pi-hole
# https://discourse.pi-hole.net/t/what-files-does-pi-hole-use/1684
PIHOLE_CRON_FILE="${CRON_D_DIRECTORY}/pihole"
PIHOLE_DNS_CONFIG_FILE="${DNSMASQ_D_DIRECTORY}/01-pihole.conf"
PIHOLE_DHCP_CONFIG_FILE="${DNSMASQ_D_DIRECTORY}/02-pihole-dhcp.conf"
PIHOLE_WILDCARD_CONFIG_FILE="${DNSMASQ_D_DIRECTORY}/03-wildcard.conf"
WEB_SERVER_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/lighttpd.conf"
WEB_SERVER_CUSTOM_CONFIG_FILE="${WEB_SERVER_CONFIG_DIRECTORY}/external.conf"
PIHOLE_DEFAULT_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.default"
PIHOLE_USER_DEFINED_AD_LISTS="${PIHOLE_DIRECTORY}/adlists.list"
PIHOLE_BLACKLIST_FILE="${PIHOLE_DIRECTORY}/blacklist.txt"
PIHOLE_BLOCKLIST_FILE="${PIHOLE_DIRECTORY}/gravity.list"
PIHOLE_INSTALL_LOG_FILE="${PIHOLE_DIRECTORY}/install.log"
PIHOLE_RAW_BLOCKLIST_FILES="${PIHOLE_DIRECTORY}/list.*"
PIHOLE_LOCAL_HOSTS_FILE="${PIHOLE_DIRECTORY}/local.list"
PIHOLE_LOGROTATE_FILE="${PIHOLE_DIRECTORY}/logrotate"
PIHOLE_SETUP_VARS_FILE="${PIHOLE_DIRECTORY}/setupVars.conf"
PIHOLE_WHITELIST_FILE="${PIHOLE_DIRECTORY}/whitelist.txt"
PIHOLE_COMMAND="${BIN_DIRECTORY}/pihole"
PIHOLE_COLTABLE_FILE="${BIN_DIRECTORY}/COL_TABLE"
FTL_PID="${RUN_DIRECTORY}/pihole-FTL.pid"
FTL_PORT="${RUN_DIRECTORY}/pihole-FTL.port"
PIHOLE_LOG="${LOG_DIRECTORY}/pihole.log"
PIHOLE_LOG_GZIPS="${LOG_DIRECTORY}/pihole.log.[0-9].*"
PIHOLE_DEBUG_LOG="${LOG_DIRECTORY}/pihole_debug.log"
PIHOLE_DEBUG_LOG_SANITIZED="${LOG_DIRECTORY}/pihole_debug-sanitized.log"
PIHOLE_FTL_LOG="${LOG_DIRECTORY}/pihole-FTL.log"
PIHOLE_WEB_SERVER_ACCESS_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/access.log"
PIHOLE_WEB_SERVER_ERROR_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/error.log"

# An array of operating system "pretty names" that we officially support
# We can loop through the array at any time to see if it matches a value
SUPPORTED_OS=("Raspbian" "Ubuntu" "Fedora" "Debian" "CentOS")

# Store Pi-hole's processes in an array for easy use and parsing
PIHOLE_PROCESSES=("dnsmasq" "lighttpd" "pihole-FTL")

# Store the required directories in an array so it can be parsed through
REQUIRED_DIRECTORIES=(${CORE_GIT_DIRECTORY}
${CRON_D_DIRECTORY}
${DNSMASQ_D_DIRECTORY}
${PIHOLE_DIRECTORY}
${PIHOLE_SCRIPTS_DIRECTORY}
${BIN_DIRECTORY}
${RUN_DIRECTORY}
${LOG_DIRECTORY}
${WEB_SERVER_LOG_DIRECTORY}
${WEB_SERVER_CONFIG_DIRECTORY}
${HTML_DIRECTORY}
${WEB_GIT_DIRECTORY}
${BLOCK_PAGE_DIRECTORY})

# Store the required files in an array so it can be parsed through
REQUIRED_FILES=(${PIHOLE_CRON_FILE}
${PIHOLE_DNS_CONFIG_FILE}
${PIHOLE_DHCP_CONFIG_FILE}
${PIHOLE_WILDCARD_CONFIG_FILE}
${WEB_SERVER_CONFIG_FILE}
${PIHOLE_DEFAULT_AD_LISTS}
${PIHOLE_USER_DEFINED_AD_LISTS}
${PIHOLE_BLACKLIST_FILE}
${PIHOLE_BLOCKLIST_FILE}
${PIHOLE_INSTALL_LOG_FILE}
${PIHOLE_RAW_BLOCKLIST_FILES}
${PIHOLE_LOCAL_HOSTS_FILE}
${PIHOLE_LOGROTATE_FILE}
${PIHOLE_SETUP_VARS_FILE}
${PIHOLE_WHITELIST_FILE}
${PIHOLE_COMMAND}
${PIHOLE_COLTABLE_FILE}
${FTL_PID}
${FTL_PORT}
${PIHOLE_LOG}
${PIHOLE_LOG_GZIPS}
${PIHOLE_DEBUG_LOG}
${PIHOLE_FTL_LOG}
${PIHOLE_WEB_SERVER_ACCESS_LOG_FILE}
${PIHOLE_WEB_SERVER_ERROR_LOG_FILE})

source_setup_variables() {
    # Display the current test that is running
    log_write "\n${COL_LIGHT_PURPLE}*** [ INITIALIZING ]${COL_NC} Sourcing setup variables"
    # If the variable file exists,
    if ls "${PIHOLE_SETUP_VARS_FILE}" 1> /dev/null 2>&1; then
        log_write "${INFO} Sourcing ${PIHOLE_SETUP_VARS_FILE}...";
        # source it
        source ${PIHOLE_SETUP_VARS_FILE}
    else
        # If it can't, show an error
        log_write "${PIHOLE_SETUP_VARS_FILE} ${COL_LIGHT_RED}does not exist or cannot be read.${COL_NC}"
    fi
}

make_temporary_log() {
    # Create a random temporary file for the log
    TEMPLOG=$(mktemp /tmp/pihole_temp.XXXXXX)
    # Open handle 3 for templog
    # https://stackoverflow.com/questions/18460186/writing-outputs-to-log-file-and-console
    exec 3>"$TEMPLOG"
    # Delete templog, but allow for addressing via file handle
    # This lets us write to the log without having a temporary file on the drive, which
    # is meant to be a security measure so there is not a lingering file on the drive during the debug process
    rm "$TEMPLOG"
}

log_write() {
    # echo arguments to both the log and the console
    echo -e "${@}" | tee -a /proc/$$/fd/3
}

copy_to_debug_log() {
    # Copy the contents of file descriptor 3 into the debug log
    cat /proc/$$/fd/3 > "${PIHOLE_DEBUG_LOG}"
    # Since we use color codes such as '\e[1;33m', they should be removed before being
    # uploaded to our server, since it can't properly display in color
    # This is accomplished by using sed to remove characters matching that pattern
    # The entire file is then copied over to a sanitized version of the log
    sed 's/\[[0-9;]\{1,5\}m//g' "${PIHOLE_DEBUG_LOG}" > "${PIHOLE_DEBUG_LOG_SANITIZED}"
}
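
# For illustration only (not executed): after make_temporary_log runs, writes to
# file descriptor 3 still land in the (now unlinked) temp file, e.g.:
#   echo "example line" >&3      # captured even though the file was rm'd
#   cat /proc/$$/fd/3            # reads back everything written so far
# This relies on Linux keeping deleted-but-open files reachable via /proc.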

initiate_debug() {
    # Clear the screen so the debug log is readable
    clear
    # Display that the debug process is beginning
    log_write "${COL_LIGHT_PURPLE}*** [ INITIALIZING ]${COL_NC}"
    # Timestamp the start of the log
    log_write "${INFO} $(date "+%Y-%m-%d:%H:%M:%S") debug log has been initiated."
}

# This is a function for visually displaying the current test that is being run.
# Accepts one variable: the name of what is being diagnosed
# Colors do not show in the dashboard, but the icons do: [i], [✓], and [✗]
echo_current_diagnostic() {
    # Colors are used for visually distinguishing each test in the output
    # These colors do not show in the GUI, but the formatting will
    log_write "\n${COL_LIGHT_PURPLE}*** [ DIAGNOSING ]:${COL_NC} ${1}"
}

compare_local_version_to_git_version() {
    # The git directory to check
    local git_dir="${1}"
    # The named component of the project (Core or Web)
    local pihole_component="${2}"
    # If we are checking the Core versions,
    if [[ "${pihole_component}" == "Core" ]]; then
        # We need to search for "Pi-hole" when using pihole -v
        local search_term="Pi-hole"
    elif [[ "${pihole_component}" == "Web" ]]; then
        # We need to search for "AdminLTE" so store it in a variable as well
        local search_term="AdminLTE"
    fi
    # Display what we are checking
    echo_current_diagnostic "${pihole_component} version"
    # Store the error message in a variable in case we want to change and/or reuse it
    local error_msg="git status failed"
    # If the pihole git directory exists,
    if [[ -d "${git_dir}" ]]; then
        # move into it; if we can't, show an error
        cd "${git_dir}" || log_write "${COL_LIGHT_RED}Could not cd into ${git_dir}${COL_NC}"
        if git status &> /dev/null; then
            # The current version the user is on
            local remote_version
            remote_version=$(git describe --tags --abbrev=0);
            # What branch they are on
            local remote_branch
            remote_branch=$(git rev-parse --abbrev-ref HEAD);
            # The commit they are on
            local remote_commit
            remote_commit=$(git describe --long --dirty --tags --always)
            # echo this information out to the user in a nice format
            # If the current version matches what pihole -v produces, the user is up-to-date
            if [[ "${remote_version}" == "$(pihole -v | awk "/${search_term}/ {print \$6}" | cut -d ')' -f1)" ]]; then
                log_write "${TICK} ${pihole_component}: ${COL_LIGHT_GREEN}${remote_version}${COL_NC}"
            # If not,
            else
                # echo the current version in yellow, signifying it's something to take a look at, but not a critical error
                # Also add a URL to an FAQ
                log_write "${INFO} ${pihole_component}: ${COL_YELLOW}${remote_version:-Untagged}${COL_NC} (${FAQ_UPDATE_PI_HOLE})"
            fi

            # If the repo is on the master branch, they are on the stable codebase
            if [[ "${remote_branch}" == "master" ]]; then
                # so the color of the text is green
                log_write "${INFO} Branch: ${COL_LIGHT_GREEN}${remote_branch}${COL_NC}"
            # If it is any other branch, they are in a development branch
            else
                # So show that in yellow, signifying it's something to take a look at, but not a critical error
                log_write "${INFO} Branch: ${COL_YELLOW}${remote_branch:-Detached}${COL_NC} (${FAQ_CHECKOUT_COMMAND})"
            fi
            # echo the current commit
            log_write "${INFO} Commit: ${remote_commit}"
        # If git status failed,
        else
            # Return an error message
            log_write "${error_msg}"
            # and exit with a non zero code
            return 1
        fi
    else
        :
    fi
}

check_ftl_version() {
    local ftl_name="FTL"
    echo_current_diagnostic "${ftl_name} version"
    # Use the built in command to check FTL's version
    FTL_VERSION=$(pihole-FTL version)
    # Compare the current FTL version to the remote version
    if [[ "${FTL_VERSION}" == "$(pihole -v | awk '/FTL/ {print $6}' | cut -d ')' -f1)" ]]; then
        # If they are the same, FTL is up-to-date
        log_write "${TICK} ${ftl_name}: ${COL_LIGHT_GREEN}${FTL_VERSION}${COL_NC}"
    else
        # If not, show it in yellow, signifying there is an update
        log_write "${TICK} ${ftl_name}: ${COL_YELLOW}${FTL_VERSION}${COL_NC} (${FAQ_UPDATE_PI_HOLE})"
    fi
}
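
# For illustration only (not executed): the version checks above parse the output
# of 'pihole -v', which prints lines of the form (version numbers are hypothetical)
#   Pi-hole version is v3.1 (Latest: v3.1)
# awk's field 6 is then "v3.1)" and the trailing parenthesis is removed with cut.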

# Checks the versions of the Pi-hole codebase components
check_component_versions() {
    # Check the Core version, branch, and commit
    compare_local_version_to_git_version "${CORE_GIT_DIRECTORY}" "Core"
    # Check the Web version, branch, and commit
    compare_local_version_to_git_version "${WEB_GIT_DIRECTORY}" "Web"
    # Check the FTL version
    check_ftl_version
}

get_program_version() {
    local program_name="${1}"
    # Create a local variable so this function can be safely reused
    local program_version
    echo_current_diagnostic "${program_name} version"
    # Evaluate the program we are checking; if it is any of the ones below, show the version
    case "${program_name}" in
        "lighttpd") program_version="$(${program_name} -v |& head -n1 | cut -d '/' -f2 | cut -d ' ' -f1)"
            ;;
        "dnsmasq") program_version="$(${program_name} -v |& head -n1 | awk '{print $3}')"
            ;;
        "php") program_version="$(${program_name} -v |& head -n1 | cut -d '-' -f1 | cut -d ' ' -f2)"
            ;;
        # If a match is not found, show an error
        *) echo "Unrecognized program";
    esac
    # If the program does not have a version (the variable is empty)
    if [[ -z "${program_version}" ]]; then
        # Display an error
        log_write "${CROSS} ${COL_LIGHT_RED}${program_name} version could not be detected.${COL_NC}"
    else
        # Otherwise, display the version
        log_write "${INFO} ${program_version}"
    fi
}

# These are the most critical dependencies of Pi-hole, so we check for them
# and their versions, using the functions above.
check_critical_program_versions() {
    # Use the function created earlier and bundle them into one function that checks all the version numbers
    get_program_version "dnsmasq"
    get_program_version "lighttpd"
    get_program_version "php"
}

is_os_supported() {
    local os_to_check="${1}"
    # Strip just the base name of the system using sed
    the_os=$(echo "${os_to_check}" | sed 's/ .*//')
    # If the variable is one of our supported OSes,
    case "${the_os}" in
        # Print it in green
        "Raspbian") log_write "${TICK} ${COL_LIGHT_GREEN}${os_to_check}${COL_NC}";;
        "Ubuntu") log_write "${TICK} ${COL_LIGHT_GREEN}${os_to_check}${COL_NC}";;
        "Fedora") log_write "${TICK} ${COL_LIGHT_GREEN}${os_to_check}${COL_NC}";;
        "Debian") log_write "${TICK} ${COL_LIGHT_GREEN}${os_to_check}${COL_NC}";;
        "CentOS") log_write "${TICK} ${COL_LIGHT_GREEN}${os_to_check}${COL_NC}";;
        # If not, show it in red and link to our software requirements page
        *) log_write "${CROSS} ${COL_LIGHT_RED}${os_to_check}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS})";
    esac
}
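
# For illustration only (not executed): /etc/os-release (one of the /etc/*release
# files read below) typically contains a line such as
#   PRETTY_NAME="Raspbian GNU/Linux 8 (jessie)"
# The parsing below keeps just the quoted value and hands it to is_os_supported.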

get_distro_attributes() {
    # Put the current Internal Field Separator into another variable so it can be restored later
    OLD_IFS="$IFS"
    # Store the distro info in an array and make it global since the OS won't change,
    # but we'll keep it within the function for better unit testing
    IFS=$'\r\n' command eval 'distro_info=( $(cat /etc/*release) )'

    # Set a named variable for better readability
    local distro_attribute
    # For each line found in an /etc/*release file,
    for distro_attribute in "${distro_info[@]}"; do
        # store the key in a variable
        local pretty_name_key=$(echo "${distro_attribute}" | grep "PRETTY_NAME" | cut -d '=' -f1)
        # we need just the OS PRETTY_NAME,
        if [[ "${pretty_name_key}" == "PRETTY_NAME" ]]; then
            # so save it in a variable when we find it
            PRETTY_NAME_VALUE=$(echo "${distro_attribute}" | grep "PRETTY_NAME" | cut -d '=' -f2- | tr -d '"')
            # then pass it as an argument that checks if the OS is supported
            is_os_supported "${PRETTY_NAME_VALUE}"
        else
            # Since we only need the pretty name, we can just skip over anything that is not a match
            :
        fi
    done
    # Set the IFS back to what it was
    IFS="$OLD_IFS"
}

diagnose_operating_system() {
    # error message in a variable so we can easily modify it later (or re-use it)
    local error_msg="Distribution unknown -- most likely you are on an unsupported platform and may run into issues."
    # Display the current test that is running
    echo_current_diagnostic "Operating system"

    # If there is a /etc/*release file, it's probably a supported operating system, so we can
    if ls /etc/*release 1> /dev/null 2>&1; then
        # display the attributes to the user from the function made earlier
        get_distro_attributes
    else
        # If it doesn't exist, it's not a system we currently support, so link to the FAQ
        log_write "${CROSS} ${COL_LIGHT_RED}${error_msg}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS})"
    fi
}

processor_check() {
    echo_current_diagnostic "Processor"
    # Store the processor type in a variable
    PROCESSOR=$(uname -m)
    # If it does not contain a value,
    if [[ -z "${PROCESSOR}" ]]; then
        # we couldn't detect it with uname, so fall back to lscpu and note that
        # the architecture has not been tested with FTL
        PROCESSOR=$(lscpu | awk '/Architecture/ {print $2}')
        log_write "${CROSS} ${COL_LIGHT_RED}${PROCESSOR}${COL_NC} has not been tested with FTL, but may still work: (${FAQ_FTL_COMPATIBILITY})"
    else
        # Check if the architecture is currently supported for FTL
        case "${PROCESSOR}" in
            "amd64") log_write "${TICK} ${COL_LIGHT_GREEN}${PROCESSOR}${COL_NC}"
                ;;
            "armv6l") log_write "${TICK} ${COL_LIGHT_GREEN}${PROCESSOR}${COL_NC}"
                ;;
            "armv6") log_write "${TICK} ${COL_LIGHT_GREEN}${PROCESSOR}${COL_NC}"
                ;;
            "armv7l") log_write "${TICK} ${COL_LIGHT_GREEN}${PROCESSOR}${COL_NC}"
                ;;
            "aarch64") log_write "${TICK} ${COL_LIGHT_GREEN}${PROCESSOR}${COL_NC}"
                ;;
            # Otherwise, just show the processor type
            *) log_write "${INFO} ${PROCESSOR}";
        esac
    fi
}

parse_setup_vars() {
    echo_current_diagnostic "Setup variables"
    # If the file exists,
    if [[ -r "${PIHOLE_SETUP_VARS_FILE}" ]]; then
        # parse it
        parse_file "${PIHOLE_SETUP_VARS_FILE}"
    else
        # If not, show an error
        log_write "${CROSS} ${COL_LIGHT_RED}Could not read ${PIHOLE_SETUP_VARS_FILE}.${COL_NC}"
    fi
}

does_ip_match_setup_vars() {
    # Check for IPv4 or 6
    local protocol="${1}"
    # IP address to check for
    local ip_address="${2}"
    # See what IP is in the setupVars.conf file
    local setup_vars_ip=$(grep "IPV${protocol}_ADDRESS" "${PIHOLE_SETUP_VARS_FILE}" | cut -d '=' -f2)
    # If it's an IPv6 address
    if [[ "${protocol}" == "6" ]]; then
        # Strip off the / (CIDR notation)
        if [[ "${ip_address%/*}" == "${setup_vars_ip}" ]]; then
            # if it matches, show it in green
            log_write "${COL_LIGHT_GREEN}${ip_address%/*}${COL_NC} matches the IP found in ${PIHOLE_SETUP_VARS_FILE}"
        else
            # otherwise show it in red with an FAQ URL
            log_write "${COL_LIGHT_RED}${ip_address%/*}${COL_NC} does not match the IP found in ${PIHOLE_SETUP_VARS_FILE} (${FAQ_ULA})"
        fi
    else
        # if the protocol isn't 6, it's 4 so no need to strip the CIDR notation
        # since it exists in the setupVars.conf that way
        if [[ "${ip_address}" == "${setup_vars_ip}" ]]; then
            # show in green if it matches
            log_write "${COL_LIGHT_GREEN}${ip_address}${COL_NC} matches the IP found in ${PIHOLE_SETUP_VARS_FILE}"
        else
            # otherwise show it in red
            log_write "${COL_LIGHT_RED}${ip_address}${COL_NC} does not match the IP found in ${PIHOLE_SETUP_VARS_FILE} (${FAQ_ULA})"
        fi
    fi
}
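
# For illustration only (not executed): setupVars.conf stores the addresses as
# lines such as (values are hypothetical)
#   IPV4_ADDRESS=192.0.2.10/24
#   IPV6_ADDRESS=fd00::10
# which is why the IPv4 comparison keeps the CIDR suffix and the IPv6 one strips it.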

detect_ip_addresses() {
    # First argument should be a 4 or a 6
    local protocol=${1}
    # Use ip to show the addresses for the chosen protocol
    # Store the values in an array so they can be looped through
    declare -a ip_addr_list=($(ip -${protocol} addr show dev ${PIHOLE_INTERFACE} | awk '{for(i=1;i<=NF;i++) if ($i ~ /^inet/) print $(i+1)}'))
    # If there is something in the IP address list,
    if [[ -n ${ip_addr_list} ]]; then
        # Local iterator
        local i
        # Display the protocol and interface
        log_write "${TICK} IPv${protocol} address(es) bound to the ${PIHOLE_INTERFACE} interface:"
        # Since there may be more than one IP address, loop through the array
        for i in "${!ip_addr_list[@]}"; do
            # For each one in the list, check it against setupVars.conf and print it out
            does_ip_match_setup_vars "${protocol}" "${ip_addr_list[$i]}"
        done
        # Print a blank line just for formatting
        log_write ""
    else
        # If there are no IPs detected, explain that the protocol is not configured
        log_write "${CROSS} ${COL_LIGHT_RED}No IPv${protocol} address(es) found on the ${PIHOLE_INTERFACE}${COL_NC} interface.\n"
        return 1
    fi
    # If the protocol is v6
    if [[ "${protocol}" == "6" ]]; then
        # let the user know that as long as there is one green address, things should be ok
        log_write "   ^ Please note that you may have more than one IP address listed."
        log_write "   As long as one of them is green, and it matches what is in ${PIHOLE_SETUP_VARS_FILE}, there is no need for concern.\n"
        log_write "   The link to the FAQ is for an issue that sometimes occurs when the IPv6 address changes, which is why we check for it.\n"
    fi
}
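
# For illustration only (not executed): 'ip -4 addr show dev <interface>' prints
# lines like (interface name and address are hypothetical)
#   inet 192.0.2.10/24 brd 192.0.2.255 scope global eth0
# and the awk loop above picks out the field that follows "inet"/"inet6".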

ping_ipv4_or_ipv6() {
    # Give the first argument a readable name (a 4 or a 6 should be the argument)
    local protocol="${1}"
    # If the protocol is 6,
    if [[ ${protocol} == "6" ]]; then
        # use ping6
        cmd="ping6"
        # and Google's public IPv6 address
        public_address="2001:4860:4860::8888"
    else
        # Otherwise, just use ping
        cmd="ping"
        # and Google's public IPv4 address
        public_address="8.8.8.8"
    fi
}

ping_gateway() {
    local protocol="${1}"
    # Set the ping command and public address for the chosen protocol
    ping_ipv4_or_ipv6 "${protocol}"
    # Find the default gateway using IPv4 or IPv6
    local gateway
    gateway="$(ip -${protocol} route | grep default | cut -d ' ' -f 3)"
    # If the gateway variable has a value (meaning a gateway was found),
    if [[ -n "${gateway}" ]]; then
        log_write "${INFO} Default IPv${protocol} gateway: ${gateway}"
        # Let the user know we will ping the gateway for a response
        log_write "   * Pinging ${gateway}..."
        # Try to quietly ping the gateway 3 times, with a timeout of 2 seconds,
        # using numeric output only, on the Pi-hole interface
        # If pinging the gateway is not successful,
        if ! ${cmd} -c 3 -W 2 -n ${gateway} -I ${PIHOLE_INTERFACE} >/dev/null; then
            # let the user know
            log_write "${CROSS} ${COL_LIGHT_RED}Gateway did not respond.${COL_NC} ($FAQ_GATEWAY)\n"
            # and return an error code
            return 1
        # Otherwise,
        else
            # show a success
            log_write "${TICK} ${COL_LIGHT_GREEN}Gateway responded.${COL_NC}"
            # and return a success code
            return 0
        fi
    fi
}
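
# For illustration only (not executed): 'ip -4 route' typically starts with a line
# like (gateway address is hypothetical)
#   default via 192.0.2.1 dev eth0
# so cutting the third space-separated field yields the gateway address.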

ping_internet() {
    local protocol="${1}"
    # Ping a public address using the protocol passed as an argument
    ping_ipv4_or_ipv6 "${protocol}"
    log_write "* Checking Internet connectivity via IPv${protocol}..."
    # Try to ping the address 3 times
    if ! ${cmd} -W 2 -c 3 -n ${public_address} -I ${PIHOLE_INTERFACE} >/dev/null; then
        # if it's unsuccessful, show an error
        log_write "${CROSS} ${COL_LIGHT_RED}Cannot reach the Internet.${COL_NC}\n"
        return 1
    else
        # Otherwise, show success
        log_write "${TICK} ${COL_LIGHT_GREEN}Query responded.${COL_NC}\n"
        return 0
    fi
}

compare_port_to_service_assigned() {
    local service_name="${1}"
    # The programs we use may change at some point, so they are in a variable here
    local resolver="dnsmasq"
    local web_server="lighttpd"
    # lsof truncates long command names by default, so pihole-FTL shows up as "pihole-FT"
    local ftl="pihole-FT"
    if [[ "${service_name}" == "${resolver}" ]] || [[ "${service_name}" == "${web_server}" ]] || [[ "${service_name}" == "${ftl}" ]]; then
        # if the port is in use by one of Pi-hole's services, show it in green as it's standard
        log_write "[${COL_LIGHT_GREEN}${port_number}${COL_NC}] is in use by ${COL_LIGHT_GREEN}${service_name}${COL_NC}"
    # Otherwise,
    else
        # Show the service name in red since it's non-standard
        log_write "[${COL_LIGHT_RED}${port_number}${COL_NC}] is in use by ${COL_LIGHT_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})"
    fi
}

check_required_ports() {
    echo_current_diagnostic "Ports in use"
    # Since Pi-hole needs 53, 80, and 4711, check what they are being used by
    # so we can detect any issues
    local resolver="dnsmasq"
    local web_server="lighttpd"
    local ftl="pihole-FT"
    # Create an array for these ports in use
    ports_in_use=()
    # Sort the port list and remove duplicates
    while IFS= read -r line; do
        ports_in_use+=("$line")
    done < <(lsof -i -P -n | awk -F' ' '/LISTEN/ {print $9, $1}' | sort -n | uniq | cut -d':' -f2)

    # Now that we have the values stored,
    for i in "${!ports_in_use[@]}"; do
        # loop through them and assign some local variables
        local port_number
        port_number="$(echo "${ports_in_use[$i]}" | awk '{print $1}')"
        local service_name
        service_name=$(echo "${ports_in_use[$i]}" | awk '{print $2}')
        # Use a case statement to determine if the right services are using the right ports
        case "${port_number}" in
            53) compare_port_to_service_assigned "${resolver}"
                ;;
            80) compare_port_to_service_assigned "${web_server}"
                ;;
            4711) compare_port_to_service_assigned "${ftl}"
                ;;
            # If it's not a default port that Pi-hole needs, just print it out for the user to see
            *) log_write "[${port_number}] is in use by ${service_name}";
        esac
    done
}
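
# For illustration only (not executed): a LISTEN line from 'lsof -i -P -n' looks
# roughly like (PID and numbers are hypothetical)
#   dnsmasq   511 dnsmasq  6u IPv4 12345 0t0 TCP *:53 (LISTEN)
# The awk/sort/cut pipeline above reduces it to "53 dnsmasq": the port number
# followed by the name of the process that owns it.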

check_networking() {
    # Runs through several of the functions made earlier; we just clump them
    # together since they are all related to the networking aspect of things
    echo_current_diagnostic "Networking"
    detect_ip_addresses "4"
    detect_ip_addresses "6"
    ping_gateway "4"
    ping_gateway "6"
    check_required_ports
}

check_x_headers() {
    # The X-Headers allow us to determine from the command line if the Web server is operating correctly
    # lighttpd.conf has a directive to show "X-Pi-hole: A black hole for Internet advertisements."
    # in the header of any Pi-holed domain
    # Similarly, it will show "X-Pi-hole: The Pi-hole Web interface is working!" if you view the header returned
    # when accessing the dashboard (i.e curl -I pi.hole/admin/)
    echo_current_diagnostic "Dashboard and block page"
    # Use curl -I to get the header and parse out just the X-Pi-hole one
    local block_page
    block_page=$(curl -Is localhost | awk '/X-Pi-hole/' | tr -d '\r')
    # Do it for the dashboard as well, as the header is different than above
    local dashboard
    dashboard=$(curl -Is localhost/admin/ | awk '/X-Pi-hole/' | tr -d '\r')
    # Store what the X-Header should be in variables for comparison later
    local block_page_working
    block_page_working="X-Pi-hole: A black hole for Internet advertisements."
    local dashboard_working
    dashboard_working="X-Pi-hole: The Pi-hole Web interface is working!"
    # If the X-Header found by curl matches what it should be,
    if [[ $block_page == "$block_page_working" ]]; then
        # display a success message
        log_write "$TICK ${COL_LIGHT_GREEN}${block_page}${COL_NC}"
    else
        # Otherwise, show an error
        log_write "$CROSS ${COL_LIGHT_RED}X-Header does not match or could not be retrieved.${COL_NC}"
    fi
    # Same logic applies to the dashboard as above: if the X-Header matches what a working system should have,
    if [[ $dashboard == "$dashboard_working" ]]; then
        # then we can show a success
        log_write "$TICK ${COL_LIGHT_GREEN}${dashboard}${COL_NC}"
    else
        # Otherwise, it's a failure since the X-Headers either don't exist or have been modified in some way
        log_write "$CROSS ${COL_LIGHT_RED}X-Header does not match or could not be retrieved.${COL_NC}"
    fi
}
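
# For illustration only (not executed): on a working install, 'curl -Is localhost'
# should include the header line
#   X-Pi-hole: A black hole for Internet advertisements.
# and 'curl -Is localhost/admin/' should include
#   X-Pi-hole: The Pi-hole Web interface is working!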

dig_at() {
    # We need to test if Pi-hole can properly resolve domain names
    # as it is an essential piece of the software
    # Store the arguments as variables with names
    local protocol="${1}"
    local IP="${2}"
    echo_current_diagnostic "Name resolution (IPv${protocol}) using a random blocked domain and a known ad-serving domain"
    # Set more local variables
    # We need to test name resolution locally, via Pi-hole, and via a public resolver
    local local_dig
    local pihole_dig
    local remote_dig
    # Use a static domain that we know has IPv4 and IPv6 to avoid false positives
    # Sometimes the randomly chosen domains don't use IPv6, or something else is wrong with them
    local remote_url="doubleclick.com"

    # If the protocol (4 or 6) is 6,
    if [[ ${protocol} == "6" ]]; then
        # Set the IPv6 variables and record type
        local local_address="::1"
        local pihole_address="${IPV6_ADDRESS%/*}"
        local remote_address="2001:4860:4860::8888"
        local record_type="AAAA"
    # Otherwise, it should be 4
    else
        # so use the IPv4 values
        local local_address="127.0.0.1"
        local pihole_address="${IPV4_ADDRESS%/*}"
        local remote_address="8.8.8.8"
        local record_type="A"
    fi
    # Find a random blocked url that has not been whitelisted.
    # This helps emulate queries to different domains that a user might query
    # It will also give extra assurance that Pi-hole is correctly resolving and blocking domains
    local random_url=$(shuf -n 1 "${PIHOLE_BLOCKLIST_FILE}" | awk -F ' ' '{ print $2 }')

    # First, do a dig on localhost to see if Pi-hole can use itself to block a domain
    if local_dig=$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @${local_address} +short "${record_type}"); then
        # If it can, show success
        log_write "${TICK} ${random_url} ${COL_LIGHT_GREEN}is ${local_dig}${COL_NC} via ${COL_CYAN}localhost${COL_NC} (${local_address})"
    else
        # Otherwise, show a failure
        log_write "${CROSS} ${COL_LIGHT_RED}Failed to resolve${COL_NC} ${random_url} via ${COL_LIGHT_RED}localhost${COL_NC} (${local_address})"
    fi

    # Next we need to check if Pi-hole can resolve a domain when the query is sent to its IP address
    # This better emulates how clients will interact with Pi-hole as opposed to above where Pi-hole is
    # just asking itself locally
    # The default timeouts and tries are reduced in case the DNS server isn't working, so the user isn't waiting for too long
    # If Pi-hole can dig itself from its IP (not the loopback address)
    if pihole_dig=$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @${pihole_address} +short "${record_type}"); then
        # show a success
        log_write "${TICK} ${random_url} ${COL_LIGHT_GREEN}is ${pihole_dig}${COL_NC} via ${COL_CYAN}Pi-hole${COL_NC} (${pihole_address})"
    else
        # Otherwise, show a failure
        log_write "${CROSS} ${COL_LIGHT_RED}Failed to resolve${COL_NC} ${random_url} via ${COL_LIGHT_RED}Pi-hole${COL_NC} (${pihole_address})"
    fi

    # Finally, we need to make sure legitimate queries can get out to the Internet using an external, public DNS server
    # We are using the static remote_url here instead of a random one because we know it works with IPv4 and IPv6
    if remote_dig=$(dig +tries=1 +time=2 -"${protocol}" "${remote_url}" @${remote_address} +short "${record_type}" | head -n1); then
        # If successful, the real IP of the domain will be returned instead of Pi-hole's IP
        log_write "${TICK} ${remote_url} ${COL_LIGHT_GREEN}is ${remote_dig}${COL_NC} via ${COL_CYAN}a remote, public DNS server${COL_NC} (${remote_address})"
    else
        # Otherwise, show an error
        log_write "${CROSS} ${COL_LIGHT_RED}Failed to resolve${COL_NC} ${remote_url} via ${COL_LIGHT_RED}a remote, public DNS server${COL_NC} (${remote_address})"
    fi
}
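
# For illustration only (not executed): the three checks above boil down to queries
# of the form (the blocked domain is hypothetical; the addresses come from the
# variables set above)
#   dig +tries=1 +time=2 -4 some-blocked.domain @127.0.0.1 +short A
#   dig +tries=1 +time=2 -4 doubleclick.com @8.8.8.8 +short A
# A blocked domain should resolve to the Pi-hole address, while the public query
# should return the domain's real address.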

process_status() {
    # Check to make sure Pi-hole's services are running and active
    echo_current_diagnostic "Pi-hole processes"
    # Local iterator
    local i
    # For each process,
    for i in "${PIHOLE_PROCESSES[@]}"; do
        # get its status via systemctl
        local status_of_process=$(systemctl is-active "${i}")
        # and print it out to the user
        if [[ "${status_of_process}" == "active" ]]; then
            # If it's active, show it in green
            log_write "${TICK} ${COL_LIGHT_GREEN}${i}${COL_NC} daemon is ${COL_LIGHT_GREEN}${status_of_process}${COL_NC}"
        else
            # If it's not, show it in red
            log_write "${CROSS} ${COL_LIGHT_RED}${i}${COL_NC} daemon is ${COL_LIGHT_RED}${status_of_process}${COL_NC}"
        fi
    done
}
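
# For illustration only (not executed): 'systemctl is-active dnsmasq' prints a
# single word such as "active", "inactive" or "failed", which is what gets
# compared and displayed above.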

make_array_from_file() {
    local filename="${1}"
    # The second argument can put a limit on how many lines should be read from the file
    # Since some of the files are so large, this is helpful to limit the output
    local limit=${2}
    # A local iterator for testing if we are at the limit above
    local i=0
    # Set the array to be empty so we can start fresh when the function is used
    local file_content=()
    # If the file is a directory
    if [[ -d "${filename}" ]]; then
        # do nothing since it cannot be parsed
        :
    else
        # Otherwise, read the file line by line
        while IFS= read -r line; do
            # Strip out comments and blank lines
            new_line=$(echo "${line}" | sed -e 's/#.*$//' -e '/^$/d')
            # If the line still has content (a non-zero value)
            if [[ -n "${new_line}" ]]; then
                # Put it into the array
                file_content+=("${new_line}")
            else
                # Otherwise, it's a blank line or comment, so do nothing
                :
            fi
            # Increment the iterator +1
            i=$((i+1))
            # If no limit was given,
            if [[ -z ${limit} ]]; then
                # keep reading the whole file
                :
            # but once the limit of lines we want to see is reached, stop reading
            elif [[ $i -eq ${limit} ]]; then
                break
            fi
        done < "${filename}"
        # Now that we have made an array of the file's content
        for each_line in "${file_content[@]}"; do
            # Print each line
            # At some point, we may want to check the file line-by-line, so that's the reason for an array
            log_write "   ${each_line}"
        done
    fi
}
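
# For illustration only (not executed): the sed expression above turns a line like
#   server=8.8.8.8  # hypothetical trailing comment
# into "server=8.8.8.8" and discards lines that are comments or empty, so only
# meaningful configuration lines end up in the debug log.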

parse_file() {
    # Set the first argument passed to this function as a named variable for better readability
    local filename="${1}"
    # Put the current Internal Field Separator into another variable so it can be restored later
    OLD_IFS="$IFS"
    # Get the lines that are in the file(s) and store them in an array for parsing later
    IFS=$'\r\n' command eval 'file_info=( $(cat "${filename}") )'

    # Set a named variable for better readability
    local file_lines
    # For each line in the file,
    for file_lines in "${file_info[@]}"; do
        if [[ -n "${file_lines}" ]]; then
            # skip comments, empty lines, and the Web password hash
            [[ "${file_lines}" =~ ^\#.*$ || -z "${file_lines}" || "${file_lines}" == "WEBPASSWORD="* ]] && continue
            # otherwise, display the lines of the file
            log_write "   ${file_lines}"
        fi
    done
    # Set the IFS back to what it was
    IFS="$OLD_IFS"
}

check_name_resolution() {
    # Check name resolution from localhost, Pi-hole's IP, and Google's name servers
    # using the function we created earlier
    dig_at 4 "${IPV4_ADDRESS%/*}"
    # If IPv6 enabled,
    if [[ "${IPV6_ADDRESS}" ]]; then
        # check resolution
        dig_at 6 "${IPV6_ADDRESS%/*}"
    fi
}

# This function can check a directory exists
# Pi-hole has files in several places, so we will reuse this function
dir_check() {
    # Set the first argument passed to this function as a named variable for better readability
    local directory="${1}"
    # Display the current test that is running
    echo_current_diagnostic "contents of ${COL_CYAN}${directory}${COL_NC}"
    # For each file in the directory,
    for filename in ${directory}; do
        # check if it exists first; if it does,
        if ls "${filename}" 1> /dev/null 2>&1; then
            # do nothing
            :
        else
            # Otherwise, show an error
            log_write "${COL_LIGHT_RED}${directory} does not exist.${COL_NC}"
        fi
    done
}

list_files_in_dir() {
    # Set the first argument passed to this function as a named variable for better readability
    local dir_to_parse="${1}"
    # Store the files found in an array
    local files_found=($(ls "${dir_to_parse}"))
    # For each file in the array,
    for each_file in "${files_found[@]}"; do
        if [[ -d "${dir_to_parse}/${each_file}" ]]; then
            # If it's a directory, do nothing
            :
        elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_BLOCKLIST_FILE}" ]] || \
             [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
             [[ ${dir_to_parse}/${each_file} == ${PIHOLE_RAW_BLOCKLIST_FILES} ]] || \
             [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
             [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_SETUP_VARS_FILE}" ]] || \
             [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_LOG}" ]] || \
             [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_WEB_SERVER_ACCESS_LOG_FILE}" ]] || \
             [[ ${dir_to_parse}/${each_file} == ${PIHOLE_LOG_GZIPS} ]]; then
            # These files are either very large or are displayed elsewhere, so skip them here
            :
        else
            # Then, parse the file's content into an array so each line can be analyzed if need be
            for i in "${!REQUIRED_FILES[@]}"; do
                if [[ "${dir_to_parse}/${each_file}" == ${REQUIRED_FILES[$i]} ]]; then
                    # display the filename
                    log_write "\n${COL_LIGHT_GREEN}$(ls -ld ${dir_to_parse}/${each_file})${COL_NC}"
                    # Check if the file we want to view has a limit (because sometimes we just need a little bit of info from the file, not the entire thing)
                    case "${dir_to_parse}/${each_file}" in
                        # If it's the Web server error log, just give the first 25 lines
                        "${PIHOLE_WEB_SERVER_ERROR_LOG_FILE}") make_array_from_file "${dir_to_parse}/${each_file}" 25
                            ;;
                        # Same for the FTL log
                        "${PIHOLE_FTL_LOG}") make_array_from_file "${dir_to_parse}/${each_file}" 25
                            ;;
                        # parse the file into an array in case we ever need to analyze it line-by-line
                        *) make_array_from_file "${dir_to_parse}/${each_file}";
                    esac
                else
                    # Otherwise, do nothing since it's not a file needed for Pi-hole so we don't care about it
                    :
                fi
            done
        fi
    done
}

show_content_of_files_in_dir() {
    # Set a local variable for better readability
    local directory="${1}"
    # Check if the directory exists
    dir_check "${directory}"
    # if it does, list the files in it
    list_files_in_dir "${directory}"
}

show_content_of_pihole_files() {
    # Show the content of the files in each of Pi-hole's folders
    show_content_of_files_in_dir "${PIHOLE_DIRECTORY}"
    show_content_of_files_in_dir "${DNSMASQ_D_DIRECTORY}"
    show_content_of_files_in_dir "${WEB_SERVER_CONFIG_DIRECTORY}"
    show_content_of_files_in_dir "${CRON_D_DIRECTORY}"
    show_content_of_files_in_dir "${WEB_SERVER_LOG_DIRECTORY}"
    show_content_of_files_in_dir "${LOG_DIRECTORY}"
}

analyze_gravity_list() {
    echo_current_diagnostic "Gravity list"
    local head_line
    local tail_line
    # Put the current Internal Field Separator into another variable so it can be restored later
    OLD_IFS="$IFS"
    # Get the lines that are in the file(s) and store them in an array for parsing later
    IFS=$'\r\n'
    local gravity_permissions=$(ls -ld "${PIHOLE_BLOCKLIST_FILE}")
    log_write "${COL_LIGHT_GREEN}${gravity_permissions}${COL_NC}"
    local gravity_head=()
    gravity_head=($(head -n 4 ${PIHOLE_BLOCKLIST_FILE}))
    log_write "   ${COL_CYAN}-----head of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}"
    for head_line in "${gravity_head[@]}"; do
        log_write "   ${head_line}"
    done
    log_write ""
    local gravity_tail=()
    gravity_tail=($(tail -n 4 ${PIHOLE_BLOCKLIST_FILE}))
    log_write "   ${COL_CYAN}-----tail of $(basename ${PIHOLE_BLOCKLIST_FILE})------${COL_NC}"
    for tail_line in "${gravity_tail[@]}"; do
        log_write "   ${tail_line}"
    done
    # Set the IFS back to what it was
    IFS="$OLD_IFS"
}
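
# For illustration only (not executed): at this point gravity.list holds one
# "<Pi-hole address> <blocked domain>" pair per line, e.g. (hypothetical)
#   192.0.2.10 ads.example.com
# which is also why dig_at pulls the second field when picking a random domain.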

analyze_pihole_log() {
    echo_current_diagnostic "Pi-hole log"
    local head_line
    # Put the current Internal Field Separator into another variable so it can be restored later
    OLD_IFS="$IFS"
    # Get the lines that are in the file(s) and store them in an array for parsing later
    IFS=$'\r\n'
    local pihole_log_permissions=$(ls -ld "${PIHOLE_LOG}")
    log_write "${COL_LIGHT_GREEN}${pihole_log_permissions}${COL_NC}"
    local pihole_log_head=()
    pihole_log_head=($(head -n 20 ${PIHOLE_LOG}))
    log_write "   ${COL_CYAN}-----head of $(basename ${PIHOLE_LOG})------${COL_NC}"
    for head_line in "${pihole_log_head[@]}"; do
        log_write "   ${head_line}"
    done
    log_write ""
    # Set the IFS back to what it was
    IFS="$OLD_IFS"
}

tricorder_use_nc_or_ssl() {
    # Users can submit their debug logs using nc (unencrypted) or openssl (encrypted) if available
    # Check for openssl first since encryption is a good thing
    if command -v openssl &> /dev/null; then
        # If the command exists,
        log_write "   * Using ${COL_LIGHT_GREEN}openssl${COL_NC} for transmission."
        # encrypt and transmit the log and store the token returned in a variable
        tricorder_token=$(< ${PIHOLE_DEBUG_LOG_SANITIZED} openssl s_client -quiet -connect tricorder.pi-hole.net:${TRICORDER_SSL_PORT_NUMBER} 2> /dev/null)
    # Otherwise,
    else
        # use netcat
        log_write "${INFO} Using ${COL_YELLOW}netcat${COL_NC} for transmission."
        # Save the token returned by our server in a variable
        tricorder_token=$(< ${PIHOLE_DEBUG_LOG_SANITIZED} nc tricorder.pi-hole.net ${TRICORDER_NC_PORT_NUMBER})
    fi
}
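
# For illustration only (not executed): the upload amounts to feeding the sanitized
# log into one of the following and reading back the token the server prints:
#   openssl s_client -quiet -connect tricorder.pi-hole.net:9998 < /var/log/pihole_debug-sanitized.log
#   nc tricorder.pi-hole.net 9999 < /var/log/pihole_debug-sanitized.log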

upload_to_tricorder() {
    local username="pihole"
    # Set the permissions and owner
    chmod 644 ${PIHOLE_DEBUG_LOG}
    chown "$USER":"${username}" ${PIHOLE_DEBUG_LOG}

    # Let the user know debugging is complete with something strikingly visual
    log_write ""
    log_write "${COL_LIGHT_PURPLE}********************************************${COL_NC}"
    log_write "${COL_LIGHT_PURPLE}********************************************${COL_NC}"
    log_write "${TICK} ${COL_LIGHT_GREEN}** FINISHED DEBUGGING! **${COL_NC}\n"

    # Provide information on what they should do with their token
    log_write "   * The debug log can be uploaded to tricorder.pi-hole.net for sharing with developers only."
    log_write "   * For more information, see: ${TRICORDER_CONTEST}"
    log_write "   * If available, we'll use openssl to upload the log, otherwise it will fall back to netcat."
    # If pihole -d is running automatically (usually through the dashboard)
    if [[ "${AUTOMATED}" ]]; then
        # let the user know
        log_write "${INFO} Debug script running in automated mode"
        # and then decide again which tool to use to submit it
        if command -v openssl &> /dev/null; then
            # If openssl is available, use it
            log_write "${INFO} Using ${COL_LIGHT_GREEN}openssl${COL_NC} for transmission."
            # Save the token returned by our server in a variable
            tricorder_token=$(openssl s_client -quiet -connect tricorder.pi-hole.net:${TRICORDER_SSL_PORT_NUMBER} 2> /dev/null < /dev/stdin)
        else
            # Otherwise, fall back to netcat
            log_write "${INFO} Using ${COL_YELLOW}netcat${COL_NC} for transmission."
            # Save the token returned by our server in a variable
            tricorder_token=$(nc tricorder.pi-hole.net ${TRICORDER_NC_PORT_NUMBER} < /dev/stdin)
        fi
    # If we're not running in automated mode,
    else
        echo ""
        # give the user a choice of uploading it or not
        # Users can review the log file locally (or the output of the script since they are the same) and try to self-diagnose their problem
        read -r -p "[?] Would you like to upload the log? [y/N] " response
        case ${response} in
            # If they say yes, run our function for uploading the log
            [yY][eE][sS]|[yY]) tricorder_use_nc_or_ssl;;
            # If they choose no, just exit out of the script
            *) log_write "   * Log will ${COL_LIGHT_GREEN}NOT${COL_NC} be uploaded to tricorder."; exit;
        esac
    fi
    # Check if tricorder.pi-hole.net is reachable and provide token
    # along with some additional useful information
    if [[ -n "${tricorder_token}" ]]; then
        # Again, try to make this visually striking so the user realizes they need to do something with this information
        # Namely, provide the Pi-hole devs with the token
        log_write ""
        log_write "${COL_LIGHT_PURPLE}***********************************${COL_NC}"
        log_write "${COL_LIGHT_PURPLE}***********************************${COL_NC}"
        log_write "${TICK} Your debug token is: ${COL_LIGHT_GREEN}${tricorder_token}${COL_NC}"
        log_write "${COL_LIGHT_PURPLE}***********************************${COL_NC}"
        log_write "${COL_LIGHT_PURPLE}***********************************${COL_NC}"
        log_write ""
        log_write "   * Provide the token above to the Pi-hole team for assistance at"
        log_write "   * ${FORUMS_URL}"
        log_write "   * Your log will self-destruct on our server after ${COL_LIGHT_RED}48 hours${COL_NC}."
    # If no token was generated
    else
        # Show an error and some help instructions
        log_write "${CROSS} ${COL_LIGHT_RED}There was an error uploading your debug log.${COL_NC}"
        log_write "   * Please try again or contact the Pi-hole team for assistance."
    fi
    # Finally, show where the log file is no matter the outcome of the function so users can look at it
    log_write "   * A local copy of the debug log can be found at: ${COL_CYAN}${PIHOLE_DEBUG_LOG_SANITIZED}${COL_NC}\n"
}

# Run through all the functions we made
make_temporary_log
initiate_debug
# setupVars.conf needs to be sourced before the networking so the values are
# available to the other functions
source_setup_variables
check_component_versions
check_critical_program_versions
diagnose_operating_system
processor_check
check_networking
check_name_resolution
process_status
parse_setup_vars
check_x_headers
analyze_gravity_list
show_content_of_pihole_files
analyze_pihole_log
copy_to_debug_log
upload_to_tricorder