
Rename some of the variables to hopefully make the process a little clearer

Signed-off-by: Adam Warner <me@adamwarner.co.uk>
Adam Warner committed 2023-01-21 23:47:19 +00:00
parent 79f4a7cef0
commit 9939cf1d77


@@ -244,7 +244,7 @@ database_adlist_number() {
         return;
     fi
 
-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_source_lines}" "${num_unusable}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
+    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${num_domains}" "${num_non_domains}" "${1}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -519,12 +519,12 @@ gravity_DownloadBlocklists() {
     gravity_Blackbody=true
 }
 
-# num_target_lines does increase for every correctly added domain in pareseList()
-num_target_lines=0
-num_source_lines=0
-num_unusable=0
+# num_total_imported_domains increases for each list processed
+num_total_imported_domains=0
+num_domains=0
+num_non_domains=0
 parseList() {
-    local adlistID="${1}" src="${2}" target="${3}" unusable_lines sample_unusable_lines tmp_unusuable_lines_str false_positive
+    local adlistID="${1}" src="${2}" target="${3}" non_domains sample_non_domains tmp_non_domains_str false_positive
     # This sed does the following things:
     # 1. Remove all lines containing no domains
     # 2. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_)
@@ -535,9 +535,9 @@ parseList() {
     # Find lines containing no domains or with invalid characters (see above)
-    mapfile -t unusable_lines <<< "$(sed -r "/([^\.]+\.)+[^\.]{2,}/d" < "${src}")"
-    mapfile -t -O "${#unusable_lines[@]}" unusable_lines <<< "$(sed -r "/[^a-zA-Z0-9.\_-]/!d" < "${src}")"
+    mapfile -t non_domains <<< "$(sed -r "/([^\.]+\.)+[^\.]{2,}/d" < "${src}")"
+    mapfile -t -O "${#non_domains[@]}" non_domains <<< "$(sed -r "/[^a-zA-Z0-9.\_-]/!d" < "${src}")"
 
     # Remove duplicates from the list
-    IFS=" " read -r -a unusable_lines <<< "$(tr ' ' '\n' <<< "${unusable_lines[@]}" | sort -u | tr '\n' ' ')"
+    IFS=" " read -r -a non_domains <<< "$(tr ' ' '\n' <<< "${non_domains[@]}" | sort -u | tr '\n' ' ')"
 
     # A list of items of common local hostnames not to report as unusable
     # Some lists (i.e StevenBlack's) contain these as they are supposed to be used as HOST files
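To make the two filters above concrete, here is a minimal sketch against a hypothetical sample list (the file contents are invented for illustration; only the sed expressions come from the code above):

    # Hypothetical sample list
    src=$(mktemp)
    printf '%s\n' '# a comment line' 'example.com' 'bad domain.com' 'tracker.example.net' > "${src}"

    # First filter: delete every line containing a domain-like token,
    # keeping lines with no domain at all -> "# a comment line"
    sed -r "/([^\.]+\.)+[^\.]{2,}/d" < "${src}"

    # Second filter: delete every line made only of valid characters (a-z, A-Z, 0-9, ., _, -),
    # keeping lines that contain an invalid character -> "# a comment line", "bad domain.com"
    sed -r "/[^a-zA-Z0-9.\_-]/!d" < "${src}"

    rm -f "${src}"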
@@ -559,37 +559,37 @@ parseList() {
     )
 
     # Read the unusable lines into a string
-    tmp_unusuable_lines_str=" ${unusable_lines[*]} "
+    tmp_non_domains_str=" ${non_domains[*]} "
     for false_positive in "${false_positives[@]}"; do
-        # Remove false positives from tmp_unusuable_lines_str
-        tmp_unusuable_lines_str="${tmp_unusuable_lines_str/ ${false_positive} / }"
+        # Remove false positives from tmp_non_domains_str
+        tmp_non_domains_str="${tmp_non_domains_str/ ${false_positive} / }"
     done
 
     # Read the string back into an array
-    IFS=" " read -r -a unusable_lines <<< "${tmp_unusuable_lines_str}"
+    IFS=" " read -r -a non_domains <<< "${tmp_non_domains_str}"
 
-    # Get a sample of the incorrect lines, limited to 5 (the list should already have been de-duplicated)
-    IFS=" " read -r -a sample_unusable_lines <<< "$(tr ' ' '\n' <<< "${unusable_lines[@]}" | head -n 5 | tr '\n' ' ')"
+    # Get a sample of non-domain entries, limited to 5 (the list should already have been de-duplicated)
+    IFS=" " read -r -a sample_non_domains <<< "$(tr ' ' '\n' <<< "${non_domains[@]}" | head -n 5 | tr '\n' ' ')"
 
-    local num_target_lines_new num_correct_lines
-    # Get number of lines in source file
-    num_source_lines="$(grep -c "^" "${src}")"
-    # Get the new number of lines in destination file
-    num_target_lines_new="$(grep -c "^" "${target}")"
-    # Number of new correctly added lines
-    num_correct_lines="$(( num_target_lines_new-num_target_lines ))"
-    # Update number of lines in target file
-    num_target_lines="$num_target_lines_new"
-    num_unusable="${#unusable_lines[@]}"
+    local tmp_new_imported_total
+    # Get the new number of domains in destination file
+    tmp_new_imported_total="$(grep -c "^" "${target}")"
+    # Number of imported lines for this file is the difference between the new total and the old total. (Or, the number of domains we just added.)
+    num_domains="$(( tmp_new_imported_total-num_total_imported_domains ))"
+    # Replace the running total with the new total.
+    num_total_imported_domains="$tmp_new_imported_total"
+    # Get the number of non_domains (this is the number of entries left after stripping the source of comments/duplicates/false positives/domains)
+    num_non_domains="${#non_domains[@]}"
 
-    if [[ "${num_unusable}" -ne 0 ]]; then
-        echo "  ${INFO} Imported ${num_correct_lines} domains, ignoring ${num_unusable} non-domain entries"
+    # If there are unusable lines, we display some information about them. This is not error or major cause for concern.
+    if [[ "${num_non_domains}" -ne 0 ]]; then
+        echo "  ${INFO} Imported ${num_domains} domains, ignoring ${num_non_domains} non-domain entries"
         echo "      Sample of non-domain entries:"
-        for each in "${sample_unusable_lines[@]}"
+        for each in "${sample_non_domains[@]}"
         do
             echo "      - ${each}"
         done
     else
-        echo "  ${INFO} Imported ${num_correct_lines} domains"
+        echo "  ${INFO} Imported ${num_domains} domains"
     fi
 }
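The renaming also makes the accounting easier to follow: parseList() appends every list to the same target file, so the per-list figure is the new line count of the target minus the running total from earlier lists. A minimal sketch of that arithmetic with hypothetical numbers:

    # Hypothetical values, for illustration only
    num_total_imported_domains=1000   # lines already in the target from earlier lists
    tmp_new_imported_total=1350       # grep -c "^" on the target after this list was appended
    num_domains=$(( tmp_new_imported_total - num_total_imported_domains ))   # 350 domains from this list
    num_total_imported_domains="$tmp_new_imported_total"                     # carry the running total forward
    echo "Imported ${num_domains} domains"                                   # -> Imported 350 domains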
@@ -745,8 +745,8 @@ gravity_DownloadBlocklistFromUrl() {
     else
         echo -e "  ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
         # Manually reset these two numbers because we do not call parseList here
-        num_source_lines=0
-        num_unusable=0
+        num_domains=0
+        num_non_domains=0
         database_adlist_number "${adlistID}"
         database_adlist_status "${adlistID}" "4"
     fi
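Without this reset, the UPDATE issued by database_adlist_number() would record the counts left over from the previously processed list against the failed list's row; after the reset it records zeros instead. A short sketch of the intended effect (the adlistID value is hypothetical):

    num_domains=0
    num_non_domains=0
    database_adlist_number "${adlistID}"   # issues: UPDATE adlist SET number = 0, invalid_domains = 0 WHERE id = ...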