Mirror of https://github.com/pi-hole/pi-hole (synced 2025-02-08 06:02:37 +00:00)
Improve gravity performance (#3100)
* Gravity performance improvements.
* Do not move downloaded lists into the migration_backup directory.
* Do not (strictly) sort domains. Random-leaf access is faster than always-last-leaf access (on average).
* Append to, instead of overwriting, the gravity_new collection list.
* Rename table gravity_new to gravity_temp to clarify that this is only an intermediate table.
* Add timers for each of the compute-intense calls. They are to be removed before this finally hits the release/v5.0 branch.
* Fix legacy list files import. It currently doesn't work when the gravity database has already been updated to use the single domainlist table.
* Simplify database_table_from_file(), remove all calls to this function for gravity list downloads.
* Update gravity.db.sql to version 10 so that newly created databases already reflect the most recent state.
* Create a second gravity database and swap them on success. This has a number of advantages, such as instantaneous gravity updates (as seen from FTL) and always-available gravity blocking. Furthermore, this saves disk space, as the old database is removed on completion.
* Add timing output for the database-swapping SQLite3 call.
* Explicitly generate the index as a separate step.
* Remove time measurements.

Signed-off-by: DL6ER <dl6er@dl6er.de>
Parent: 52e2a2610e
Commit: 10c2dad48a
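Before the diff itself, it helps to see the shape of the change: gravity now assembles a complete new database next to the live one and swaps it in only after every step has succeeded, so FTL keeps serving the old data until the new set is ready. A minimal sketch of that pattern, with illustrative file names rather than the script's literal code:

    # Build-then-swap, as a standalone sketch (assumes sqlite3 and a schema file at hand)
    tmp="/etc/pihole/gravity_temp.db"
    live="/etc/pihole/gravity.db"
    sqlite3 "$tmp" < gravity.db.sql                    # 1. create the empty schema in a side file
    printf '.timeout 30000\n.mode csv\n.import "%s" gravity\n' "domains.csv" \
        | sqlite3 "$tmp"                               # 2. one bulk CSV import of all downloaded domains
    sqlite3 "$tmp" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"   # 3. index once, after loading
    rm "$live" && mv "$tmp" "$live"                    # 4. swap the finished database into place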
advanced/Templates/gravity.db.sql
@@ -1,16 +1,21 @@
-PRAGMA FOREIGN_KEYS=ON;
+PRAGMA foreign_keys=OFF;
 BEGIN TRANSACTION;
 
 CREATE TABLE "group"
 (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     enabled BOOLEAN NOT NULL DEFAULT 1,
-    name TEXT NOT NULL,
+    name TEXT UNIQUE NOT NULL,
+    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
+    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
     description TEXT
 );
+INSERT INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
 
-CREATE TABLE whitelist
+CREATE TABLE domainlist
 (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
+    type INTEGER NOT NULL DEFAULT 0,
     domain TEXT UNIQUE NOT NULL,
     enabled BOOLEAN NOT NULL DEFAULT 1,
     date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
@@ -18,47 +23,6 @@ CREATE TABLE whitelist
     comment TEXT
 );
 
-CREATE TABLE whitelist_by_group
-(
-    whitelist_id INTEGER NOT NULL REFERENCES whitelist (id),
-    group_id INTEGER NOT NULL REFERENCES "group" (id),
-    PRIMARY KEY (whitelist_id, group_id)
-);
-
-CREATE TABLE blacklist
-(
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    domain TEXT UNIQUE NOT NULL,
-    enabled BOOLEAN NOT NULL DEFAULT 1,
-    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
-    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
-    comment TEXT
-);
-
-CREATE TABLE blacklist_by_group
-(
-    blacklist_id INTEGER NOT NULL REFERENCES blacklist (id),
-    group_id INTEGER NOT NULL REFERENCES "group" (id),
-    PRIMARY KEY (blacklist_id, group_id)
-);
-
-CREATE TABLE regex
-(
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    domain TEXT UNIQUE NOT NULL,
-    enabled BOOLEAN NOT NULL DEFAULT 1,
-    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
-    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
-    comment TEXT
-);
-
-CREATE TABLE regex_by_group
-(
-    regex_id INTEGER NOT NULL REFERENCES regex (id),
-    group_id INTEGER NOT NULL REFERENCES "group" (id),
-    PRIMARY KEY (regex_id, group_id)
-);
-
 CREATE TABLE adlist
 (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -78,7 +42,8 @@ CREATE TABLE adlist_by_group
 
 CREATE TABLE gravity
 (
-    domain TEXT PRIMARY KEY
+    domain TEXT NOT NULL,
+    adlist_id INTEGER NOT NULL REFERENCES adlist (id)
 );
 
 CREATE TABLE info
@@ -87,56 +52,129 @@ CREATE TABLE info
     value TEXT NOT NULL
 );
 
-INSERT INTO info VALUES("version","1");
+INSERT INTO "info" VALUES('version','10');
 
-CREATE VIEW vw_whitelist AS SELECT DISTINCT domain
-FROM whitelist
-LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id
-LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id
-WHERE whitelist.enabled = 1 AND (whitelist_by_group.group_id IS NULL OR "group".enabled = 1)
-ORDER BY whitelist.id;
+CREATE TABLE domain_audit
+(
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    domain TEXT UNIQUE NOT NULL,
+    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int))
+);
 
-CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist
-BEGIN
-    UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
-END;
+CREATE TABLE domainlist_by_group
+(
+    domainlist_id INTEGER NOT NULL REFERENCES domainlist (id),
+    group_id INTEGER NOT NULL REFERENCES "group" (id),
+    PRIMARY KEY (domainlist_id, group_id)
+);
 
-CREATE VIEW vw_blacklist AS SELECT DISTINCT domain
-FROM blacklist
-LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id
-LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id
-WHERE blacklist.enabled = 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1)
-ORDER BY blacklist.id;
+CREATE TABLE client
+(
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ip TEXT NOT NULL UNIQUE
+);
 
-CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist
-BEGIN
-    UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
-END;
+CREATE TABLE client_by_group
+(
+    client_id INTEGER NOT NULL REFERENCES client (id),
+    group_id INTEGER NOT NULL REFERENCES "group" (id),
+    PRIMARY KEY (client_id, group_id)
+);
 
-CREATE VIEW vw_regex AS SELECT DISTINCT domain
-FROM regex
-LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id
-LEFT JOIN "group" ON "group".id = regex_by_group.group_id
-WHERE regex.enabled = 1 AND (regex_by_group.group_id IS NULL OR "group".enabled = 1)
-ORDER BY regex.id;
-
-CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex
-BEGIN
-    UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
-END;
-
-CREATE VIEW vw_adlist AS SELECT DISTINCT address
-FROM adlist
-LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
-LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
-WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
-ORDER BY adlist.id;
-
 CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist
 BEGIN
     UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address;
 END;
 
-CREATE VIEW vw_gravity AS SELECT domain
+CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist
+BEGIN
+    UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
+END;
+
+CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+FROM domainlist
+LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+AND domainlist.type = 0
+ORDER BY domainlist.id;
+
+CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+FROM domainlist
+LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+AND domainlist.type = 1
+ORDER BY domainlist.id;
+
+CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+FROM domainlist
+LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+AND domainlist.type = 2
+ORDER BY domainlist.id;
+
+CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
+FROM domainlist
+LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
+LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
+WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
+AND domainlist.type = 3
+ORDER BY domainlist.id;
+
+CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id
 FROM gravity
-WHERE domain NOT IN (SELECT domain from vw_whitelist);
+LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id
+LEFT JOIN adlist ON adlist.id = gravity.adlist_id
+LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1);
+
+CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id
+FROM adlist
+LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
+LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
+WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
+ORDER BY adlist.id;
+
+CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
+BEGIN
+    INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
+END;
+
+CREATE TRIGGER tr_client_add AFTER INSERT ON client
+BEGIN
+    INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
+END;
+
+CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
+BEGIN
+    INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
+END;
+
+CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
+BEGIN
+    UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
+END;
+
+CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
+BEGIN
+    INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
+END;
+
+CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist
+BEGIN
+    DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id;
+END;
+
+CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist
+BEGIN
+    DELETE FROM adlist_by_group WHERE adlist_id = OLD.id;
+END;
+
+CREATE TRIGGER tr_client_delete AFTER DELETE ON client
+BEGIN
+    DELETE FROM client_by_group WHERE client_id = OLD.id;
+END;
+
+COMMIT;
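The schema above folds the old whitelist, blacklist, and regex tables into a single domainlist table discriminated by a type column (0 = exact whitelist, 1 = exact blacklist, 2 = regex whitelist, 3 = regex blacklist), with one view per type. A quick illustration against a v10 database, using a made-up domain:

    # tr_domainlist_add assigns the new entry to group 0 ('Unassociated') automatically
    sqlite3 /etc/pihole/gravity.db "INSERT INTO domainlist (type, domain) VALUES (0, 'example.com');"
    sqlite3 /etc/pihole/gravity.db "SELECT domain FROM vw_whitelist;"   # -> example.com
    sqlite3 /etc/pihole/gravity.db "SELECT domain FROM vw_blacklist;"   # -> (no rows: type 0 is not a blacklist entry)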
advanced/Templates/gravity_copy.sql (new file, 21 lines)
@@ -0,0 +1,21 @@
+.timeout 30000
+
+ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD;
+
+BEGIN TRANSACTION;
+
+INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group";
+INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit;
+
+INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist;
+INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group;
+
+INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist;
+INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group;
+
+INSERT OR REPLACE INTO info SELECT * FROM OLD.info;
+
+INSERT OR REPLACE INTO client SELECT * FROM OLD.client;
+INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group;
+
+COMMIT;
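The copy script runs against the freshly built database: ATTACH makes the live file visible as OLD, and the user-maintained tables (groups, lists, clients, audit log) are pulled across, so only the bulk gravity table has to be rebuilt from downloads. It is invoked the same way gravity_swap_databases does below:

    # Run against the *new* database; 'OLD' refers to the ATTACHed live file
    sqlite3 /etc/pihole/gravity_temp.db < /etc/.pihole/advanced/Templates/gravity_copy.sql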
gravity.sh (219 changed lines)
@@ -36,7 +36,9 @@ VPNList="/etc/openvpn/ipp.txt"
 
 piholeGitDir="/etc/.pihole"
 gravityDBfile="${piholeDir}/gravity.db"
+gravityTEMPfile="${piholeDir}/gravity_temp.db"
 gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
+gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"
 optimize_database=false
 
 domainsExtension="domains"
@@ -80,31 +82,49 @@ fi
 
 # Generate new sqlite3 file from schema template
 generate_gravity_database() {
-  sqlite3 "${gravityDBfile}" < "${gravityDBschema}"
+  sqlite3 "${1}" < "${gravityDBschema}"
 }
 
-update_gravity_timestamp() {
-  # Update timestamp when the gravity table was last updated successfully
-  output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+# Copy data from old to new database file and swap them
+gravity_swap_databases() {
+  local str
+  str="Building tree"
+  echo -ne "  ${INFO} ${str}..."
+
+  # The index is intentionally not UNIQUE as poor-quality adlists may contain domains more than once
+  output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
   status="$?"
 
   if [[ "${status}" -ne 0 ]]; then
-    echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n  ${output}"
+    echo -e "\\n  ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n  ${output}"
     return 1
   fi
-  return 0
-}
+  echo -e "${OVER}  ${TICK} ${str}"
 
-database_truncate_table() {
-  local table
-  table="${1}"
+  str="Swapping databases"
+  echo -ne "  ${INFO} ${str}..."
 
-  output=$( { printf ".timeout 30000\\nDELETE FROM %s;" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+  output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"; } 2>&1 )
   status="$?"
 
   if [[ "${status}" -ne 0 ]]; then
-    echo -e "\\n  ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n  ${output}"
-    gravity_Cleanup "error"
+    echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
+    return 1
+  fi
+  echo -e "${OVER}  ${TICK} ${str}"
+
+  # Swap databases and remove old database
+  rm "${gravityDBfile}"
+  mv "${gravityTEMPfile}" "${gravityDBfile}"
+}
+
+# Update timestamp when the gravity table was last updated successfully
+update_gravity_timestamp() {
+  output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityTEMPfile}"; } 2>&1 )
+  status="$?"
+
+  if [[ "${status}" -ne 0 ]]; then
+    echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n  ${output}"
     return 1
   fi
   return 0
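Two details of gravity_swap_databases deserve a note. The "Building tree" step creates the composite index only after the gravity table is fully loaded; bulk inserts into an unindexed table followed by one CREATE INDEX are generally far cheaper than maintaining the B-tree row by row. And the index is deliberately not UNIQUE: the same domain may legitimately arrive from several adlists (or more than once within a sloppy list). A small demonstration with made-up values:

    # Duplicate domains across adlists are legal; the rows differ in adlist_id
    sqlite3 gravity_temp.db "INSERT INTO gravity (domain, adlist_id) VALUES ('ads.example.net', 1), ('ads.example.net', 2);"
    sqlite3 gravity_temp.db "SELECT COUNT(*), COUNT(DISTINCT domain) FROM gravity;"   # -> 2|1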
@@ -113,73 +133,80 @@ database_truncate_table() {
 # Import domains from file and store them in the specified database table
 database_table_from_file() {
   # Define locals
-  local table source backup_path backup_file arg
+  local table source backup_path backup_file tmpFile type
   table="${1}"
   source="${2}"
-  arg="${3}"
   backup_path="${piholeDir}/migration_backup"
   backup_file="${backup_path}/$(basename "${2}")"
 
-  # Truncate table only if not gravity (we add multiple times to this table)
-  if [[ "${table}" != "gravity" ]]; then
-    database_truncate_table "${table}"
-  fi
-
-  local tmpFile
   tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"
 
   local timestamp
   timestamp="$(date --utc +'%s')"
-  local inputfile
-  # Apply format for white-, blacklist, regex, and adlist tables
-  # Read file line by line
   local rowid
   declare -i rowid
   rowid=1
 
-  if [[ "${table}" == "gravity" ]]; then
-    #Append ,${arg} to every line and then remove blank lines before import
-    sed -e "s/$/,${arg}/" "${source}" > "${tmpFile}"
-    sed -i '/^$/d' "${tmpFile}"
-  else
-    grep -v '^ *#' < "${source}" | while IFS= read -r domain
-    do
-      # Only add non-empty lines
-      if [[ -n "${domain}" ]]; then
-        if [[ "${table}" == "domain_audit" ]]; then
-          # domain_audit table format (no enable or modified fields)
-          echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
-        else
-          # White-, black-, and regexlist format
-          echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
-        fi
-        rowid+=1
-      fi
-    done
+  # Special handling for domains to be imported into the common domainlist table
+  if [[ "${table}" == "whitelist" ]]; then
+    type="0"
+    table="domainlist"
+  elif [[ "${table}" == "blacklist" ]]; then
+    type="1"
+    table="domainlist"
+  elif [[ "${table}" == "regex" ]]; then
+    type="3"
+    table="domainlist"
   fi
-  inputfile="${tmpFile}"
 
-  # Remove possible duplicates found in lower-quality adlists
-  sort -u -o "${inputfile}" "${inputfile}"
+  # Get MAX(id) from domainlist when INSERTing into this table
+  if [[ "${table}" == "domainlist" ]]; then
+    rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
+    if [[ -z "$rowid" ]]; then
+      rowid=0
+    fi
+    rowid+=1
+  fi
+
+  # Loop over all domains in ${source} file
+  # Read file line by line
+  grep -v '^ *#' < "${source}" | while IFS= read -r domain
+  do
+    # Only add non-empty lines
+    if [[ -n "${domain}" ]]; then
+      if [[ "${table}" == "domain_audit" ]]; then
+        # domain_audit table format (no enable or modified fields)
+        echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
+      elif [[ "${table}" == "adlist" ]]; then
+        # Adlist table format
+        echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
+      else
+        # White-, black-, and regexlist table format
+        echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
+      fi
+      rowid+=1
+    fi
+  done
 
   # Store domains in database table specified by ${table}
   # Use printf as .mode and .import need to be on separate lines
   # see https://unix.stackexchange.com/a/445615/83260
-  output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
+  output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
   status="$?"
 
   if [[ "${status}" -ne 0 ]]; then
-    echo -e "\\n  ${CROSS} Unable to fill table ${table} in database ${gravityDBfile}\\n  ${output}"
+    echo -e "\\n  ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n  ${output}"
     gravity_Cleanup "error"
   fi
 
-  # Delete tmpfile
-  rm "${tmpFile}" > /dev/null 2>&1 || \
-    echo -e "  ${CROSS} Unable to remove ${tmpFile}"
-
   # Move source file to backup directory, create directory if not existing
   mkdir -p "${backup_path}"
   mv "${source}" "${backup_file}" 2> /dev/null || \
     echo -e "  ${CROSS} Unable to backup ${source} to ${backup_path}"
+
+  # Delete tmpFile
+  rm "${tmpFile}" > /dev/null 2>&1 || \
+    echo -e "  ${CROSS} Unable to remove ${tmpFile}"
 }
 
 # Migrate pre-v5.0 list files to database-based Pi-hole versions
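For reference, the CSV rows this function writes must line up column for column with the target table, because sqlite3's .import copies fields positionally. A hypothetical migrated blacklist entry (type 1) and the import call it feeds into:

    # domainlist columns: id,type,domain,enabled,date_added,date_modified,comment
    echo '8,1,"bad.example.net",1,1580000000,1580000000,"Migrated from /etc/pihole/blacklist.txt"' >> "$tmpFile"
    # .mode and .import must sit on separate lines, hence the printf
    printf '.timeout 30000\n.mode csv\n.import "%s" domainlist\n' "$tmpFile" | sqlite3 /etc/pihole/gravity.db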
@@ -188,7 +215,10 @@ migrate_to_database() {
   if [ ! -e "${gravityDBfile}" ]; then
     # Create new database file - note that this will be created in version 1
     echo -e "  ${INFO} Creating new gravity database"
-    generate_gravity_database
+    generate_gravity_database "${gravityDBfile}"
 
+    # Check if gravity database needs to be updated
+    upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
+
     # Migrate list files to new database
     if [ -e "${adListFile}" ]; then
@@ -306,16 +336,25 @@ gravity_DownloadBlocklists() {
     return 1
   fi
 
-  local url domain agent cmd_ext str
+  local url domain agent cmd_ext str target
   echo ""
 
-  # Flush gravity table once before looping over sources
-  str="Flushing gravity table"
+  # Prepare new gravity database
+  str="Preparing new gravity database"
   echo -ne "  ${INFO} ${str}..."
-  if database_truncate_table "gravity"; then
+  rm "${gravityTEMPfile}" > /dev/null 2>&1
+  output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
+  status="$?"
+
+  if [[ "${status}" -ne 0 ]]; then
+    echo -e "\\n  ${CROSS} Unable to create new database ${gravityTEMPfile}\\n  ${output}"
+    gravity_Cleanup "error"
+  else
     echo -e "${OVER}  ${TICK} ${str}"
   fi
 
+  target="$(mktemp -p "/tmp" --suffix=".gravity")"
+
   # Loop through $sources and download each one
   for ((i = 0; i < "${#sources[@]}"; i++)); do
     url="${sources[$i]}"
@@ -335,15 +374,32 @@
     esac
 
     echo -e "  ${INFO} Target: ${url}"
-    gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}"
+    gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}"
     echo ""
   done
 
+  str="Storing downloaded domains in new gravity database"
+  echo -ne "  ${INFO} ${str}..."
+  output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 )
+  status="$?"
+
+  if [[ "${status}" -ne 0 ]]; then
+    echo -e "\\n  ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n  ${output}"
+    gravity_Cleanup "error"
+  else
+    echo -e "${OVER}  ${TICK} ${str}"
+  fi
+
+  rm "${target}" > /dev/null 2>&1 || \
+    echo -e "  ${CROSS} Unable to remove ${target}"
+
   gravity_Blackbody=true
 }
 
 # Download specified URL and perform checks on HTTP status and file content
 gravity_DownloadBlocklistFromUrl() {
-  local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" heisenbergCompensator="" patternBuffer str httpCode success=""
+  local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}"
+  local heisenbergCompensator="" patternBuffer str httpCode success=""
 
   # Create temp file to store content on disk instead of RAM
   patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")
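Per-list database work is thus gone entirely: each download merely appends domain,adlist_id pairs to one shared temporary file, and the gravity table is filled by a single CSV import per run. A sketch with invented list names and IDs:

    target=$(mktemp -p /tmp --suffix=".gravity")
    sed -e "/^$/d;s/$/,5/" list.5.domains >> "$target"    # list with adlist ID 5
    sed -e "/^$/d;s/$/,7/" list.7.domains >> "$target"    # list with adlist ID 7
    printf '.timeout 30000\n.mode csv\n.import "%s" gravity\n' "$target" | sqlite3 gravity_temp.db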
@@ -424,20 +480,15 @@ gravity_DownloadBlocklistFromUrl() {
   # Determine if the blocklist was downloaded and saved correctly
   if [[ "${success}" == true ]]; then
     if [[ "${httpCode}" == "304" ]]; then
-      # Add domains to database table
-      str="Adding adlist with ID ${adlistID} to database table"
-      echo -ne "  ${INFO} ${str}..."
-      database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
-      echo -e "${OVER}  ${TICK} ${str}"
+      # Add domains to database table file
+      # Drop blank lines and append ,${adlistID} to every remaining line before import
+      sed -e "/^$/d;s/$/,${adlistID}/" "${saveLocation}" >> "${target}"
     # Check if $patternbuffer is a non-zero length file
     elif [[ -s "${patternBuffer}" ]]; then
       # Determine if blocklist is non-standard and parse as appropriate
       gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}"
-      # Add domains to database table
-      str="Adding adlist with ID ${adlistID} to database table"
-      echo -ne "  ${INFO} ${str}..."
-      database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
-      echo -e "${OVER}  ${TICK} ${str}"
+      # Drop blank lines and append ,${adlistID} to every remaining line before import
+      sed -e "/^$/d;s/$/,${adlistID}/" "${saveLocation}" >> "${target}"
     else
       # Fall back to previously cached list if $patternBuffer is empty
       echo -e "  ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
@@ -446,11 +497,8 @@
       # Determine if cached list has read permission
       if [[ -r "${saveLocation}" ]]; then
         echo -e "  ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
-        # Add domains to database table
-        str="Adding to database table"
-        echo -ne "  ${INFO} ${str}..."
-        database_table_from_file "gravity" "${saveLocation}" "${adlistID}"
-        echo -e "${OVER}  ${TICK} ${str}"
+        # Drop blank lines and append ,${adlistID} to every remaining line before import
+        sed -e "/^$/d;s/$/,${adlistID}/" "${saveLocation}" >> "${target}"
       else
         echo -e "  ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
       fi
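Concretely, the sed expression shared by the three branches above turns a cached list file into import-ready CSV rows; for a hypothetical adlist ID of 12:

    # stdin stands in for a cached list containing a blank line
    printf 'doubleclick.example\n\nads.example\n' | sed -e '/^$/d;s/$/,12/'
    # -> doubleclick.example,12
    # -> ads.example,12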
@@ -686,10 +734,6 @@ fi
 # Move possibly existing legacy files to the gravity database
 migrate_to_database
 
-# Ensure proper permissions are set for the newly created database
-chown pihole:pihole "${gravityDBfile}"
-chmod g+w "${piholeDir}" "${gravityDBfile}"
-
 if [[ "${forceDelete:-}" == true ]]; then
   str="Deleting existing list cache"
   echo -ne "${INFO} ${str}..."
@@ -704,15 +748,26 @@ gravity_DownloadBlocklists
 
 # Create local.list
 gravity_generateLocalList
-gravity_ShowCount
 
+# Update gravity timestamp
 update_gravity_timestamp
 
-gravity_Cleanup
-echo ""
+# Migrate rest of the data from old to new database
+gravity_swap_databases
+
+# Ensure proper permissions are set for the database
+chown pihole:pihole "${gravityDBfile}"
+chmod g+w "${piholeDir}" "${gravityDBfile}"
 
 # Determine if DNS has been restarted by this instance of gravity
 if [[ -z "${dnsWasOffline:-}" ]]; then
   "${PIHOLE_COMMAND}" restartdns reload
 fi
+
+# Compute numbers to be displayed
+gravity_ShowCount
+
+gravity_Cleanup
+echo ""
+
 "${PIHOLE_COMMAND}" status