From 2a72012ca1448b8e2f7f0de3e79c6b9d7d72b7cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 21 Oct 2023 10:55:06 +0000 Subject: [PATCH 01/12] Bump actions/checkout from 4.1.0 to 4.1.1 Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.0 to 4.1.1. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4.1.0...v4.1.1) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/stale.yml | 2 +- .github/workflows/sync-back-to-dev.yml | 2 +- .github/workflows/test.yml | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 4685aa2c..6544db61 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4.1.0 + uses: actions/checkout@v4.1.1 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 14c55d10..0e149c79 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -40,7 +40,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4.1.0 + uses: actions/checkout@v4.1.1 - name: Remove 'stale' label run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }} env: diff --git a/.github/workflows/sync-back-to-dev.yml b/.github/workflows/sync-back-to-dev.yml index a1025629..e52d4ae9 100644 --- a/.github/workflows/sync-back-to-dev.yml +++ b/.github/workflows/sync-back-to-dev.yml @@ -33,7 +33,7 @@ jobs: name: Syncing branches steps: - name: Checkout - uses: actions/checkout@v4.1.0 + uses: actions/checkout@v4.1.1 - name: Opening pull request run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal' env: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 623590da..b070c982 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4.1.0 + uses: actions/checkout@v4.1.1 - name: Check scripts in repository are executable run: | @@ -65,7 +65,7 @@ jobs: DISTRO: ${{matrix.distro}} steps: - name: Checkout repository - uses: actions/checkout@v4.1.0 + uses: actions/checkout@v4.1.1 - name: Set up Python 3.10 uses: actions/setup-python@v4.7.1 From ec86124997d1b8969be2f528fc63b2626486eb73 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 28 Oct 2023 10:32:55 +0000 Subject: [PATCH 02/12] Bump pytest from 7.4.2 to 7.4.3 in /test Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.2 to 7.4.3. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.2...7.4.3) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index 27417754..45c7c7c0 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,5 +1,5 @@ pyyaml == 6.0.1 -pytest == 7.4.2 +pytest == 7.4.3 pytest-xdist == 3.3.1 pytest-testinfra == 9.0.0 tox == 4.11.3 From 3c4f217876f9ad7f8889b94d0499a480518f8a22 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 18 Nov 2023 10:27:10 +0000 Subject: [PATCH 03/12] Bump pytest-xdist from 3.3.1 to 3.4.0 in /test Bumps [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) from 3.3.1 to 3.4.0. - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v3.3.1...v3.4.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index 45c7c7c0..56423192 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,6 +1,6 @@ pyyaml == 6.0.1 pytest == 7.4.3 -pytest-xdist == 3.3.1 +pytest-xdist == 3.4.0 pytest-testinfra == 9.0.0 tox == 4.11.3 From 2e73eb36efbd129a105f5e18bfa0a6dd1b240032 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 18 Nov 2023 10:54:16 +0000 Subject: [PATCH 04/12] Bump pytest-testinfra from 9.0.0 to 10.0.0 in /test Bumps [pytest-testinfra](https://github.com/pytest-dev/pytest-testinfra) from 9.0.0 to 10.0.0. - [Release notes](https://github.com/pytest-dev/pytest-testinfra/releases) - [Changelog](https://github.com/pytest-dev/pytest-testinfra/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-testinfra/compare/9.0.0...10.0.0) --- updated-dependencies: - dependency-name: pytest-testinfra dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index 56423192..74c67fd9 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,6 +1,6 @@ pyyaml == 6.0.1 pytest == 7.4.3 pytest-xdist == 3.4.0 -pytest-testinfra == 9.0.0 +pytest-testinfra == 10.0.0 tox == 4.11.3 From 9d57f6493751d319440727f927f099887decb886 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 25 Nov 2023 10:58:21 +0000 Subject: [PATCH 05/12] Bump pytest-xdist from 3.4.0 to 3.5.0 in /test Bumps [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) from 3.4.0 to 3.5.0. - [Release notes](https://github.com/pytest-dev/pytest-xdist/releases) - [Changelog](https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-xdist/compare/v3.4.0...v3.5.0) --- updated-dependencies: - dependency-name: pytest-xdist dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index 74c67fd9..799e3fad 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,6 +1,6 @@ pyyaml == 6.0.1 pytest == 7.4.3 -pytest-xdist == 3.4.0 +pytest-xdist == 3.5.0 pytest-testinfra == 10.0.0 tox == 4.11.3 From bfc824f2ff05f56d24697230463a3ec0bf2a25cf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 10:04:50 +0000 Subject: [PATCH 06/12] Bump tox from 4.11.3 to 4.11.4 in /test Bumps [tox](https://github.com/tox-dev/tox) from 4.11.3 to 4.11.4. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.11.3...4.11.4) --- updated-dependencies: - dependency-name: tox dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index 799e3fad..bfc6d027 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -2,5 +2,5 @@ pyyaml == 6.0.1 pytest == 7.4.3 pytest-xdist == 3.5.0 pytest-testinfra == 10.0.0 -tox == 4.11.3 +tox == 4.11.4 From 16180e4b234510d3c52545f817164fd9873448a8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 9 Dec 2023 10:36:44 +0000 Subject: [PATCH 07/12] Bump actions/setup-python from 4.7.1 to 5.0.0 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.7.1 to 5.0.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4.7.1...v5.0.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b070c982..35ed0c30 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -68,7 +68,7 @@ jobs: uses: actions/checkout@v4.1.1 - name: Set up Python 3.10 - uses: actions/setup-python@v4.7.1 + uses: actions/setup-python@v5.0.0 with: python-version: "3.10" From c34464d1e8f6e0ba5731171791d9a31d801dfa5c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 9 Dec 2023 10:36:49 +0000 Subject: [PATCH 08/12] Bump actions/stale from 8.0.0 to 9.0.0 Bumps [actions/stale](https://github.com/actions/stale) from 8.0.0 to 9.0.0. - [Release notes](https://github.com/actions/stale/releases) - [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/stale/compare/v8.0.0...v9.0.0) --- updated-dependencies: - dependency-name: actions/stale dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/stale.yml | 2 +- .github/workflows/stale_pr.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 0e149c79..095d7358 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -17,7 +17,7 @@ jobs: issues: write steps: - - uses: actions/stale@v8.0.0 + - uses: actions/stale@v9.0.0 with: repo-token: ${{ secrets.GITHUB_TOKEN }} days-before-stale: 30 diff --git a/.github/workflows/stale_pr.yml b/.github/workflows/stale_pr.yml index 2db2a25d..96650818 100644 --- a/.github/workflows/stale_pr.yml +++ b/.github/workflows/stale_pr.yml @@ -17,7 +17,7 @@ jobs: pull-requests: write steps: - - uses: actions/stale@v8.0.0 + - uses: actions/stale@v9.0.0 with: repo-token: ${{ secrets.GITHUB_TOKEN }} # Do not automatically mark PR/issue as stale From 3f7413d538dae3524b0a5d627668aecd60bcd815 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 9 Dec 2023 23:07:35 +0100 Subject: [PATCH 09/12] Add "-ni" to all sqlite3 invocations Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 30 ++++++++-------- advanced/Scripts/list.sh | 20 +++++------ advanced/Scripts/piholeARPTable.sh | 4 +-- advanced/Scripts/piholeDebug.sh | 12 +++---- advanced/Scripts/piholeLogFlush.sh | 2 +- advanced/Scripts/query.sh | 6 ++-- advanced/Scripts/webpage.sh | 12 +++---- gravity.sh | 36 +++++++++---------- 8 files changed, 61 insertions(+), 61 deletions(-) diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index a7ba60a9..1459ecd9 100755 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -19,13 +19,13 @@ upgrade_gravityDB(){ auditFile="${piholeDir}/auditlog.list" # Get database version - version="$(pihole-FTL sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" + version="$(pihole-FTL sqlite3 -ni "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")" if [[ "$version" == "1" ]]; then # This migration script upgrades the gravity.db file by # adding the domain_audit table echo -e " ${INFO} Upgrading gravity database from version 1 to 2" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/1_to_2.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/1_to_2.sql" version=2 # Store audit domains in database table @@ -40,28 +40,28 @@ upgrade_gravityDB(){ # renaming the regex table to regex_blacklist, and # creating a new regex_whitelist table + corresponding linking table and views echo -e " ${INFO} Upgrading gravity database from version 2 to 3" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/2_to_3.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/2_to_3.sql" version=3 fi if [[ "$version" == "3" ]]; then # This migration script unifies the formally separated domain # lists into a single table with a UNIQUE domain constraint echo -e " ${INFO} Upgrading gravity database from version 3 to 4" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/3_to_4.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/3_to_4.sql" version=4 fi if [[ "$version" == "4" ]]; then # This migration script upgrades the gravity and list views # implementing necessary changes for per-client blocking echo -e " ${INFO} Upgrading gravity database from version 4 to 5" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/4_to_5.sql" + pihole-FTL sqlite3 -ni "${database}" < 
"${scriptPath}/4_to_5.sql" version=5 fi if [[ "$version" == "5" ]]; then # This migration script upgrades the adlist view # to return an ID used in gravity.sh echo -e " ${INFO} Upgrading gravity database from version 5 to 6" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/5_to_6.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/5_to_6.sql" version=6 fi if [[ "$version" == "6" ]]; then @@ -69,7 +69,7 @@ upgrade_gravityDB(){ # which is automatically associated to all clients not # having their own group assignments echo -e " ${INFO} Upgrading gravity database from version 6 to 7" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/6_to_7.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/6_to_7.sql" version=7 fi if [[ "$version" == "7" ]]; then @@ -77,21 +77,21 @@ upgrade_gravityDB(){ # to ensure uniqueness on the group name # We also add date_added and date_modified columns echo -e " ${INFO} Upgrading gravity database from version 7 to 8" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/7_to_8.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/7_to_8.sql" version=8 fi if [[ "$version" == "8" ]]; then # This migration fixes some issues that were introduced # in the previous migration script. echo -e " ${INFO} Upgrading gravity database from version 8 to 9" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/8_to_9.sql" version=9 fi if [[ "$version" == "9" ]]; then # This migration drops unused tables and creates triggers to remove # obsolete groups assignments when the linked items are deleted echo -e " ${INFO} Upgrading gravity database from version 9 to 10" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/9_to_10.sql" version=10 fi if [[ "$version" == "10" ]]; then @@ -101,31 +101,31 @@ upgrade_gravityDB(){ # to keep the copying process generic (needs the same columns in both the # source and the destination databases). 
echo -e " ${INFO} Upgrading gravity database from version 10 to 11" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/10_to_11.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/10_to_11.sql" version=11 fi if [[ "$version" == "11" ]]; then # Rename group 0 from "Unassociated" to "Default" echo -e " ${INFO} Upgrading gravity database from version 11 to 12" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/11_to_12.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/11_to_12.sql" version=12 fi if [[ "$version" == "12" ]]; then # Add column date_updated to adlist table echo -e " ${INFO} Upgrading gravity database from version 12 to 13" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/12_to_13.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/12_to_13.sql" version=13 fi if [[ "$version" == "13" ]]; then # Add columns number and status to adlist table echo -e " ${INFO} Upgrading gravity database from version 13 to 14" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/13_to_14.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/13_to_14.sql" version=14 fi if [[ "$version" == "14" ]]; then # Changes the vw_adlist created in 5_to_6 echo -e " ${INFO} Upgrading gravity database from version 14 to 15" - pihole-FTL sqlite3 "${database}" < "${scriptPath}/14_to_15.sql" + pihole-FTL sqlite3 -ni "${database}" < "${scriptPath}/14_to_15.sql" version=15 fi } diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index b76a7ef7..76558e58 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -150,18 +150,18 @@ AddDomain() { domain="$1" # Is the domain in the list we want to add it to? - num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" + num="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")" requestedListname="$(GetListnameFromTypeId "${typeId}")" if [[ "${num}" -ne 0 ]]; then - existingTypeId="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" + existingTypeId="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")" if [[ "${existingTypeId}" == "${typeId}" ]]; then if [[ "${verbose}" == true ]]; then echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!" fi else existingListname="$(GetListnameFromTypeId "${existingTypeId}")" - pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';" if [[ "${verbose}" == true ]]; then echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!" fi @@ -177,10 +177,10 @@ AddDomain() { # Insert only the domain here. 
The enabled and date_added fields will be filled # with their default values (enabled = true, date_added = current timestamp) if [[ -z "${comment}" ]]; then - pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});" else # also add comment when variable has been set through the "--comment" option - pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');" fi } @@ -189,7 +189,7 @@ RemoveDomain() { domain="$1" # Is the domain in the list we want to remove it from? - num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" + num="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")" requestedListname="$(GetListnameFromTypeId "${typeId}")" @@ -206,14 +206,14 @@ RemoveDomain() { fi reload=true # Remove it from the current list - pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};" } Displaylist() { local count num_pipes domain enabled status nicedate requestedListname requestedListname="$(GetListnameFromTypeId "${typeId}")" - data="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" + data="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)" if [[ -z $data ]]; then echo -e "Not showing empty list" @@ -251,10 +251,10 @@ Displaylist() { } NukeList() { - count=$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};") + count=$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(1) FROM domainlist WHERE type = ${typeId};") listname="$(GetListnameFromTypeId "${typeId}")" if [ "$count" -gt 0 ];then - pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};" echo " ${TICK} Removed ${count} domain(s) from the ${listname}" else echo " ${INFO} ${listname} already empty. Nothing to do!" diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index 5daa025d..b92dd124 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -39,7 +39,7 @@ flushARP(){ # Truncate network_addresses table in pihole-FTL.db # This needs to be done before we can truncate the network table due to # foreign key constraints - if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then + if ! output=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table" echo " Database location: ${DBFILE}" echo " Output: ${output}" @@ -47,7 +47,7 @@ flushARP(){ fi # Truncate network table in pihole-FTL.db - if ! output=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then + if ! 
output=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM network" 2>&1); then echo -e "${OVER} ${CROSS} Failed to truncate network table" echo " Database location: ${DBFILE}" echo " Output: ${output}" diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 1ca52f45..fb1efbeb 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -865,7 +865,7 @@ dig_at() { # This helps emulate queries to different domains that a user might query # It will also give extra assurance that Pi-hole is correctly resolving and blocking domains local random_url - random_url=$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity WHERE domain not like '||%^' ORDER BY RANDOM() LIMIT 1") + random_url=$(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity WHERE domain not like '||%^' ORDER BY RANDOM() LIMIT 1") # Fallback if no non-ABP style domains were found if [ -z "${random_url}" ]; then random_url="flurry.com" @@ -1226,7 +1226,7 @@ show_db_entries() { IFS=$'\r\n' local entries=() mapfile -t entries < <(\ - pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" \ + pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" \ -cmd ".headers on" \ -cmd ".mode column" \ -cmd ".width ${widths}" \ @@ -1251,7 +1251,7 @@ show_FTL_db_entries() { IFS=$'\r\n' local entries=() mapfile -t entries < <(\ - pihole-FTL sqlite3 "${PIHOLE_FTL_DB_FILE}" \ + pihole-FTL sqlite3 -ni "${PIHOLE_FTL_DB_FILE}" \ -cmd ".headers on" \ -cmd ".mode column" \ -cmd ".width ${widths}" \ @@ -1317,7 +1317,7 @@ analyze_gravity_list() { fi show_db_entries "Info table" "SELECT property,value FROM info" "20 40" - gravity_updated_raw="$(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated_raw="$(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" gravity_updated="$(date -d @"${gravity_updated_raw}")" log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" @@ -1325,7 +1325,7 @@ analyze_gravity_list() { OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() - mapfile -t gravity_sample < <(pihole-FTL sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") + mapfile -t gravity_sample < <(pihole-FTL sqlite3 -ni "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do @@ -1357,7 +1357,7 @@ database_integrity_check(){ log_write "${INFO} Checking foreign key constraints of ${database} ... 
(this can take several minutes)" unset result - result="$(pihole-FTL sqlite3 "${database}" -cmd ".headers on" -cmd ".mode column" "PRAGMA foreign_key_check" 2>&1 & spinner)" + result="$(pihole-FTL sqlite3 -ni "${database}" -cmd ".headers on" -cmd ".mode column" "PRAGMA foreign_key_check" 2>&1 & spinner)" if [[ -z ${result} ]]; then log_write "${TICK} No foreign key errors in ${database}" else diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh index 3473fad5..b06aac8b 100755 --- a/advanced/Scripts/piholeLogFlush.sh +++ b/advanced/Scripts/piholeLogFlush.sh @@ -63,7 +63,7 @@ else fi fi # Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history) - deleted=$(pihole-FTL sqlite3 "${DBFILE}" "DELETE FROM query_storage WHERE timestamp >= strftime('%s','now')-86400; select changes() from query_storage limit 1") + deleted=$(pihole-FTL sqlite3 -ni "${DBFILE}" "DELETE FROM query_storage WHERE timestamp >= strftime('%s','now')-86400; select changes() from query_storage limit 1") # Restart pihole-FTL to force reloading history sudo pihole restartdns diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 1d3b0a29..ebcc6f79 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -96,7 +96,7 @@ scanDatabaseTable() { # Are there ABP entries on gravity? # Return 1 if abp_domain=1 or Zero if abp_domain=0 or not set abpquerystr="SELECT EXISTS (SELECT 1 FROM info WHERE property='abp_domains' and value='1')" - abpfound="$(pihole-FTL sqlite3 "${gravityDBfile}" "${abpquerystr}")" 2> /dev/null + abpfound="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "${abpquerystr}")" 2> /dev/null # Create search string for ABP entries only if needed if [ "${abpfound}" -eq 1 ]; then @@ -129,7 +129,7 @@ scanDatabaseTable() { fi # Send prepared query to gravity database - result="$(pihole-FTL sqlite3 -separator ',' "${gravityDBfile}" "${querystr}")" 2> /dev/null + result="$(pihole-FTL sqlite3 -ni -separator ',' "${gravityDBfile}" "${querystr}")" 2> /dev/null if [[ -z "${result}" ]]; then # Return early when there are no matches in this table return @@ -166,7 +166,7 @@ scanRegexDatabaseTable() { list_type="${3:-}" # Query all regex from the corresponding database tables - mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${list_type}" 2> /dev/null) + mapfile -t regexList < <(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${list_type}" 2> /dev/null) # If we have regexps to process if [[ "${#regexList[@]}" -ne 0 ]]; then diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index e05a6aff..62ab4ea9 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -560,13 +560,13 @@ CustomizeAdLists() { if CheckUrl "${address}"; then if [[ "${args[2]}" == "enable" ]]; then - pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" elif [[ "${args[2]}" == "disable" ]]; then - pihole-FTL sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" elif [[ "${args[2]}" == "add" ]]; then - pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" + pihole-FTL 
sqlite3 -ni "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" elif [[ "${args[2]}" == "del" ]]; then - pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" else echo "Not permitted" return 1 @@ -700,12 +700,12 @@ addAudit() done # Insert only the domain here. The date_added field will be # filled with its default value (date_added = current timestamp) - pihole-FTL sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};" } clearAudit() { - pihole-FTL sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "DELETE FROM domain_audit;" } SetPrivacyLevel() { diff --git a/gravity.sh b/gravity.sh index db199090..62631e3c 100755 --- a/gravity.sh +++ b/gravity.sh @@ -84,7 +84,7 @@ fi # Generate new SQLite3 file from schema template generate_gravity_database() { - if ! pihole-FTL sqlite3 "${gravityDBfile}" < "${gravityDBschema}"; then + if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" < "${gravityDBschema}"; then echo -e " ${CROSS} Unable to create ${gravityDBfile}" return 1 fi @@ -99,7 +99,7 @@ gravity_swap_databases() { echo -ne " ${INFO} ${str}..." # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once - output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) + output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -135,7 +135,7 @@ gravity_swap_databases() { # Update timestamp when the gravity table was last updated successfully update_gravity_timestamp() { - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -179,7 +179,7 @@ database_table_from_file() { # Get MAX(id) from domainlist when INSERTing into this table if [[ "${table}" == "domainlist" ]]; then - rowid="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" + rowid="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" if [[ -z "$rowid" ]]; then rowid=0 fi @@ -209,7 +209,7 @@ database_table_from_file() { # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then @@ -229,7 +229,7 @@ database_table_from_file() { # Check if a column with name ${2} exists in gravity table with name ${1} gravity_column_exists() { - output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 ) if [[ "${output}" == "1" ]]; then return 0 # Bash 0 is success fi @@ -244,7 +244,7 @@ database_adlist_number() { return; fi - output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -260,7 +260,7 @@ database_adlist_status() { return; fi - output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -378,8 +378,8 @@ gravity_DownloadBlocklists() { # Retrieve source URLs from gravity database # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true) - mapfile -t sources <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" - mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)" + mapfile -t sources <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)" + mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)" # Parse source domains from $sources mapfile -t sourceDomains <<< "$( @@ -408,7 +408,7 @@ gravity_DownloadBlocklists() { str="Preparing new gravity database" echo -ne " ${INFO} ${str}..." rm "${gravityTEMPfile}" > /dev/null 2>&1 - output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) + output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -428,7 +428,7 @@ gravity_DownloadBlocklists() { copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}" fi - output=$( { pihole-FTL sqlite3 "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 ) + output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then @@ -668,12 +668,12 @@ gravity_Table_Count() { local table="${1}" local str="${2}" local num - num="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")" + num="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")" if [[ "${table}" == "gravity" ]]; then local unique - unique="$(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")" + unique="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")" echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})" - pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" + pihole-FTL sqlite3 -ni "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" else echo -e " ${INFO} Number of ${str}: ${num}" fi @@ -754,7 +754,7 @@ database_recovery() { local str="Checking integrity of existing gravity database (this can take a while)" local option="${1}" echo -ne " ${INFO} ${str}..." - result="$(pihole-FTL sqlite3 "${gravityDBfile}" "PRAGMA integrity_check" 2>&1)" + result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA integrity_check" 2>&1)" if [[ ${result} = "ok" ]]; then echo -e "${OVER} ${TICK} ${str} - no errors found" @@ -762,7 +762,7 @@ database_recovery() { str="Checking foreign keys of existing gravity database (this can take a while)" echo -ne " ${INFO} ${str}..." unset result - result="$(pihole-FTL sqlite3 "${gravityDBfile}" "PRAGMA foreign_key_check" 2>&1)" + result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA foreign_key_check" 2>&1)" if [[ -z ${result} ]]; then echo -e "${OVER} ${TICK} ${str} - no errors found" if [[ "${option}" != "force" ]]; then @@ -781,7 +781,7 @@ database_recovery() { echo -ne " ${INFO} ${str}..." 
# We have to remove any possibly existing recovery database or this will fail rm -f "${gravityDBfile}.recovered" > /dev/null 2>&1 - if result="$(pihole-FTL sqlite3 "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 "${gravityDBfile}.recovered" 2>&1)"; then + if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then echo -e "${OVER} ${TICK} ${str} - success" mv "${gravityDBfile}" "${gravityDBfile}.old" mv "${gravityDBfile}.recovered" "${gravityDBfile}" From e8338d059009f009f8b93461e4fee51792acb600 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 10 Dec 2023 10:41:00 +0100 Subject: [PATCH 10/12] Install FTL's development branch to get the latest features during tests Signed-off-by: DL6ER --- test/test_any_automated_install.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/test/test_any_automated_install.py b/test/test_any_automated_install.py index 840d1df7..c1b91664 100644 --- a/test/test_any_automated_install.py +++ b/test/test_any_automated_install.py @@ -176,6 +176,12 @@ def test_installPihole_fresh_install_readableFiles(host): setup_var_file += "INSTALL_WEB_INTERFACE=true\n" setup_var_file += "EOF\n" host.run(setup_var_file) + # Install FTL's development branch to get the latest features + host.run( + """ + echo "development" > /etc/pihole/ftlbranch + """ + ) install = host.run( """ export TERM=xterm @@ -431,6 +437,12 @@ def test_installPihole_fresh_install_readableBlockpage(host, test_webpage): setup_var_file += "INSTALL_WEB_INTERFACE=true\n" setup_var_file += "EOF\n" host.run(setup_var_file) + # Install FTL's development branch to get the latest features + host.run( + """ + echo "development" > /etc/pihole/ftlbranch + """ + ) installWeb = host.run( """ export TERM=xterm From c6049d400214d45f6bd9229e6fd3291a3d53f3dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Dec 2023 10:30:20 +0000 Subject: [PATCH 11/12] Bump github/codeql-action from 2 to 3 Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/codeql-analysis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 6544db61..fc821194 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -29,12 +29,12 @@ jobs: # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: 'python' - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 From 05e7d0ee9263e17e0bc6706b3822310d7ed084a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 6 Jan 2024 10:10:22 +0000 Subject: [PATCH 12/12] Bump pytest from 7.4.3 to 7.4.4 in /test Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.3 to 7.4.4. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.3...7.4.4) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- test/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/requirements.txt b/test/requirements.txt index bfc6d027..0873d097 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,5 +1,5 @@ pyyaml == 6.0.1 -pytest == 7.4.3 +pytest == 7.4.4 pytest-xdist == 3.5.0 pytest-testinfra == 10.0.0 tox == 4.11.4
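
PATCH 09/12 applies one mechanical change across eight scripts: every "pihole-FTL sqlite3" invocation gains the "-ni" flag. The flag name suggests a non-interactive mode for FTL's embedded SQLite shell, but the patch itself does not state the motivation, so treat that reading as an assumption. The shell sketch below is not part of the patch series; it is a spot-check for a working tree with these patches applied, assuming only the paths named in the diffstat (advanced/Scripts/ and gravity.sh). Any lines it prints are call sites worth a manual look rather than proof of an error.

    # List "pihole-FTL sqlite3" call sites that do not pass -ni directly after "sqlite3".
    # No matches (and the closing message) means every invocation follows the new pattern.
    if ! grep -rn 'pihole-FTL sqlite3' advanced/Scripts gravity.sh | grep -v 'sqlite3 -ni'; then
        echo "all pihole-FTL sqlite3 invocations use -ni"
    fi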