// Copyright 2017 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package vulnerability

import (
	"database/sql"
	"errors"
	"fmt"
	"time"

	"github.com/lib/pq"
	log "github.com/sirupsen/logrus"

	"github.com/coreos/clair/database"
	"github.com/coreos/clair/database/pgsql/feature"
	"github.com/coreos/clair/database/pgsql/monitoring"
	"github.com/coreos/clair/database/pgsql/page"
	"github.com/coreos/clair/database/pgsql/util"
	"github.com/coreos/clair/ext/versionfmt"
	"github.com/coreos/clair/pkg/pagination"
)

const (
	searchVulnerability = `
		SELECT v.id, v.description, v.link, v.severity, v.metadata, n.version_format
		FROM vulnerability AS v, namespace AS n
		WHERE v.namespace_id = n.id
			AND v.name = $1
			AND n.name = $2
			AND v.deleted_at IS NULL
		`

	searchVulnerabilityByID = `
		SELECT v.name, v.description, v.link, v.severity, v.metadata, n.name, n.version_format
		FROM vulnerability AS v, namespace AS n
		WHERE v.namespace_id = n.id
			AND v.id = $1`

	insertVulnerability = `
		WITH ns AS (
			SELECT id FROM namespace WHERE name = $6 AND version_format = $7
		)
		INSERT INTO Vulnerability(namespace_id, name, description, link, severity, metadata, created_at)
		VALUES((SELECT id FROM ns), $1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
		RETURNING id`

	removeVulnerability = `
		UPDATE Vulnerability
		SET deleted_at = CURRENT_TIMESTAMP
		WHERE namespace_id = (SELECT id FROM Namespace WHERE name = $1)
			AND name = $2
			AND deleted_at IS NULL
		RETURNING id`

	searchNotificationVulnerableAncestry = `
		SELECT DISTINCT ON (a.id)
			a.id, a.name
		FROM vulnerability_affected_namespaced_feature AS vanf,
			ancestry_layer AS al, ancestry_feature AS af, ancestry AS a
		WHERE vanf.vulnerability_id = $1
			AND a.id >= $2
			AND al.ancestry_id = a.id
			AND al.id = af.ancestry_layer_id
			AND af.namespaced_feature_id = vanf.namespaced_feature_id
		ORDER BY a.id ASC
		LIMIT $3;`
)
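
// queryInvalidateVulnerabilityCache builds the DELETE statement that drops the
// cached affected features for the given number of vulnerability IDs.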
func queryInvalidateVulnerabilityCache(count int) string {
	return fmt.Sprintf(`DELETE FROM vulnerability_affected_feature
	WHERE vulnerability_id IN (%s)`,
		util.QueryString(1, count))
}

// NOTE(Sida): Every search query can only take a count smaller than the
// PostgreSQL stack-depth setting: an IN clause is resolved into nested ORs and
// the parser may exceed the stack depth. TODO(Sida): Generate different
// queries for different counts: for count < 5120 use IN; for counts between
// 5120 and 65536 use a temporary table; for counts above 65535 the caller is
// expected to split the data into batches.
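//
// querySearchLastDeletedVulnerabilityID builds a query that returns, for each
// given (name, namespace) pair, the most recently deleted matching
// vulnerability. For illustration only (assuming util.QueryString expands to
// fixed-size placeholder tuples), a count of 2 is expected to produce an IN
// list shaped roughly like:
//
//	(v.name, n.name) IN ( ($1,$2),($3,$4) )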
func querySearchLastDeletedVulnerabilityID(count int) string {
	return fmt.Sprintf(`
		SELECT vid, vname, nname FROM (
			SELECT v.id AS vid, v.name AS vname, n.name AS nname,
				row_number() OVER (
					PARTITION by (v.name, n.name)
					ORDER BY v.deleted_at DESC
				) AS rownum
			FROM vulnerability AS v, namespace AS n
			WHERE v.namespace_id = n.id
				AND (v.name, n.name) IN ( %s )
				AND v.deleted_at IS NOT NULL
		) tmp WHERE rownum <= 1`,
		util.QueryString(2, count))
}
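
// querySearchNotDeletedVulnerabilityID builds a query that resolves the
// database IDs of the given (name, namespace) pairs, ignoring deleted entries.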
func querySearchNotDeletedVulnerabilityID(count int) string {
	return fmt.Sprintf(`
		SELECT v.id, v.name, n.name FROM vulnerability AS v, namespace AS n
		WHERE v.namespace_id = n.id AND (v.name, n.name) IN (%s)
			AND v.deleted_at IS NULL`,
		util.QueryString(2, count))
}

type affectedAncestry struct {
	name string
	id   int64
}

type affectRelation struct {
	vulnerabilityID     int64
	namespacedFeatureID int64
	addedBy             int64
}

type affectedFeatureRows struct {
	rows map[int64]database.AffectedFeature
}
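
// FindVulnerabilities retrieves the given vulnerabilities from the database,
// in input order, together with their affected features. Entries that are not
// found are returned with Valid set to false.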
func FindVulnerabilities(tx *sql.Tx, vulnerabilities []database.VulnerabilityID) ([]database.NullableVulnerability, error) {
	defer monitoring.ObserveQueryTime("findVulnerabilities", "", time.Now())
	resultVuln := make([]database.NullableVulnerability, len(vulnerabilities))
	vulnIDMap := map[int64][]*database.NullableVulnerability{}

	//TODO(Sida): Change to bulk search.
	stmt, err := tx.Prepare(searchVulnerability)
	if err != nil {
		return nil, err
	}

	// load vulnerabilities
	for i, key := range vulnerabilities {
		var (
			id   sql.NullInt64
			vuln = database.NullableVulnerability{
				VulnerabilityWithAffected: database.VulnerabilityWithAffected{
					Vulnerability: database.Vulnerability{
						Name: key.Name,
						Namespace: database.Namespace{
							Name: key.Namespace,
						},
					},
				},
			}
		)

		err := stmt.QueryRow(key.Name, key.Namespace).Scan(
			&id,
			&vuln.Description,
			&vuln.Link,
			&vuln.Severity,
			&vuln.Metadata,
			&vuln.Namespace.VersionFormat,
		)

		if err != nil && err != sql.ErrNoRows {
			stmt.Close()
			return nil, util.HandleError("searchVulnerability", err)
		}
		vuln.Valid = id.Valid
		resultVuln[i] = vuln
		if id.Valid {
			vulnIDMap[id.Int64] = append(vulnIDMap[id.Int64], &resultVuln[i])
		}
	}

	if err := stmt.Close(); err != nil {
		return nil, util.HandleError("searchVulnerability", err)
	}

	toQuery := make([]int64, 0, len(vulnIDMap))
	for id := range vulnIDMap {
		toQuery = append(toQuery, id)
	}

	// load vulnerability affected features
	rows, err := tx.Query(searchVulnerabilityAffected, pq.Array(toQuery))
	if err != nil {
		return nil, util.HandleError("searchVulnerabilityAffected", err)
	}

	for rows.Next() {
		var (
			id int64
			f  database.AffectedFeature
		)

		err := rows.Scan(&id, &f.FeatureName, &f.AffectedVersion, &f.FeatureType, &f.FixedInVersion)
		if err != nil {
			return nil, util.HandleError("searchVulnerabilityAffected", err)
		}

		for _, vuln := range vulnIDMap[id] {
			f.Namespace = vuln.Namespace
			vuln.Affected = append(vuln.Affected, f)
		}
	}

	return resultVuln, nil
}
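
// InsertVulnerabilities inserts the given vulnerabilities, their affected
// features, and the corresponding namespaced-feature cache entries.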
func InsertVulnerabilities(tx *sql.Tx, vulnerabilities []database.VulnerabilityWithAffected) error {
	defer monitoring.ObserveQueryTime("insertVulnerabilities", "all", time.Now())
	// bulk insert vulnerabilities
	vulnIDs, err := insertVulnerabilities(tx, vulnerabilities)
	if err != nil {
		return err
	}

	// bulk insert vulnerability affected features
	vulnFeatureMap, err := InsertVulnerabilityAffected(tx, vulnIDs, vulnerabilities)
	if err != nil {
		return err
	}

	return CacheVulnerabiltyAffectedNamespacedFeature(tx, vulnFeatureMap)
}

// InsertVulnerabilityAffected inserts a set of vulnerability affected features for each vulnerability provided.
//
// The i-th element of vulnerabilityIDs corresponds to the i-th element of vulnerabilities.
func InsertVulnerabilityAffected(tx *sql.Tx, vulnerabilityIDs []int64, vulnerabilities []database.VulnerabilityWithAffected) (map[int64]affectedFeatureRows, error) {
	var (
		vulnFeature = map[int64]affectedFeatureRows{}
		affectedID  int64
	)

	types, err := feature.GetFeatureTypeMap(tx)
	if err != nil {
		return nil, err
	}

	stmt, err := tx.Prepare(insertVulnerabilityAffected)
	if err != nil {
		return nil, util.HandleError("insertVulnerabilityAffected", err)
	}

	defer stmt.Close()
	for i, vuln := range vulnerabilities {
		// affected feature row ID -> affected feature
		affectedFeatures := map[int64]database.AffectedFeature{}
		for _, f := range vuln.Affected {
			err := stmt.QueryRow(vulnerabilityIDs[i], f.FeatureName, f.AffectedVersion, types.ByName[f.FeatureType], f.FixedInVersion).Scan(&affectedID)
			if err != nil {
				return nil, util.HandleError("insertVulnerabilityAffected", err)
			}
			affectedFeatures[affectedID] = f
		}
		vulnFeature[vulnerabilityIDs[i]] = affectedFeatureRows{rows: affectedFeatures}
	}

	return vulnFeature, nil
}

// insertVulnerabilities inserts a set of unique vulnerabilities into the
// database, under the assumption that all vulnerabilities are valid.
func insertVulnerabilities(tx *sql.Tx, vulnerabilities []database.VulnerabilityWithAffected) ([]int64, error) {
	var (
		vulnID  int64
		vulnIDs = make([]int64, 0, len(vulnerabilities))
		vulnMap = map[database.VulnerabilityID]struct{}{}
	)

	for _, v := range vulnerabilities {
		key := database.VulnerabilityID{
			Name:      v.Name,
			Namespace: v.Namespace.Name,
		}

		// Ensure uniqueness of vulnerability IDs
		if _, ok := vulnMap[key]; ok {
			return nil, errors.New("inserting duplicated vulnerabilities is not allowed")
		}
		vulnMap[key] = struct{}{}
	}

	//TODO(Sida): Change to bulk insert.
	stmt, err := tx.Prepare(insertVulnerability)
	if err != nil {
		return nil, util.HandleError("insertVulnerability", err)
	}

	defer stmt.Close()
	for _, vuln := range vulnerabilities {
		err := stmt.QueryRow(vuln.Name, vuln.Description,
			vuln.Link, &vuln.Severity, &vuln.Metadata,
			vuln.Namespace.Name, vuln.Namespace.VersionFormat).Scan(&vulnID)
		if err != nil {
			return nil, util.HandleError("insertVulnerability", err)
		}

		vulnIDs = append(vulnIDs, vulnID)
	}

	return vulnIDs, nil
}
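
// LockFeatureVulnerabilityCache takes the lock that guards the
// vulnerability/namespaced-feature cache, preventing concurrent namespaced
// feature insertion from modifying it.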
func LockFeatureVulnerabilityCache(tx *sql.Tx) error {
	_, err := tx.Exec(lockVulnerabilityAffects)
	if err != nil {
		return util.HandleError("lockVulnerabilityAffects", err)
	}
	return nil
}

// CacheVulnerabiltyAffectedNamespacedFeature takes in a map of vulnerability ID
// to affected feature rows and caches them.
func CacheVulnerabiltyAffectedNamespacedFeature(tx *sql.Tx, affected map[int64]affectedFeatureRows) error {
	// Prevent InsertNamespacedFeatures from modifying it.
	err := LockFeatureVulnerabilityCache(tx)
	if err != nil {
		return err
	}

	vulnIDs := []int64{}
	for id := range affected {
		vulnIDs = append(vulnIDs, id)
	}

	rows, err := tx.Query(searchVulnerabilityPotentialAffected, pq.Array(vulnIDs))
	if err != nil {
		return util.HandleError("searchVulnerabilityPotentialAffected", err)
	}

	defer rows.Close()

	relation := []affectRelation{}
	for rows.Next() {
		var (
			vulnID   int64
			nsfID    int64
			fVersion string
			addedBy  int64
		)

		err := rows.Scan(&vulnID, &nsfID, &fVersion, &addedBy)
		if err != nil {
			return util.HandleError("searchVulnerabilityPotentialAffected", err)
		}

		candidate, ok := affected[vulnID].rows[addedBy]

		if !ok {
			return errors.New("vulnerability affected feature not found")
		}

		if in, err := versionfmt.InRange(candidate.Namespace.VersionFormat,
			fVersion,
			candidate.AffectedVersion); err == nil {
			if in {
				relation = append(relation,
					affectRelation{
						vulnerabilityID:     vulnID,
						namespacedFeatureID: nsfID,
						addedBy:             addedBy,
					})
			}
		} else {
			return err
		}
	}

	//TODO(Sida): Change to bulk insert.
	for _, r := range relation {
		result, err := tx.Exec(insertVulnerabilityAffectedNamespacedFeature, r.vulnerabilityID, r.namespacedFeatureID, r.addedBy)
		if err != nil {
			return util.HandleError("insertVulnerabilityAffectedNamespacedFeature", err)
		}

		if num, err := result.RowsAffected(); err == nil {
			if num <= 0 {
				return errors.New("Nothing cached in database")
			}
		} else {
			return err
		}
	}

	log.Debugf("Cached %d features in vulnerability_affected_namespaced_feature", len(relation))
	return nil
}
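
// DeleteVulnerabilities marks the given vulnerabilities as deleted and
// invalidates their cached affected features.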
func DeleteVulnerabilities(tx *sql.Tx, vulnerabilities []database.VulnerabilityID) error {
	defer monitoring.ObserveQueryTime("DeleteVulnerability", "all", time.Now())

	vulnIDs, err := MarkVulnerabilitiesAsDeleted(tx, vulnerabilities)
	if err != nil {
		return err
	}

	if err := InvalidateVulnerabilityCache(tx, vulnIDs); err != nil {
		return err
	}

	return nil
}
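
// InvalidateVulnerabilityCache removes the cached affected features for the
// given vulnerability IDs.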
func InvalidateVulnerabilityCache(tx *sql.Tx, vulnerabilityIDs []int64) error {
	if len(vulnerabilityIDs) == 0 {
		return nil
	}

	// Prevent InsertNamespacedFeatures from modifying it.
	err := LockFeatureVulnerabilityCache(tx)
	if err != nil {
		return err
	}

	//TODO(Sida): Make a nicer interface for bulk inserting.
	keys := make([]interface{}, len(vulnerabilityIDs))
	for i, id := range vulnerabilityIDs {
		keys[i] = id
	}

	_, err = tx.Exec(queryInvalidateVulnerabilityCache(len(vulnerabilityIDs)), keys...)
	if err != nil {
		return util.HandleError("removeVulnerabilityAffectedFeature", err)
	}

	return nil
}
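
// MarkVulnerabilitiesAsDeleted sets deleted_at on each given vulnerability and
// returns their database IDs; it fails if any of them is not in the database.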
func MarkVulnerabilitiesAsDeleted(tx *sql.Tx, vulnerabilities []database.VulnerabilityID) ([]int64, error) {
	var (
		vulnID  sql.NullInt64
		vulnIDs []int64
	)

	// mark vulnerabilities deleted
	stmt, err := tx.Prepare(removeVulnerability)
	if err != nil {
		return nil, util.HandleError("removeVulnerability", err)
	}

	defer stmt.Close()
	for _, vuln := range vulnerabilities {
		err := stmt.QueryRow(vuln.Namespace, vuln.Name).Scan(&vulnID)
		if err != nil {
			return nil, util.HandleError("removeVulnerability", err)
		}
		if !vulnID.Valid {
			return nil, util.HandleError("removeVulnerability", errors.New("Vulnerability to be removed is not in database"))
		}
		vulnIDs = append(vulnIDs, vulnID.Int64)
	}
	return vulnIDs, nil
}

// FindLatestDeletedVulnerabilityIDs requires that all elements in vulnIDs are
// in the database; the order of the output array is not guaranteed.
func FindLatestDeletedVulnerabilityIDs(tx *sql.Tx, vulnIDs []database.VulnerabilityID) ([]sql.NullInt64, error) {
	return FindVulnerabilityIDs(tx, vulnIDs, true)
}
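
// FindNotDeletedVulnerabilityIDs resolves the database IDs of the given
// vulnerabilities, considering only entries that have not been deleted.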
func FindNotDeletedVulnerabilityIDs(tx *sql.Tx, vulnIDs []database.VulnerabilityID) ([]sql.NullInt64, error) {
	return FindVulnerabilityIDs(tx, vulnIDs, false)
}
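
// FindVulnerabilityIDs maps each given vulnerability identifier to its
// database ID. When withLatestDeleted is true, the most recently deleted entry
// is considered for each identifier; otherwise only entries that are not
// deleted are considered. Identifiers that cannot be resolved yield invalid
// NullInt64 values, in input order.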
func FindVulnerabilityIDs(tx *sql.Tx, vulnIDs []database.VulnerabilityID, withLatestDeleted bool) ([]sql.NullInt64, error) {
	if len(vulnIDs) == 0 {
		return nil, nil
	}

	vulnIDMap := map[database.VulnerabilityID]sql.NullInt64{}
	keys := make([]interface{}, len(vulnIDs)*2)
	for i, vulnID := range vulnIDs {
		keys[i*2] = vulnID.Name
		keys[i*2+1] = vulnID.Namespace
		vulnIDMap[vulnID] = sql.NullInt64{}
	}

	query := ""
	if withLatestDeleted {
		query = querySearchLastDeletedVulnerabilityID(len(vulnIDs))
	} else {
		query = querySearchNotDeletedVulnerabilityID(len(vulnIDs))
	}

	rows, err := tx.Query(query, keys...)
	if err != nil {
		return nil, util.HandleError("querySearchVulnerabilityID.LatestDeleted.Query", err)
	}

	defer rows.Close()
	var (
		id     sql.NullInt64
		vulnID database.VulnerabilityID
	)
	for rows.Next() {
		err := rows.Scan(&id, &vulnID.Name, &vulnID.Namespace)
		if err != nil {
			return nil, util.HandleError("querySearchVulnerabilityID.LatestDeleted.Scan", err)
		}
		vulnIDMap[vulnID] = id
	}

	ids := make([]sql.NullInt64, len(vulnIDs))
	for i, v := range vulnIDs {
		ids[i] = vulnIDMap[v]
	}

	return ids, nil
}
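
// FindPagedVulnerableAncestries lists the ancestries affected by the given
// vulnerability one page at a time, using the supplied pagination token and
// key to decode the current page and encode the next one.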
func FindPagedVulnerableAncestries(tx *sql.Tx, vulnID int64, limit int, currentToken pagination.Token, key pagination.Key) (database.PagedVulnerableAncestries, error) {
	vulnPage := database.PagedVulnerableAncestries{Limit: limit}
	currentPage := page.Page{StartID: 0}
	if currentToken != pagination.FirstPageToken {
		if err := key.UnmarshalToken(currentToken, &currentPage); err != nil {
			return vulnPage, err
		}
	}

	if err := tx.QueryRow(searchVulnerabilityByID, vulnID).Scan(
		&vulnPage.Name,
		&vulnPage.Description,
		&vulnPage.Link,
		&vulnPage.Severity,
		&vulnPage.Metadata,
		&vulnPage.Namespace.Name,
		&vulnPage.Namespace.VersionFormat,
	); err != nil {
		return vulnPage, util.HandleError("searchVulnerabilityByID", err)
	}

	// the last result is used for the next page's startID
	rows, err := tx.Query(searchNotificationVulnerableAncestry, vulnID, currentPage.StartID, limit+1)
	if err != nil {
		return vulnPage, util.HandleError("searchNotificationVulnerableAncestry", err)
	}
	defer rows.Close()

	ancestries := []affectedAncestry{}
	for rows.Next() {
		var ancestry affectedAncestry
		err := rows.Scan(&ancestry.id, &ancestry.name)
		if err != nil {
			return vulnPage, util.HandleError("searchNotificationVulnerableAncestry", err)
		}
		ancestries = append(ancestries, ancestry)
	}

	lastIndex := 0
	if len(ancestries)-1 < limit {
		lastIndex = len(ancestries)
		vulnPage.End = true
	} else {
		// Use the last ancestry's ID as the next page.
		lastIndex = len(ancestries) - 1
		vulnPage.Next, err = key.MarshalToken(page.Page{StartID: ancestries[len(ancestries)-1].id})
		if err != nil {
			return vulnPage, err
		}
	}

	vulnPage.Affected = map[int]string{}
	for _, ancestry := range ancestries[0:lastIndex] {
		vulnPage.Affected[int(ancestry.id)] = ancestry.name
	}

	vulnPage.Current, err = key.MarshalToken(currentPage)
	if err != nil {
		return vulnPage, err
	}

	return vulnPage, nil
}