// Copyright 2019 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package pgsql

import (
	"database/sql"
	"errors"

	log "github.com/sirupsen/logrus"

	"github.com/coreos/clair/database"
	"github.com/coreos/clair/pkg/commonerr"
)

const (
	insertAncestry = `
		INSERT INTO ancestry (name) VALUES ($1) RETURNING id`

	findAncestryLayerHashes = `
		SELECT layer.hash, ancestry_layer.ancestry_index
		FROM layer, ancestry_layer
		WHERE ancestry_layer.ancestry_id = $1
			AND ancestry_layer.layer_id = layer.id
		ORDER BY ancestry_layer.ancestry_index ASC`

	findAncestryFeatures = `
		SELECT namespace.name, namespace.version_format, feature.name,
			feature.version, feature.version_format, feature_type.name, ancestry_layer.ancestry_index,
			ancestry_feature.feature_detector_id, ancestry_feature.namespace_detector_id
		FROM namespace, feature, feature_type, namespaced_feature, ancestry_layer, ancestry_feature
		WHERE ancestry_layer.ancestry_id = $1
			AND feature_type.id = feature.type
			AND ancestry_feature.ancestry_layer_id = ancestry_layer.id
			AND ancestry_feature.namespaced_feature_id = namespaced_feature.id
			AND namespaced_feature.feature_id = feature.id
			AND namespaced_feature.namespace_id = namespace.id`

	findAncestryID       = `SELECT id FROM ancestry WHERE name = $1`
	removeAncestry       = `DELETE FROM ancestry WHERE name = $1`
	insertAncestryLayers = `
		INSERT INTO ancestry_layer (ancestry_id, ancestry_index, layer_id) VALUES ($1, $2, $3)
		RETURNING id`
	insertAncestryFeatures = `
		INSERT INTO ancestry_feature
			(ancestry_layer_id, namespaced_feature_id, feature_detector_id, namespace_detector_id) VALUES
			($1, $2, $3, $4)`
)
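
// FindAncestry looks up the ancestry with the given name and, when it exists,
// returns it together with the detectors that processed it and its layers.
// The boolean result reports whether the ancestry was found.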
func (tx *pgSession) FindAncestry(name string) (database.Ancestry, bool, error) {
	var (
		ancestry = database.Ancestry{Name: name}
		err      error
	)

	id, ok, err := tx.findAncestryID(name)
	if !ok || err != nil {
		return ancestry, ok, err
	}

	if ancestry.By, err = tx.findAncestryDetectors(id); err != nil {
		return ancestry, false, err
	}

	if ancestry.Layers, err = tx.findAncestryLayers(id); err != nil {
		return ancestry, false, err
	}

	return ancestry, true, nil
}
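
// UpsertAncestry replaces any existing ancestry with the same name, then
// stores the given ancestry along with its detectors, its layers, and the
// features found in each layer.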
func (tx *pgSession) UpsertAncestry(ancestry database.Ancestry) error {
	if !ancestry.Valid() {
		return database.ErrInvalidParameters
	}

	if err := tx.removeAncestry(ancestry.Name); err != nil {
		return err
	}

	id, err := tx.insertAncestry(ancestry.Name)
	if err != nil {
		return err
	}

	detectorIDs, err := tx.findDetectorIDs(ancestry.By)
	if err != nil {
		return err
	}

	// insert ancestry metadata
	if err := tx.insertAncestryDetectors(id, detectorIDs); err != nil {
		return err
	}

	layers := make([]string, 0, len(ancestry.Layers))
	for _, layer := range ancestry.Layers {
		layers = append(layers, layer.Hash)
	}

	layerIDs, ok, err := tx.findLayerIDs(layers)
	if err != nil {
		return err
	}

	if !ok {
		log.Error("layer cannot be found, this indicates that the internal logic of calling UpsertAncestry is wrong or the database is corrupted.")
		return database.ErrMissingEntities
	}

	ancestryLayerIDs, err := tx.insertAncestryLayers(id, layerIDs)
	if err != nil {
		return err
	}

	for i, id := range ancestryLayerIDs {
		if err := tx.insertAncestryFeatures(id, ancestry.Layers[i]); err != nil {
			return err
		}
	}

	return nil
}
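
// insertAncestry creates a new ancestry row with the given name and returns
// its id. A unique violation indicates another routine is already processing
// the same ancestry and is reported as an error.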
func (tx *pgSession) insertAncestry(name string) (int64, error) {
	var id int64
	err := tx.QueryRow(insertAncestry, name).Scan(&id)
	if err != nil {
		if isErrUniqueViolation(err) {
			return 0, handleError("insertAncestry", errors.New("other Go-routine is processing this ancestry (skip)"))
		}

		return 0, handleError("insertAncestry", err)
	}

	return id, nil
}
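
// findAncestryID resolves an ancestry name to its id; the boolean result
// reports whether the ancestry exists.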
func (tx *pgSession) findAncestryID(name string) (int64, bool, error) {
	var id sql.NullInt64
	if err := tx.QueryRow(findAncestryID, name).Scan(&id); err != nil {
		if err == sql.ErrNoRows {
			return 0, false, nil
		}

		return 0, false, handleError("findAncestryID", err)
	}

	return id.Int64, true, nil
}
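
// removeAncestry deletes the ancestry with the given name, if it exists.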
func (tx *pgSession) removeAncestry(name string) error {
	result, err := tx.Exec(removeAncestry, name)
	if err != nil {
		return handleError("removeAncestry", err)
	}

	affected, err := result.RowsAffected()
	if err != nil {
		return handleError("removeAncestry", err)
	}

	if affected != 0 {
		log.WithField("ancestry", name).Debug("removed ancestry")
	}

	return nil
}
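
// findAncestryLayers loads the layers of the ancestry with the given id in
// ancestry_index order, together with the namespaced features detected in
// each layer.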
func (tx *pgSession) findAncestryLayers(id int64) ([]database.AncestryLayer, error) {
	detectors, err := tx.findAllDetectors()
	if err != nil {
		return nil, err
	}

	layerMap, err := tx.findAncestryLayerHashes(id)
	if err != nil {
		return nil, err
	}

	featureMap, err := tx.findAncestryFeatures(id, detectors)
	if err != nil {
		return nil, err
	}

	layers := make([]database.AncestryLayer, len(layerMap))
	for index, layer := range layerMap {
		// index MUST match the ancestry layer slice index.
		if layers[index].Hash == "" && len(layers[index].Features) == 0 {
			layers[index] = database.AncestryLayer{
				Hash:     layer,
				Features: featureMap[index],
			}
		} else {
			log.WithFields(log.Fields{
				"ancestry ID":               id,
				"duplicated ancestry index": index,
			}).WithError(database.ErrInconsistent).Error("ancestry layers with same ancestry_index is not allowed")

			return nil, database.ErrInconsistent
		}
	}

	return layers, nil
}
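
// findAncestryLayerHashes maps each ancestry_index of the given ancestry to
// the hash of the layer stored at that index.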
func (tx *pgSession) findAncestryLayerHashes(ancestryID int64) (map[int64]string, error) {
	// retrieve layer indexes and hashes
	rows, err := tx.Query(findAncestryLayerHashes, ancestryID)
	if err != nil {
		return nil, handleError("findAncestryLayerHashes", err)
	}

	// release the result set on every return path
	defer rows.Close()

	layerHashes := map[int64]string{}
	for rows.Next() {
		var (
			hash  string
			index int64
		)

		if err = rows.Scan(&hash, &index); err != nil {
			return nil, handleError("findAncestryLayerHashes", err)
		}

		if _, ok := layerHashes[index]; ok {
			// one ancestry index should correspond to only one layer
			return nil, database.ErrInconsistent
		}

		layerHashes[index] = hash
	}

	return layerHashes, nil
}
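
// findAncestryFeatures maps each ancestry_index of the given ancestry to the
// ancestry features found at that index, resolving detector ids through the
// supplied detector map.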
func (tx *pgSession) findAncestryFeatures(ancestryID int64, detectors detectorMap) (map[int64][]database.AncestryFeature, error) {
	// ancestry_index -> ancestry features
	featureMap := make(map[int64][]database.AncestryFeature)
	// retrieve ancestry layer's namespaced features
	rows, err := tx.Query(findAncestryFeatures, ancestryID)
	if err != nil {
		return nil, handleError("findAncestryFeatures", err)
	}

	defer rows.Close()

	for rows.Next() {
		var (
			featureDetectorID   int64
			namespaceDetectorID int64
			feature             database.NamespacedFeature
			// index is used to determine which layer the feature belongs to.
			index sql.NullInt64
		)

		if err := rows.Scan(
			&feature.Namespace.Name,
			&feature.Namespace.VersionFormat,
			&feature.Feature.Name,
			&feature.Feature.Version,
			&feature.Feature.VersionFormat,
			&feature.Feature.Type,
			&index,
			&featureDetectorID,
			&namespaceDetectorID,
		); err != nil {
			return nil, handleError("findAncestryFeatures", err)
		}

		if feature.Feature.VersionFormat != feature.Namespace.VersionFormat {
			// Feature must have the same version format as the associated
			// namespace version format.
			return nil, database.ErrInconsistent
		}

		fDetector, ok := detectors.byID[featureDetectorID]
		if !ok {
			return nil, database.ErrInconsistent
		}

		nsDetector, ok := detectors.byID[namespaceDetectorID]
		if !ok {
			return nil, database.ErrInconsistent
		}

		featureMap[index.Int64] = append(featureMap[index.Int64], database.AncestryFeature{
			NamespacedFeature: feature,
			FeatureBy:         fDetector,
			NamespaceBy:       nsDetector,
		})
	}

	return featureMap, nil
}

// insertAncestryLayers inserts the ancestry layers into the database and
// returns the id of each inserted ancestry_layer row. Layers are indexed
// from 0 in their original order.
func (tx *pgSession) insertAncestryLayers(ancestryID int64, layers []int64) ([]int64, error) {
	stmt, err := tx.Prepare(insertAncestryLayers)
	if err != nil {
		return nil, handleError("insertAncestryLayers", err)
	}

	ancestryLayerIDs := []int64{}
	for index, layerID := range layers {
		var ancestryLayerID sql.NullInt64
		if err := stmt.QueryRow(ancestryID, index, layerID).Scan(&ancestryLayerID); err != nil {
			return nil, handleError("insertAncestryLayers", commonerr.CombineErrors(err, stmt.Close()))
		}

		if !ancestryLayerID.Valid {
			return nil, database.ErrInconsistent
		}

		ancestryLayerIDs = append(ancestryLayerIDs, ancestryLayerID.Int64)
	}

	if err := stmt.Close(); err != nil {
		return nil, handleError("insertAncestryLayers", err)
	}

	return ancestryLayerIDs, nil
}
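
// insertAncestryFeatures associates the features of the given ancestry layer
// with the ancestry_layer row identified by ancestryLayerID, recording the
// feature and namespace detectors for each feature.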
func (tx *pgSession) insertAncestryFeatures(ancestryLayerID int64, layer database.AncestryLayer) error {
	detectors, err := tx.findAllDetectors()
	if err != nil {
		return err
	}

	nsFeatureIDs, err := tx.findNamespacedFeatureIDs(layer.GetFeatures())
	if err != nil {
		return err
	}

	// find the detectors for each feature
	stmt, err := tx.Prepare(insertAncestryFeatures)
	if err != nil {
		return handleError("insertAncestryFeatures", err)
	}

	defer stmt.Close()

	for index, id := range nsFeatureIDs {
		if !id.Valid {
			return database.ErrMissingEntities
		}

		namespaceDetectorID, ok := detectors.byValue[layer.Features[index].NamespaceBy]
		if !ok {
			return database.ErrMissingEntities
		}

		featureDetectorID, ok := detectors.byValue[layer.Features[index].FeatureBy]
		if !ok {
			return database.ErrMissingEntities
		}

		if _, err := stmt.Exec(ancestryLayerID, id, featureDetectorID, namespaceDetectorID); err != nil {
			return handleError("insertAncestryFeatures", commonerr.CombineErrors(err, stmt.Close()))
		}
	}

	return nil
}