*: add metadata support along with NVD CVSS

parent c05848e32d
commit 5fdd9d1a07
@@ -63,6 +63,7 @@ func LayerFromDatabaseModel(dbLayer database.Layer, withFeatures, withVulnerabil
             Namespace:   dbVuln.Namespace.Name,
             Description: dbVuln.Description,
             Severity:    string(dbVuln.Severity),
+            Metadata:    dbVuln.Metadata,
         }

         if dbVuln.FixedBy != types.MaxVersion {
@@ -78,13 +79,14 @@ func LayerFromDatabaseModel(dbLayer database.Layer, withFeatures, withVulnerabil
 }

 type Vulnerability struct {
     Name        string `json:"Name,omitempty"`
     Namespace   string `json:"Namespace,omitempty"`
     Description string `json:"Description,omitempty"`
     Link        string `json:"Link,omitempty"`
     Severity    string `json:"Severity,omitempty"`
-    FixedBy     string    `json:"FixedBy,omitempty"`
-    FixedIn     []Feature `json:"FixedIn,omitempty"`
+    Metadata    map[string]interface{} `json:"Metadata,omitempty"`
+    FixedBy     string                 `json:"FixedBy,omitempty"`
+    FixedIn     []Feature              `json:"FixedIn,omitempty"`
 }

 func (v Vulnerability) DatabaseModel() (database.Vulnerability, error) {
@@ -115,6 +117,7 @@ func (v Vulnerability) DatabaseModel() (database.Vulnerability, error) {
         Description: v.Description,
         Link:        v.Link,
         Severity:    severity,
+        Metadata:    v.Metadata,
         FixedIn:     dbFeatures,
     }, nil
 }
@@ -126,6 +129,7 @@ func VulnerabilityFromDatabaseModel(dbVuln database.Vulnerability, withFixedIn b
         Description: dbVuln.Description,
         Link:        dbVuln.Link,
         Severity:    string(dbVuln.Severity),
+        Metadata:    dbVuln.Metadata,
     }

     if withFixedIn {
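For illustration only: with the new Metadata field on the API model, clients see per-source metadata under keys such as "NVD". The sketch below uses a local mirror of the struct and invented values rather than Clair's actual API package, and prints the JSON a response could carry.

package main

import (
    "encoding/json"
    "fmt"
)

// Vulnerability mirrors the API struct above for this example only.
type Vulnerability struct {
    Name     string                 `json:"Name,omitempty"`
    Severity string                 `json:"Severity,omitempty"`
    Metadata map[string]interface{} `json:"Metadata,omitempty"`
}

func main() {
    // Hypothetical vulnerability; the metadata shape mirrors what the NVD
    // fetcher stores (a CVSSv2 score and vector under the "NVD" key).
    v := Vulnerability{
        Name:     "CVE-2015-0001",
        Severity: "Low",
        Metadata: map[string]interface{}{
            "NVD": map[string]interface{}{
                "CVSSv2": map[string]interface{}{
                    "Score":   4.3,
                    "Vectors": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
                },
            },
        },
    }

    out, _ := json.MarshalIndent(v, "", "  ")
    fmt.Println(string(out))
}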
@@ -31,6 +31,7 @@ import (
     _ "github.com/coreos/clair/updater/fetchers/debian"
     _ "github.com/coreos/clair/updater/fetchers/rhel"
     _ "github.com/coreos/clair/updater/fetchers/ubuntu"
+    _ "github.com/coreos/clair/updater/metadata_fetchers/nvd"

     _ "github.com/coreos/clair/worker/detectors/data/aci"
     _ "github.com/coreos/clair/worker/detectors/data/docker"
@@ -15,6 +15,8 @@
 package database

 import (
+    "database/sql/driver"
+    "encoding/json"
     "time"

     "github.com/coreos/clair/utils/types"
@@ -65,6 +67,8 @@ type Vulnerability struct {
     Link        string
     Severity    types.Priority

+    Metadata MetadataMap
+
     FixedIn                        []FeatureVersion
     LayersIntroducingVulnerability []Layer

@@ -73,6 +77,21 @@ type Vulnerability struct {
     FixedBy types.Version `json:",omitempty"`
 }

+type MetadataMap map[string]interface{}
+
+func (mm *MetadataMap) Scan(value interface{}) error {
+    val, ok := value.([]byte)
+    if !ok {
+        return nil
+    }
+    return json.Unmarshal(val, mm)
+}
+
+func (mm *MetadataMap) Value() (driver.Value, error) {
+    json, err := json.Marshal(*mm)
+    return string(json), err
+}
+
 type VulnerabilityNotification struct {
     Model

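A self-contained sketch of how database/sql exercises this pair, assuming the behaviour shown above: the driver calls Value() when the map is written to the metadata TEXT column and Scan() when it is read back. The type is re-declared locally and the values are invented, so nothing here depends on the Clair packages.

package main

import (
    "database/sql/driver"
    "encoding/json"
    "fmt"
)

// MetadataMap replicates the type above so the example stands alone.
type MetadataMap map[string]interface{}

func (mm *MetadataMap) Scan(value interface{}) error {
    val, ok := value.([]byte)
    if !ok {
        return nil // non-[]byte values (e.g. NULL) are silently ignored, as above
    }
    return json.Unmarshal(val, mm)
}

func (mm *MetadataMap) Value() (driver.Value, error) {
    j, err := json.Marshal(*mm)
    return string(j), err
}

func main() {
    in := MetadataMap{"NVD": map[string]interface{}{"CVSSv2": map[string]interface{}{"Score": 5.0}}}

    // What the driver would store in the metadata TEXT column.
    stored, _ := in.Value()
    fmt.Println(stored)

    // What rows.Scan(&vulnerability.Metadata) reconstructs from that column.
    var out MetadataMap
    _ = out.Scan([]byte(stored.(string)))
    fmt.Printf("%#v\n", out)
}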
@@ -163,7 +163,7 @@ func (pgSQL *pgSQL) loadAffectedBy(featureVersions []database.FeatureVersion) er
         var vulnerability database.Vulnerability
         err := rows.Scan(&featureversionID, &vulnerability.ID, &vulnerability.Name,
             &vulnerability.Description, &vulnerability.Link, &vulnerability.Severity,
-            &vulnerability.Namespace.Name, &vulnerability.FixedBy)
+            &vulnerability.Metadata, &vulnerability.Namespace.Name, &vulnerability.FixedBy)
         if err != nil {
             return handleError("s_featureversions_vulnerabilities.Scan()", err)
         }
@@ -88,6 +88,7 @@ CREATE TABLE IF NOT EXISTS Vulnerability (
     description TEXT NULL,
     link VARCHAR(128) NULL,
     severity severity NOT NULL,
+    metadata TEXT NULL,

     UNIQUE (namespace_id, name));

@@ -104,8 +104,8 @@ func init() {
         ORDER BY ltree.ordering`

     queries["s_featureversions_vulnerabilities"] = `
-        SELECT vafv.featureversion_id, v.id, v.name, v.description, v.link, v.severity, vn.name,
-            vfif.version
+        SELECT vafv.featureversion_id, v.id, v.name, v.description, v.link, v.severity, v.metadata,
+            vn.name, vfif.version
         FROM Vulnerability_Affects_FeatureVersion vafv, Vulnerability v,
             Namespace vn, Vulnerability_FixedIn_Feature vfif
         WHERE vafv.featureversion_id = ANY($1::integer[])
@@ -144,7 +144,7 @@ func init() {

     // vulnerability.go
     queries["f_vulnerability"] = `
-        SELECT v.id, n.id, v.description, v.link, v.severity, vfif.version, f.id, f.Name
+        SELECT v.id, n.id, v.description, v.link, v.severity, v.metadata, vfif.version, f.id, f.Name
         FROM Vulnerability v
         JOIN Namespace n ON v.namespace_id = n.id
         LEFT JOIN Vulnerability_FixedIn_Feature vfif ON v.id = vfif.vulnerability_id
@@ -152,12 +152,14 @@ func init() {
         WHERE n.Name = $1 AND v.Name = $2`

     queries["i_vulnerability"] = `
-        INSERT INTO Vulnerability(namespace_id, name, description, link, severity)
-        VALUES($1, $2, $3, $4, $5)
+        INSERT INTO Vulnerability(namespace_id, name, description, link, severity, metadata)
+        VALUES($1, $2, $3, $4, $5, $6)
         RETURNING id`

     queries["u_vulnerability"] = `
-        UPDATE Vulnerability SET description = $2, link = $3, severity = $4 WHERE id = $1`
+        UPDATE Vulnerability
+        SET description = $2, link = $3, severity = $4, metadata = $5
+        WHERE id = $1`

     queries["i_vulnerability_fixedin_feature"] = `
         INSERT INTO Vulnerability_FixedIn_Feature(vulnerability_id, feature_id, version)
@@ -16,7 +16,9 @@ package pgsql

 import (
     "database/sql"
+    "encoding/json"
     "fmt"
+    "reflect"
     "time"

     "github.com/coreos/clair/database"
@@ -50,8 +52,8 @@ func (pgSQL *pgSQL) FindVulnerability(namespaceName, name string) (database.Vuln
         var featureVersionFeatureName zero.String

         err := rows.Scan(&vulnerability.ID, &vulnerability.Namespace.ID, &vulnerability.Description,
-            &vulnerability.Link, &vulnerability.Severity, &featureVersionVersion, &featureVersionID,
-            &featureVersionFeatureName)
+            &vulnerability.Link, &vulnerability.Severity, &vulnerability.Metadata,
+            &featureVersionVersion, &featureVersionID, &featureVersionFeatureName)
         if err != nil {
             return vulnerability, handleError("f_vulnerability.Scan()", err)
         }
@@ -139,6 +141,7 @@ func (pgSQL *pgSQL) insertVulnerability(vulnerability database.Vulnerability) er
     if vulnerability.Description == existingVulnerability.Description &&
         vulnerability.Link == existingVulnerability.Link &&
         vulnerability.Severity == existingVulnerability.Severity &&
+        reflect.DeepEqual(castMetadata(vulnerability.Metadata), existingVulnerability.Metadata) &&
         len(newFixedInFeatureVersions) == 0 &&
         len(updatedFixedInFeatureVersions) == 0 {

@@ -191,7 +194,8 @@ func (pgSQL *pgSQL) insertVulnerability(vulnerability database.Vulnerability) er
     if existingVulnerability.ID == 0 {
         // Insert new vulnerability.
         err = tx.QueryRow(getQuery("i_vulnerability"), namespaceID, vulnerability.Name,
-            vulnerability.Description, vulnerability.Link, &vulnerability.Severity).Scan(&vulnerability.ID)
+            vulnerability.Description, vulnerability.Link, &vulnerability.Severity,
+            &vulnerability.Metadata).Scan(&vulnerability.ID)
         if err != nil {
             tx.Rollback()
             return handleError("i_vulnerability", err)
@@ -202,7 +206,8 @@ func (pgSQL *pgSQL) insertVulnerability(vulnerability database.Vulnerability) er
         vulnerability.Link != existingVulnerability.Link ||
         vulnerability.Severity != existingVulnerability.Severity {
         _, err = tx.Exec(getQuery("u_vulnerability"), existingVulnerability.ID,
-            vulnerability.Description, vulnerability.Link, &vulnerability.Severity)
+            vulnerability.Description, vulnerability.Link, &vulnerability.Severity,
+            &vulnerability.Metadata)
         if err != nil {
             tx.Rollback()
             return handleError("u_vulnerability", err)
@@ -244,6 +249,17 @@ func (pgSQL *pgSQL) insertVulnerability(vulnerability database.Vulnerability) er
     return nil
 }

+// castMetadata marshals the given database.MetadataMap and unmarshals it again to make sure that
+// everything has the interface{} type.
+// It is required when comparing crafted MetadataMap against MetadataMap that we get from the
+// database.
+func castMetadata(m database.MetadataMap) database.MetadataMap {
+    c := make(database.MetadataMap)
+    j, _ := json.Marshal(m)
+    json.Unmarshal(j, &c)
+    return c
+}
+
 func diffFixedIn(vulnerability, existingVulnerability database.Vulnerability) (newFixedIn, updatedFixedIn []database.FeatureVersion) {
     // Build FeatureVersion.Feature.Namespace.Name:FeatureVersion.Feature.Name (NaN) structures.
     vulnerabilityFixedInNameMap, vulnerabilityFixedInNameSlice := createFeatureVersionNameMap(vulnerability.FixedIn)
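A standalone illustration of why the comparison goes through castMetadata: a metadata map crafted in Go may hold typed values (structs, ints), while the same map read back from the metadata column only contains generic JSON types (map[string]interface{}, float64). The type and values below are local stand-ins, not Clair's packages.

package main

import (
    "encoding/json"
    "fmt"
    "reflect"
)

type MetadataMap map[string]interface{}

// castMetadata re-encodes a map through JSON so every value gets the generic
// types (map[string]interface{}, float64, ...) that a database read produces.
func castMetadata(m MetadataMap) MetadataMap {
    c := make(MetadataMap)
    j, _ := json.Marshal(m)
    json.Unmarshal(j, &c)
    return c
}

func main() {
    crafted := MetadataMap{"NVD": struct{ Score int }{Score: 5}}

    // Simulate what comes back from the metadata TEXT column.
    j, _ := json.Marshal(crafted)
    var fromDB MetadataMap
    json.Unmarshal(j, &fromDB)

    fmt.Println(reflect.DeepEqual(crafted, fromDB))               // false: struct vs map, int vs float64
    fmt.Println(reflect.DeepEqual(castMetadata(crafted), fromDB)) // true: both sides normalized
}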
@@ -15,6 +15,7 @@
 package pgsql

 import (
+    "reflect"
     "testing"

     "github.com/coreos/clair/database"
@@ -195,6 +196,14 @@ func TestInsertVulnerability(t *testing.T) {
     }

     // Insert a simple vulnerability and find it.
+    v1meta := make(map[string]interface{})
+    v1meta["TestInsertVulnerabilityMetadata1"] = "TestInsertVulnerabilityMetadataValue1"
+    v1meta["TestInsertVulnerabilityMetadata2"] = struct {
+        Test string
+    }{
+        Test: "TestInsertVulnerabilityMetadataValue1",
+    }
+
     v1 := database.Vulnerability{
         Name:      "TestInsertVulnerability1",
         Namespace: n1,
@@ -202,6 +211,7 @@ func TestInsertVulnerability(t *testing.T) {
         Severity:    types.Low,
         Description: "TestInsertVulnerabilityDescription1",
         Link:        "TestInsertVulnerabilityLink1",
+        Metadata:    v1meta,
     }
     err = datastore.InsertVulnerabilities([]database.Vulnerability{v1})
     if assert.Nil(t, err) {
@@ -245,6 +255,8 @@ func equalsVuln(t *testing.T, expected, actual *database.Vulnerability) {
     assert.Equal(t, expected.Description, actual.Description)
     assert.Equal(t, expected.Link, actual.Link)
     assert.Equal(t, expected.Severity, actual.Severity)
+    assert.True(t, reflect.DeepEqual(castMetadata(expected.Metadata), actual.Metadata), "Got metadata %#v, expected %#v", actual.Metadata, castMetadata(expected.Metadata))
+
     if assert.Len(t, actual.FixedIn, len(expected.FixedIn)) {
         for _, actualFeatureVersion := range actual.FixedIn {
             found := false
@@ -428,5 +428,7 @@ func ubuntuPriorityToSeverity(priority string) types.Priority {

 // Clean deletes any allocated resources.
 func (fetcher *UbuntuFetcher) Clean() {
-    os.RemoveAll(fetcher.repositoryLocalPath)
+    if fetcher.repositoryLocalPath != "" {
+        os.RemoveAll(fetcher.repositoryLocalPath)
+    }
 }
updater/metadata_fetchers.go (new file, 64 lines):

// Copyright 2015 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package updater

import (
    "sync"

    "github.com/coreos/clair/database"
)

var metadataFetchers = make(map[string]MetadataFetcher)

type VulnerabilityWithLock struct {
    *database.Vulnerability
    Lock sync.Mutex
}

// MetadataFetcher adds metadata to vulnerabilities.
type MetadataFetcher interface {
    // Load runs right before the Updater calls AddMetadata for each vulnerability.
    Load(database.Datastore) error

    // AddMetadata adds metadata to the given database.Vulnerability.
    // It is expected that the fetcher uses .Lock.Lock() when manipulating the Metadata map.
    AddMetadata(*VulnerabilityWithLock) error

    // Unload runs right after the Updater finished calling AddMetadata for every vulnerability.
    Unload()

    // Clean deletes any allocated resources.
    // It is invoked when Clair stops.
    Clean()
}

// RegisterMetadataFetcher makes a MetadataFetcher available by the provided name.
// If RegisterMetadataFetcher is called twice with the same name or if f is nil,
// it panics.
func RegisterMetadataFetcher(name string, f MetadataFetcher) {
    if name == "" {
        panic("updater: could not register a MetadataFetcher with an empty name")
    }

    if f == nil {
        panic("updater: could not register a nil MetadataFetcher")
    }

    if _, dup := metadataFetchers[name]; dup {
        panic("updater: RegisterMetadataFetcher called twice for " + name)
    }

    metadataFetchers[name] = f
}
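A hypothetical MetadataFetcher, shown only to illustrate the lifecycle and locking contract above; the package name, fetcher name and stamped value are invented. Like the NVD fetcher, it would be linked in with a blank import so its init runs.

package example

import (
    "github.com/coreos/clair/database"
    "github.com/coreos/clair/updater"
)

// staticFetcher attaches a constant note to every vulnerability.
type staticFetcher struct{}

func init() {
    updater.RegisterMetadataFetcher("example", &staticFetcher{})
}

// Load would normally download or open a data source; nothing to do here.
func (f *staticFetcher) Load(datastore database.Datastore) error { return nil }

// AddMetadata takes the per-vulnerability lock before touching the shared map,
// as the interface contract requires.
func (f *staticFetcher) AddMetadata(v *updater.VulnerabilityWithLock) error {
    v.Lock.Lock()
    defer v.Lock.Unlock()

    if v.Metadata == nil {
        v.Metadata = make(map[string]interface{})
    }
    v.Metadata["example"] = "stamped by staticFetcher"
    return nil
}

// Unload drops any state kept between Load and the end of the update run.
func (f *staticFetcher) Unload() {}

// Clean releases resources when Clair stops; nothing was allocated here.
func (f *staticFetcher) Clean() {}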
updater/metadata_fetchers/nvd/nested_read_closer.go (new file, 19 lines):

package nvd

import "io"

// NestedReadCloser wraps an io.Reader and closes every embedded
// io.ReadCloser when Close is called.
// It allows chaining io.ReadClosers together while keeping the ability to close them all in a
// simple manner.
type NestedReadCloser struct {
    io.Reader
    NestedReadClosers []io.ReadCloser
}

// Close closes every nested io.ReadCloser.
func (nrc *NestedReadCloser) Close() {
    for _, nestedReadCloser := range nrc.NestedReadClosers {
        nestedReadCloser.Close()
    }
}
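A usage sketch with a made-up URL: the HTTP response body is wrapped in a gzip.Reader, and a single NestedReadCloser yields the decompressed bytes while closing both layers. The type is copied locally so the snippet stands alone.

package main

import (
    "compress/gzip"
    "fmt"
    "io"
    "io/ioutil"
    "net/http"
)

// NestedReadCloser is copied from the nvd package above for this example.
type NestedReadCloser struct {
    io.Reader
    NestedReadClosers []io.ReadCloser
}

func (nrc *NestedReadCloser) Close() {
    for _, c := range nrc.NestedReadClosers {
        c.Close()
    }
}

func main() {
    // Hypothetical gzip-compressed resource.
    resp, err := http.Get("http://example.com/feed.xml.gz")
    if err != nil {
        panic(err)
    }

    gr, err := gzip.NewReader(resp.Body)
    if err != nil {
        resp.Body.Close()
        panic(err)
    }

    // One handle that reads decompressed data and closes both layers.
    nrc := &NestedReadCloser{Reader: gr, NestedReadClosers: []io.ReadCloser{gr, resp.Body}}
    defer nrc.Close()

    n, _ := io.Copy(ioutil.Discard, nrc)
    fmt.Println("read", n, "bytes")
}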
updater/metadata_fetchers/nvd/nvd.go (new file, 228 lines):

package nvd

import (
    "bufio"
    "compress/gzip"
    "encoding/xml"
    "errors"
    "fmt"
    "io"
    "io/ioutil"
    "net/http"
    "os"
    "strconv"
    "strings"
    "sync"
    "time"

    "github.com/coreos/clair/database"
    "github.com/coreos/clair/updater"
    cerrors "github.com/coreos/clair/utils/errors"
    "github.com/coreos/pkg/capnslog"
)

const (
    dataFeedURL     string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
    dataFeedMetaURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"

    metadataKey string = "NVD"
)

var (
    log = capnslog.NewPackageLogger("github.com/coreos/clair", "updater/fetchers/metadata_fetchers")
)

type NVDMetadataFetcher struct {
    localPath      string
    dataFeedHashes map[string]string
    lock           sync.Mutex

    metadata map[string]NVDMetadata
}

type NVDMetadata struct {
    CVSSv2 NVDmetadataCVSSv2
}

type NVDmetadataCVSSv2 struct {
    Vectors string
    Score   float64
}

func init() {
    updater.RegisterMetadataFetcher("NVD", &NVDMetadataFetcher{})
}

func (fetcher *NVDMetadataFetcher) Load(datastore database.Datastore) error {
    fetcher.lock.Lock()
    defer fetcher.lock.Unlock()

    var err error
    fetcher.metadata = make(map[string]NVDMetadata)

    // Init if necessary.
    if fetcher.localPath == "" {
        // Create a temporary folder to store the NVD data and create hashes struct.
        if fetcher.localPath, err = ioutil.TempDir(os.TempDir(), "nvd-data"); err != nil {
            return cerrors.ErrFilesystem
        }

        fetcher.dataFeedHashes = make(map[string]string)
    }

    // Get data feeds.
    dataFeedReaders, dataFeedHashes, err := getDataFeeds(fetcher.dataFeedHashes, fetcher.localPath)
    if err != nil {
        return err
    }
    fetcher.dataFeedHashes = dataFeedHashes

    // Parse data feeds.
    for dataFeedName, dataFeedReader := range dataFeedReaders {
        var nvd nvd
        if err = xml.NewDecoder(dataFeedReader).Decode(&nvd); err != nil {
            log.Errorf("could not decode NVD data feed '%s': %s", dataFeedName, err)
            return cerrors.ErrCouldNotParse
        }

        // For each entry of this data feed:
        for _, nvdEntry := range nvd.Entries {
            // Create metadata entry.
            if metadata := nvdEntry.Metadata(); metadata != nil {
                fetcher.metadata[nvdEntry.Name] = *metadata
            }
        }

        dataFeedReader.Close()
    }

    return nil
}

func (fetcher *NVDMetadataFetcher) AddMetadata(vulnerability *updater.VulnerabilityWithLock) error {
    fetcher.lock.Lock()
    defer fetcher.lock.Unlock()

    if nvdMetadata, ok := fetcher.metadata[vulnerability.Name]; ok {
        vulnerability.Lock.Lock()
        defer vulnerability.Lock.Unlock()

        // Create Metadata map if necessary.
        if vulnerability.Metadata == nil {
            vulnerability.Metadata = make(map[string]interface{})
        }

        vulnerability.Metadata[metadataKey] = nvdMetadata
    }

    return nil
}

func (fetcher *NVDMetadataFetcher) Unload() {
    fetcher.lock.Lock()
    defer fetcher.lock.Unlock()

    fetcher.metadata = nil
}

func (fetcher *NVDMetadataFetcher) Clean() {
    fetcher.lock.Lock()
    defer fetcher.lock.Unlock()

    if fetcher.localPath != "" {
        os.RemoveAll(fetcher.localPath)
    }
}

func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]NestedReadCloser, map[string]string, error) {
    var dataFeedNames []string
    for y := 2002; y <= time.Now().Year(); y++ {
        dataFeedNames = append(dataFeedNames, strconv.Itoa(y))
    }

    // Get hashes for these feeds.
    for _, dataFeedName := range dataFeedNames {
        hash, err := getHashFromMetaURL(fmt.Sprintf(dataFeedMetaURL, dataFeedName))
        if err != nil {
            log.Warningf("could not get NVD data feed hash '%s': %s", dataFeedName, err)

            // It's not a big deal, no need to interrupt, we're just going to download it again then.
            continue
        }

        dataFeedHashes[dataFeedName] = hash
    }

    // Create io.Reader for every data feed.
    dataFeedReaders := make(map[string]NestedReadCloser)
    for _, dataFeedName := range dataFeedNames {
        fileName := localPath + dataFeedName + ".xml"

        if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
            // The hash is known, the disk should contain the feed. Try to read from it.
            if localPath != "" {
                if f, err := os.Open(fileName); err == nil {
                    dataFeedReaders[dataFeedName] = NestedReadCloser{
                        Reader:            f,
                        NestedReadClosers: []io.ReadCloser{f},
                    }
                    continue
                }
            }

            // Download data feed.
            r, err := http.Get(fmt.Sprintf(dataFeedURL, dataFeedName))
            if err != nil {
                log.Errorf("could not download NVD data feed file '%s': %s", dataFeedName, err)
                return dataFeedReaders, dataFeedHashes, cerrors.ErrCouldNotDownload
            }

            // Un-gzip it.
            gr, err := gzip.NewReader(r.Body)
            if err != nil {
                log.Errorf("could not read NVD data feed file '%s': %s", dataFeedName, err)
                return dataFeedReaders, dataFeedHashes, cerrors.ErrCouldNotDownload
            }

            // Store it to a file at the same time if possible.
            if f, err := os.Create(fileName); err == nil {
                nrc := NestedReadCloser{
                    Reader:            io.TeeReader(gr, f),
                    NestedReadClosers: []io.ReadCloser{r.Body, gr, f},
                }
                dataFeedReaders[dataFeedName] = nrc
            } else {
                nrc := NestedReadCloser{
                    Reader:            gr,
                    NestedReadClosers: []io.ReadCloser{gr, r.Body},
                }
                dataFeedReaders[dataFeedName] = nrc

                log.Warningf("could not store NVD data feed to filesystem: %s", err)
            }
        }
    }

    return dataFeedReaders, dataFeedHashes, nil
}

func getHashFromMetaURL(metaURL string) (string, error) {
    r, err := http.Get(metaURL)
    if err != nil {
        return "", err
    }
    defer r.Body.Close()

    scanner := bufio.NewScanner(r.Body)
    for scanner.Scan() {
        line := scanner.Text()
        if strings.HasPrefix(line, "sha256:") {
            return strings.TrimPrefix(line, "sha256:"), nil
        }
    }
    if err := scanner.Err(); err != nil {
        return "", err
    }

    return "", errors.New("invalid .meta file format")
}
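The .meta files fetched from dataFeedMetaURL are small key:value text documents, and getHashFromMetaURL only looks for the sha256: line. A standalone sketch with an inlined, hypothetical payload (the field names and hash are illustrative, not real NVD data):

package main

import (
    "bufio"
    "fmt"
    "strings"
)

func main() {
    // Hypothetical .meta payload; only the "sha256:" line matters to the fetcher.
    meta := "lastModifiedDate:2015-11-13T03:11:23-05:00\r\n" +
        "size:91171782\r\n" +
        "gzSize:4765401\r\n" +
        "sha256:ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789\r\n"

    scanner := bufio.NewScanner(strings.NewReader(meta))
    for scanner.Scan() {
        line := scanner.Text()
        if strings.HasPrefix(line, "sha256:") {
            fmt.Println("feed hash:", strings.TrimPrefix(line, "sha256:"))
            return
        }
    }
    fmt.Println("invalid .meta file format")
}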
updater/metadata_fetchers/nvd/xml.go (new file, 82 lines):

package nvd

import (
    "fmt"
    "strings"
)

type nvd struct {
    Entries []nvdEntry `xml:"entry"`
}

type nvdEntry struct {
    Name string  `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
    CVSS nvdCVSS `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cvss"`
}

type nvdCVSS struct {
    BaseMetrics nvdCVSSBaseMetrics `xml:"http://scap.nist.gov/schema/cvss-v2/0.2 base_metrics"`
}

type nvdCVSSBaseMetrics struct {
    Score            float64 `xml:"score"`
    AccessVector     string  `xml:"access-vector"`
    AccessComplexity string  `xml:"access-complexity"`
    Authentication   string  `xml:"authentication"`
    ConfImpact       string  `xml:"confidentiality-impact"`
    IntegImpact      string  `xml:"integrity-impact"`
    AvailImpact      string  `xml:"availability-impact"`
}

var vectorValuesToLetters map[string]string

func init() {
    vectorValuesToLetters = make(map[string]string)
    vectorValuesToLetters["NETWORK"] = "N"
    vectorValuesToLetters["ADJACENT_NETWORK"] = "A"
    vectorValuesToLetters["LOCAL"] = "L"
    vectorValuesToLetters["HIGH"] = "H"
    vectorValuesToLetters["MEDIUM"] = "M"
    vectorValuesToLetters["LOW"] = "L"
    vectorValuesToLetters["NONE"] = "N"
    vectorValuesToLetters["SINGLE_INSTANCE"] = "S"
    vectorValuesToLetters["MULTIPLE_INSTANCES"] = "M"
    vectorValuesToLetters["PARTIAL"] = "P"
    vectorValuesToLetters["COMPLETE"] = "C"
}

func (n nvdEntry) Metadata() *NVDMetadata {
    metadata := &NVDMetadata{
        CVSSv2: NVDmetadataCVSSv2{
            Vectors: n.CVSS.BaseMetrics.String(),
            Score:   n.CVSS.BaseMetrics.Score,
        },
    }

    if metadata.CVSSv2.Vectors == "" {
        return nil
    }
    return metadata
}

func (n nvdCVSSBaseMetrics) String() string {
    var str string
    addVec(&str, "AV", n.AccessVector)
    addVec(&str, "AC", n.AccessComplexity)
    addVec(&str, "Au", n.Authentication)
    addVec(&str, "C", n.ConfImpact)
    addVec(&str, "I", n.IntegImpact)
    addVec(&str, "A", n.AvailImpact)
    str = strings.TrimSuffix(str, "/")
    return str
}

func addVec(str *string, vec, val string) {
    if val != "" {
        if let, ok := vectorValuesToLetters[val]; ok {
            *str = fmt.Sprintf("%s%s:%s/", *str, vec, let)
        } else {
            log.Warningf("unknown value '%v' for CVSSv2 vector '%s'", val, vec)
        }
    }
}
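To make the vector construction concrete, this self-contained sketch feeds typical base-metric values through the same addVec logic and prints the abbreviated CVSSv2 vector. The mapping table is trimmed to the values used, and the input values are hypothetical.

package main

import (
    "fmt"
    "strings"
)

// vectorValuesToLetters mirrors the mapping above, trimmed to what this example needs.
var vectorValuesToLetters = map[string]string{
    "NETWORK": "N", "MEDIUM": "M", "NONE": "N", "PARTIAL": "P",
}

func addVec(str *string, vec, val string) {
    if val != "" {
        if let, ok := vectorValuesToLetters[val]; ok {
            *str = fmt.Sprintf("%s%s:%s/", *str, vec, let)
        }
    }
}

func main() {
    // Base metrics as they might appear in an NVD entry (hypothetical CVE).
    var str string
    addVec(&str, "AV", "NETWORK")
    addVec(&str, "AC", "MEDIUM")
    addVec(&str, "Au", "NONE")
    addVec(&str, "C", "PARTIAL")
    addVec(&str, "I", "PARTIAL")
    addVec(&str, "A", "PARTIAL")
    fmt.Println(strings.TrimSuffix(str, "/")) // AV:N/AC:M/Au:N/C:P/I:P/A:P
}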
@@ -19,6 +19,7 @@ package updater
 import (
     "math/rand"
     "strconv"
+    "sync"
     "time"

     "github.com/coreos/clair/config"
@@ -144,6 +145,9 @@ func Run(config *config.UpdaterConfig, datastore database.Datastore, st *utils.S
     }

     // Clean resources.
+    for _, metadataFetcher := range metadataFetchers {
+        metadataFetcher.Clean()
+    }
     for _, fetcher := range fetchers {
         fetcher.Clean()
     }
@@ -161,10 +165,8 @@ func Update(datastore database.Datastore) {
     // Fetch updates.
     status, vulnerabilities, flags, notes := fetch(datastore)

-    // TODO(Quentin-M): Complete informations using NVD
-
     // Insert vulnerabilities.
-    log.Tracef("beginning insertion of %d vulnerabilities for update", len(vulnerabilities))
+    log.Tracef("inserting %d vulnerabilities for update", len(vulnerabilities))
     err := datastore.InsertVulnerabilities(vulnerabilities)
     if err != nil {
         promUpdaterErrorsTotal.Inc()
@@ -204,6 +206,7 @@ func fetch(datastore database.Datastore) (bool, []database.Vulnerability, map[st
     flags := make(map[string]string)

     // Fetch updates in parallel.
+    log.Info("fetching vulnerability updates")
     var responseC = make(chan *FetcherResponse, 0)
     for n, f := range fetchers {
         go func(name string, fetcher Fetcher) {
@@ -233,7 +236,52 @@ func fetch(datastore database.Datastore) (bool, []database.Vulnerability, map[st
     }

     close(responseC)
-    return status, vulnerabilities, flags, notes
+    return status, addMetadata(datastore, vulnerabilities), flags, notes
+}
+
+// addMetadata adds metadata to the specified vulnerabilities using the registered MetadataFetchers, in parallel.
+func addMetadata(datastore database.Datastore, vulnerabilities []database.Vulnerability) []database.Vulnerability {
+    if len(metadataFetchers) == 0 {
+        return vulnerabilities
+    }
+
+    log.Info("adding metadata to vulnerabilities")
+
+    // Wrap vulnerabilities in VulnerabilityWithLock.
+    // It ensures that only one metadata fetcher at a time can modify the Metadata map.
+    vulnerabilitiesWithLocks := make([]*VulnerabilityWithLock, 0, len(vulnerabilities))
+    for i := 0; i < len(vulnerabilities); i++ {
+        vulnerabilitiesWithLocks = append(vulnerabilitiesWithLocks, &VulnerabilityWithLock{
+            Vulnerability: &vulnerabilities[i],
+        })
+    }
+
+    var wg sync.WaitGroup
+    wg.Add(len(metadataFetchers))
+
+    for n, f := range metadataFetchers {
+        go func(name string, metadataFetcher MetadataFetcher) {
+            defer wg.Done()
+
+            // Load the metadata fetcher.
+            if err := metadataFetcher.Load(datastore); err != nil {
+                promUpdaterErrorsTotal.Inc()
+                log.Errorf("an error occurred when loading metadata fetcher '%s': %s.", name, err)
+                return
+            }
+
+            // Add metadata to each vulnerability.
+            for _, vulnerability := range vulnerabilitiesWithLocks {
+                metadataFetcher.AddMetadata(vulnerability)
+            }
+
+            metadataFetcher.Unload()
+        }(n, f)
+    }
+
+    wg.Wait()
+
+    return vulnerabilities
 }

 func getLastUpdate(datastore database.Datastore) time.Time {
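The concurrency pattern in addMetadata, reduced to a runnable sketch with stand-in types (none of these names are Clair's): fetchers run in parallel and serialize their writes to a vulnerability's Metadata map through its per-vulnerability lock.

package main

import (
    "fmt"
    "sync"
)

// vulnWithLock mimics updater.VulnerabilityWithLock for this sketch.
type vulnWithLock struct {
    Metadata map[string]interface{}
    Lock     sync.Mutex
}

// addKey stands in for one MetadataFetcher.AddMetadata call.
func addKey(v *vulnWithLock, key string, value interface{}) {
    v.Lock.Lock()
    defer v.Lock.Unlock()
    if v.Metadata == nil {
        v.Metadata = make(map[string]interface{})
    }
    v.Metadata[key] = value
}

func main() {
    v := &vulnWithLock{}

    var wg sync.WaitGroup
    wg.Add(2)
    // Two fetchers run concurrently, as in addMetadata above; the per-vulnerability
    // lock keeps the shared Metadata map consistent.
    go func() { defer wg.Done(); addKey(v, "NVD", map[string]interface{}{"CVSSv2": 5.0}) }()
    go func() { defer wg.Done(); addKey(v, "other", "hypothetical source") }()
    wg.Wait()

    fmt.Printf("%#v\n", v.Metadata)
}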
@@ -14,22 +14,10 @@

 package utils

-import (
-    "crypto/sha1"
-    "encoding/hex"
-    "regexp"
-)
+import "regexp"

 var urlParametersRegexp = regexp.MustCompile(`(\?|\&)([^=]+)\=([^ &]+)`)

-// Hash returns an unique hash of the given string.
-func Hash(str string) string {
-    h := sha1.New()
-    h.Write([]byte(str))
-    bs := h.Sum(nil)
-    return hex.EncodeToString(bs)
-}
-
 // CleanURL removes all parameters from an URL.
 func CleanURL(str string) string {
     return urlParametersRegexp.ReplaceAllString(str, "")
Deleted file (225 lines): the commented-out CVSSv2 scaffolding in package types is removed.

// Copyright 2015 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package types

//
// import "fmt"
//
// // CVSSv2 represents the Common Vulnerability Scoring System (CVSS), that assesses the severity of
// // vulnerabilities.
// // It describes the CVSS score, but also a vector describing the components from which the score
// // was calculated. This provides users of the score confidence in its correctness and provides
// // insight into the nature of the vulnerability.
// //
// // Reference: https://nvd.nist.gov/CVSS/Vector-v2.aspx
// type CVSSv2 struct {
//     // Base Vectors
//     AccessVector     CVSSValue
//     AccessComplexity CVSSValue
//     Authentication   CVSSValue
//     ConfImpact       CVSSValue
//     IntegImpact      CVSSValue
//     AvailImpact      CVSSValue
//     // Temporal Vectors
//     Exploitability   CVSSValue
//     RemediationLevel CVSSValue
//     ReportConfidence CVSSValue
//     // Environmental Vectors
//     CollateralDamagePotential        CVSSValue
//     TargetDistribution               CVSSValue
//     SystemConfidentialityRequirement CVSSValue
//     SystemIntegrityRequirement       CVSSValue
//     SystemAvailabilityRequirement    CVSSValue
// }
//
// func NewCVSSv2(value string) (*CVSSv2, error) {
//
// }
//
// // CVSSValue is the comprehensible value for a CVSS metric.
// type CVSSValue string
//
// // Metric acronym + Value abbreviation -> Comprehensible metric value.
// var toValue map[string]func(string) (CVSSValue, error)
//
// func init() {
//     parsers = make(map[string]func(string) (CVSSValue, error), 14)
//     toValue["AV"] = av
//     toValue["AC"] = ac
//     toValue["Au"] = au
//     toValue["C"] = cAndIAndA
//     toValue["I"] = cAndIAndA
//     toValue["A"] = cAndIAndA
//     toValue["E"] = e
//     toValue["RL"] = rl
//     toValue["RC"] = rc
//     toValue["CDP"] = cdp
//     toValue["TD"] = td
//     toValue["CR"] = crAndIrAndAr
//     toValue["IR"] = crAndIrAndAr
//     toValue["AR"] = crAndIrAndAr
// }
//
// func av(v string) (CVSSValue, error) {
//     switch v {
//     case "L":
//         return CVSSValue("Local access"), nil
//     case "A":
//         return CVSSValue("Adjacent Network"), nil
//     case "N":
//         return CVSSValue("Network"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for AV", v)
//     }
// }
//
// func ac(v string) (CVSSValue, error) {
//     switch v {
//     case "H":
//         return CVSSValue("High"), nil
//     case "M":
//         return CVSSValue("Medium"), nil
//     case "L":
//         return CVSSValue("Low"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for AC", v)
//     }
// }
//
// func au(v string) (CVSSValue, error) {
//     switch v {
//     case "N":
//         return CVSSValue("None required"), nil
//     case "S":
//         return CVSSValue("Requires single instance"), nil
//     case "M":
//         return CVSSValue("Requires multiple instances"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for Au", v)
//     }
// }
//
// func cAndIAndA(v string) (CVSSValue, error) {
//     switch v {
//     case "N":
//         return CVSSValue("None"), nil
//     case "P":
//         return CVSSValue("Partial"), nil
//     case "C":
//         return CVSSValue("Complete"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for C/I/A", v)
//     }
// }
//
// func e(v string) (CVSSValue, error) {
//     switch v {
//     case "U":
//         return CVSSValue("Unproven"), nil
//     case "POC":
//         return CVSSValue("Proof-of-concept"), nil
//     case "F":
//         return CVSSValue("Functional"), nil
//     case "H":
//         return CVSSValue("High"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for E", v)
//     }
// }
//
// func rl(v string) (CVSSValue, error) {
//     switch v {
//     case "OF":
//         return CVSSValue("Official-fix"), nil
//     case "T":
//         return CVSSValue("Temporary-fix"), nil
//     case "W":
//         return CVSSValue("Workaround"), nil
//     case "U":
//         return CVSSValue("Unavailable"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for RL", v)
//     }
// }
//
// func rc(v string) (CVSSValue, error) {
//     switch v {
//     case "UC":
//         return CVSSValue("Unconfirmed"), nil
//     case "UR":
//         return CVSSValue("Uncorroborated"), nil
//     case "C":
//         return CVSSValue("Confirmed"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for RC", v)
//     }
// }
//
// func cdp(v string) (CVSSValue, error) {
//     switch v {
//     case "N":
//         return CVSSValue("None"), nil
//     case "L":
//         return CVSSValue("Low"), nil
//     case "LM":
//         return CVSSValue("Low-Medium"), nil
//     case "MH":
//         return CVSSValue("Medium-High"), nil
//     case "H":
//         return CVSSValue("High"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for CDP", v)
//     }
// }
//
// func td(v string) (CVSSValue, error) {
//     switch v {
//     case "N":
//         return CVSSValue("None (0%)"), nil
//     case "L":
//         return CVSSValue("Low (1-25%)"), nil
//     case "M":
//         return CVSSValue("Medium (26-75%)"), nil
//     case "H":
//         return CVSSValue("High (76-100%)"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for TD", v)
//     }
// }
//
// func crAndIrAndAr(v string) (CVSSValue, error) {
//     switch v {
//     case "L":
//         return CVSSValue("Low"), nil
//     case "M":
//         return CVSSValue("Medium"), nil
//     case "H":
//         return CVSSValue("High"), nil
//     case "ND":
//         return CVSSValue("Not Defined"), nil
//     default:
//         return "", fmt.Errorf("%v is not a valid value for CR/IR/AR", v)
//     }
// }
@@ -56,9 +56,6 @@ func TestExec(t *testing.T) {

 // TestString tests the string.go file
 func TestString(t *testing.T) {
-    assert.Equal(t, Hash("abc123"), Hash("abc123"))
-    assert.NotEqual(t, Hash("abc123."), Hash("abc123"))
-
     assert.False(t, Contains("", []string{}))
     assert.True(t, Contains("a", []string{"a", "b"}))
     assert.False(t, Contains("c", []string{"a", "b"}))