2017-01-04 02:44:32 +00:00
|
|
|
// Copyright 2017 clair authors
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
// Package nvd implements a vulnerability metadata appender using the NIST NVD
|
|
|
|
// database.
|
2016-02-01 23:41:40 +00:00
|
|
|
package nvd
|
|
|
|
|
|
|
|
import (
	"bufio"
	"compress/gzip"
	"encoding/xml"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	log "github.com/sirupsen/logrus"

	"github.com/coreos/clair/database"
	"github.com/coreos/clair/ext/vulnmdsrc"
	"github.com/coreos/clair/pkg/commonerr"
)
|
|
|
|
|
|
|
|
const (
	// dataFeedURL is the URL template of the yearly NVD CVE 2.0 gzipped XML
	// feeds; %s is substituted with the four-digit feed year.
	// NOTE(review): plain http:// — NVD also serves these feeds over HTTPS;
	// consider switching to https:// (verify availability before changing).
	dataFeedURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
	// dataFeedMetaURL is the URL template of the .meta file that advertises
	// the sha256 hash of the corresponding yearly feed.
	dataFeedMetaURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"

	// appenderName is the name under which this appender registers itself.
	appenderName string = "NVD"

	// logDataFeedName is the structured-logging field key used for the name
	// of the data feed being processed.
	logDataFeedName string = "data feed name"
)
|
2016-02-01 23:41:40 +00:00
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// appender is a vulnerability metadata appender backed by the NIST NVD XML
// data feeds. It is registered with the vulnmdsrc registry in init().
type appender struct {
	// localPath is the directory where downloaded feeds are cached on disk;
	// empty until BuildCache creates a temporary directory for it.
	localPath string
	// dataFeedHashes maps a feed name (a year, e.g. "2002") to the sha256
	// hash advertised by that feed's .meta file.
	dataFeedHashes map[string]string
	// metadata maps a vulnerability name (nvdEntry.Name) to the metadata
	// extracted from the feeds; populated by BuildCache, dropped by PurgeCache.
	metadata map[string]NVDMetadata
}
|
|
|
|
|
|
|
|
// NVDMetadata is the metadata this appender attaches to a vulnerability.
type NVDMetadata struct {
	// CVSSv2 holds the CVSS version 2 vector string and score.
	CVSSv2 NVDmetadataCVSSv2
}
|
|
|
|
|
|
|
|
// NVDmetadataCVSSv2 holds the CVSS version 2 information of an NVD entry.
type NVDmetadataCVSSv2 struct {
	// Vectors is the CVSS v2 vector string.
	Vectors string
	// Score is the CVSS v2 score in the range 0.0 - 10.0.
	Score float64
}
|
|
|
|
|
|
|
|
// init registers this appender with the vulnmdsrc registry under
// appenderName ("NVD").
func init() {
	vulnmdsrc.RegisterAppender(appenderName, &appender{})
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
func (a *appender) BuildCache(datastore database.Datastore) error {
|
2016-02-01 23:41:40 +00:00
|
|
|
var err error
|
2017-01-04 02:44:32 +00:00
|
|
|
a.metadata = make(map[string]NVDMetadata)
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// Init if necessary.
|
2017-01-04 02:44:32 +00:00
|
|
|
if a.localPath == "" {
|
2016-02-01 23:41:40 +00:00
|
|
|
// Create a temporary folder to store the NVD data and create hashes struct.
|
2017-01-04 02:44:32 +00:00
|
|
|
if a.localPath, err = ioutil.TempDir(os.TempDir(), "nvd-data"); err != nil {
|
2017-01-13 07:08:52 +00:00
|
|
|
return commonerr.ErrFilesystem
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
a.dataFeedHashes = make(map[string]string)
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get data feeds.
|
2017-01-04 02:44:32 +00:00
|
|
|
dataFeedReaders, dataFeedHashes, err := getDataFeeds(a.dataFeedHashes, a.localPath)
|
2016-02-01 23:41:40 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2017-01-04 02:44:32 +00:00
|
|
|
a.dataFeedHashes = dataFeedHashes
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// Parse data feeds.
|
|
|
|
for dataFeedName, dataFeedReader := range dataFeedReaders {
|
|
|
|
var nvd nvd
|
|
|
|
if err = xml.NewDecoder(dataFeedReader).Decode(&nvd); err != nil {
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not decode NVD data feed")
|
2017-01-13 07:08:52 +00:00
|
|
|
return commonerr.ErrCouldNotParse
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// For each entry of this data feed:
|
|
|
|
for _, nvdEntry := range nvd.Entries {
|
|
|
|
// Create metadata entry.
|
|
|
|
if metadata := nvdEntry.Metadata(); metadata != nil {
|
2017-01-04 02:44:32 +00:00
|
|
|
a.metadata[nvdEntry.Name] = *metadata
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
dataFeedReader.Close()
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
func (a *appender) Append(vulnName string, appendFunc vulnmdsrc.AppendFunc) error {
|
|
|
|
if nvdMetadata, ok := a.metadata[vulnName]; ok {
|
2017-01-15 15:52:13 +00:00
|
|
|
appendFunc(appenderName, nvdMetadata, SeverityFromCVSS(nvdMetadata.CVSSv2.Score))
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// PurgeCache drops the in-memory metadata map built by BuildCache. The
// on-disk feed cache and the stored feed hashes are kept so a later
// BuildCache can reuse them; see Clean for removing the on-disk cache.
func (a *appender) PurgeCache() {
	a.metadata = nil
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// Clean removes the local directory that caches the downloaded NVD data
// feeds, including everything inside it. Removal errors are ignored.
func (a *appender) Clean() {
	os.RemoveAll(a.localPath)
}
|
|
|
|
|
|
|
|
func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]NestedReadCloser, map[string]string, error) {
|
|
|
|
var dataFeedNames []string
|
|
|
|
for y := 2002; y <= time.Now().Year(); y++ {
|
|
|
|
dataFeedNames = append(dataFeedNames, strconv.Itoa(y))
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get hashes for these feeds.
|
|
|
|
for _, dataFeedName := range dataFeedNames {
|
|
|
|
hash, err := getHashFromMetaURL(fmt.Sprintf(dataFeedMetaURL, dataFeedName))
|
|
|
|
if err != nil {
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Warning("could not get NVD data feed hash")
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// It's not a big deal, no need interrupt, we're just going to download it again then.
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
dataFeedHashes[dataFeedName] = hash
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create io.Reader for every data feed.
|
|
|
|
dataFeedReaders := make(map[string]NestedReadCloser)
|
|
|
|
for _, dataFeedName := range dataFeedNames {
|
|
|
|
fileName := localPath + dataFeedName + ".xml"
|
|
|
|
|
|
|
|
if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
|
|
|
|
// The hash is known, the disk should contains the feed. Try to read from it.
|
|
|
|
if localPath != "" {
|
|
|
|
if f, err := os.Open(fileName); err == nil {
|
|
|
|
dataFeedReaders[dataFeedName] = NestedReadCloser{
|
|
|
|
Reader: f,
|
|
|
|
NestedReadClosers: []io.ReadCloser{f},
|
|
|
|
}
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Download data feed.
|
|
|
|
r, err := http.Get(fmt.Sprintf(dataFeedURL, dataFeedName))
|
|
|
|
if err != nil {
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not download NVD data feed")
|
2017-01-13 07:08:52 +00:00
|
|
|
return dataFeedReaders, dataFeedHashes, commonerr.ErrCouldNotDownload
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Un-gzip it.
|
|
|
|
gr, err := gzip.NewReader(r.Body)
|
|
|
|
if err != nil {
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not read NVD data feed")
|
2017-01-13 07:08:52 +00:00
|
|
|
return dataFeedReaders, dataFeedHashes, commonerr.ErrCouldNotDownload
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Store it to a file at the same time if possible.
|
|
|
|
if f, err := os.Create(fileName); err == nil {
|
|
|
|
nrc := NestedReadCloser{
|
|
|
|
Reader: io.TeeReader(gr, f),
|
|
|
|
NestedReadClosers: []io.ReadCloser{r.Body, gr, f},
|
|
|
|
}
|
|
|
|
dataFeedReaders[dataFeedName] = nrc
|
|
|
|
} else {
|
|
|
|
nrc := NestedReadCloser{
|
|
|
|
Reader: gr,
|
|
|
|
NestedReadClosers: []io.ReadCloser{gr, r.Body},
|
|
|
|
}
|
|
|
|
dataFeedReaders[dataFeedName] = nrc
|
|
|
|
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).Warning("could not store NVD data feed to filesystem")
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return dataFeedReaders, dataFeedHashes, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// getHashFromMetaURL fetches the .meta file at metaURL and extracts the
// sha256 hash of the corresponding data feed.
//
// The .meta file is a sequence of "key:value" lines; only the line prefixed
// with "sha256:" is of interest. An error is returned when the request
// fails, the server does not answer 200 OK, or no sha256 line is found.
func getHashFromMetaURL(metaURL string) (string, error) {
	r, err := http.Get(metaURL)
	if err != nil {
		return "", err
	}
	defer r.Body.Close()

	// BUGFIX: reject non-200 answers. Previously an error page (e.g. a 404
	// body) was scanned as if it were a .meta file and misreported as an
	// "invalid .meta file format" error.
	if r.StatusCode != http.StatusOK {
		return "", fmt.Errorf("unexpected status code %d for %s", r.StatusCode, metaURL)
	}

	scanner := bufio.NewScanner(r.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "sha256:") {
			return strings.TrimPrefix(line, "sha256:"), nil
		}
	}
	if err := scanner.Err(); err != nil {
		return "", err
	}

	return "", errors.New("invalid .meta file format")
}
|
2016-11-18 17:08:22 +00:00
|
|
|
|
2017-01-19 18:42:37 +00:00
|
|
|
// SeverityFromCVSS converts the CVSS Score (0.0 - 10.0) into a
|
|
|
|
// database.Severity following the qualitative rating scale available in the
|
|
|
|
// CVSS v3.0 specification (https://www.first.org/cvss/specification-document),
|
|
|
|
// Table 14.
|
|
|
|
//
|
2017-01-15 15:52:13 +00:00
|
|
|
// The Negligible level is set for CVSS scores between [0, 1), replacing the
|
|
|
|
// specified None level, originally used for a score of 0.
|
2017-01-19 18:42:37 +00:00
|
|
|
func SeverityFromCVSS(score float64) database.Severity {
|
2016-11-18 17:08:22 +00:00
|
|
|
switch {
|
|
|
|
case score < 1.0:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.NegligibleSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 3.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.LowSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 6.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.MediumSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 8.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.HighSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score <= 10:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.CriticalSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
}
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.UnknownSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
}
|