2017-01-04 02:44:32 +00:00
|
|
|
// Copyright 2017 clair authors
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
// Package nvd implements a vulnerability metadata appender using the NIST NVD
|
|
|
|
// database.
|
2016-02-01 23:41:40 +00:00
|
|
|
package nvd
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
|
|
|
"compress/gzip"
|
2018-10-16 22:42:15 +00:00
|
|
|
"encoding/json"
|
2016-02-01 23:41:40 +00:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"io/ioutil"
|
|
|
|
"os"
|
2018-07-17 12:11:24 +00:00
|
|
|
"path/filepath"
|
2016-02-01 23:41:40 +00:00
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
2017-05-04 17:21:25 +00:00
|
|
|
log "github.com/sirupsen/logrus"
|
2017-01-04 02:44:32 +00:00
|
|
|
|
2016-02-01 23:41:40 +00:00
|
|
|
"github.com/coreos/clair/database"
|
2017-01-04 02:44:32 +00:00
|
|
|
"github.com/coreos/clair/ext/vulnmdsrc"
|
2017-01-13 07:08:52 +00:00
|
|
|
"github.com/coreos/clair/pkg/commonerr"
|
2018-09-06 19:41:40 +00:00
|
|
|
"github.com/coreos/clair/pkg/httputil"
|
2016-02-01 23:41:40 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// Constants describing where the NVD JSON 1.0 feeds live and how this
// appender identifies itself.
const (
	// dataFeedURL is the URL template (fmt verb: feed name, i.e. year) for the
	// gzipped JSON vulnerability feed.
	dataFeedURL string = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-%s.json.gz"
	// dataFeedMetaURL is the URL template for the .meta file that advertises
	// the feed's sha256 hash.
	dataFeedMetaURL string = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-%s.meta"

	// appenderName is the name under which this appender is registered.
	appenderName string = "NVD"

	// logDataFeedName is the structured-log field key for the feed name.
	logDataFeedName string = "data feed name"
)
|
2016-02-01 23:41:40 +00:00
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// appender caches NVD metadata on disk and in memory and appends it to
// vulnerabilities by name.
type appender struct {
	// localPath is the directory where downloaded feed files are cached;
	// empty until BuildCache creates it.
	localPath string
	// dataFeedHashes maps a feed name (year) to the sha256 hash from its
	// .meta file, used to decide whether a cached file can be reused.
	dataFeedHashes map[string]string
	// metadata maps a CVE name to its parsed NVD metadata.
	metadata map[string]NVDMetadata
}
|
|
|
|
|
|
|
|
// NVDMetadata is the metadata attached to a vulnerability, holding the CVSS
// v2 and v3 information published by NVD.
type NVDMetadata struct {
	CVSSv2 NVDmetadataCVSSv2
	CVSSv3 NVDmetadataCVSSv3
}
|
|
|
|
|
|
|
|
// NVDmetadataCVSSv2 holds the CVSS v2 information of a vulnerability.
type NVDmetadataCVSSv2 struct {
	// PublishedDateTime is the publication timestamp as a string.
	PublishedDateTime string
	// Vectors is the CVSS v2 vector string.
	Vectors string
	// Score is the CVSS v2 base score.
	Score float64
}
|
|
|
|
|
2018-10-16 23:08:17 +00:00
|
|
|
// NVDmetadataCVSSv3 holds the CVSS v3 information of a vulnerability.
type NVDmetadataCVSSv3 struct {
	// Vectors is the CVSS v3 vector string.
	Vectors string
	// Score is the CVSS v3 base score.
	Score float64
	// ExploitabilityScore is the CVSS v3 exploitability sub-score.
	ExploitabilityScore float64
	// ImpactScore is the CVSS v3 impact sub-score.
	ImpactScore float64
}
|
|
|
|
|
2016-02-01 23:41:40 +00:00
|
|
|
// init registers this appender with the vulnmdsrc registry under "NVD".
func init() {
	vulnmdsrc.RegisterAppender(appenderName, &appender{})
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
func (a *appender) BuildCache(datastore database.Datastore) error {
|
2016-02-01 23:41:40 +00:00
|
|
|
var err error
|
2017-01-04 02:44:32 +00:00
|
|
|
a.metadata = make(map[string]NVDMetadata)
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// Init if necessary.
|
2017-01-04 02:44:32 +00:00
|
|
|
if a.localPath == "" {
|
2016-02-01 23:41:40 +00:00
|
|
|
// Create a temporary folder to store the NVD data and create hashes struct.
|
2017-01-04 02:44:32 +00:00
|
|
|
if a.localPath, err = ioutil.TempDir(os.TempDir(), "nvd-data"); err != nil {
|
2017-01-13 07:08:52 +00:00
|
|
|
return commonerr.ErrFilesystem
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
a.dataFeedHashes = make(map[string]string)
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get data feeds.
|
2017-01-04 02:44:32 +00:00
|
|
|
dataFeedReaders, dataFeedHashes, err := getDataFeeds(a.dataFeedHashes, a.localPath)
|
2016-02-01 23:41:40 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2017-01-04 02:44:32 +00:00
|
|
|
a.dataFeedHashes = dataFeedHashes
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// Parse data feeds.
|
2018-07-12 21:40:05 +00:00
|
|
|
for dataFeedName, dataFileName := range dataFeedReaders {
|
|
|
|
f, err := os.Open(dataFileName)
|
|
|
|
if err != nil {
|
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not open NVD data file")
|
|
|
|
return commonerr.ErrCouldNotParse
|
|
|
|
}
|
2018-10-16 22:42:15 +00:00
|
|
|
|
2018-07-12 21:40:05 +00:00
|
|
|
r := bufio.NewReader(f)
|
2018-10-16 22:52:27 +00:00
|
|
|
if err := a.parseDataFeed(r); err != nil {
|
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not parse NVD data file")
|
|
|
|
return err
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
2018-10-16 22:52:27 +00:00
|
|
|
f.Close()
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (a *appender) parseDataFeed(r io.Reader) error {
|
|
|
|
var nvd nvd
|
2016-02-01 23:41:40 +00:00
|
|
|
|
2018-10-16 22:52:27 +00:00
|
|
|
if err := json.NewDecoder(r).Decode(&nvd); err != nil {
|
|
|
|
return commonerr.ErrCouldNotParse
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, nvdEntry := range nvd.Entries {
|
|
|
|
// Create metadata entry.
|
|
|
|
if metadata := nvdEntry.Metadata(); metadata != nil {
|
|
|
|
a.metadata[nvdEntry.Name()] = *metadata
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
func (a *appender) Append(vulnName string, appendFunc vulnmdsrc.AppendFunc) error {
|
|
|
|
if nvdMetadata, ok := a.metadata[vulnName]; ok {
|
2017-01-15 15:52:13 +00:00
|
|
|
appendFunc(appenderName, nvdMetadata, SeverityFromCVSS(nvdMetadata.CVSSv2.Score))
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// PurgeCache drops the in-memory metadata map built by BuildCache. The
// on-disk feed cache is left untouched (see Clean).
func (a *appender) PurgeCache() {
	a.metadata = nil
}
|
|
|
|
|
2017-01-04 02:44:32 +00:00
|
|
|
// Clean removes the local directory holding the downloaded feed files.
// Errors from RemoveAll are deliberately ignored (best-effort cleanup).
func (a *appender) Clean() {
	os.RemoveAll(a.localPath)
}
|
|
|
|
|
2018-07-12 21:40:05 +00:00
|
|
|
func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]string, map[string]string, error) {
|
2016-02-01 23:41:40 +00:00
|
|
|
var dataFeedNames []string
|
|
|
|
for y := 2002; y <= time.Now().Year(); y++ {
|
|
|
|
dataFeedNames = append(dataFeedNames, strconv.Itoa(y))
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get hashes for these feeds.
|
|
|
|
for _, dataFeedName := range dataFeedNames {
|
|
|
|
hash, err := getHashFromMetaURL(fmt.Sprintf(dataFeedMetaURL, dataFeedName))
|
|
|
|
if err != nil {
|
2017-05-04 17:21:25 +00:00
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Warning("could not get NVD data feed hash")
|
2016-02-01 23:41:40 +00:00
|
|
|
|
|
|
|
// It's not a big deal, no need interrupt, we're just going to download it again then.
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
dataFeedHashes[dataFeedName] = hash
|
|
|
|
}
|
|
|
|
|
2018-07-12 21:40:05 +00:00
|
|
|
// Create map containing the name and filename for every data feed.
|
|
|
|
dataFeedReaders := make(map[string]string)
|
2016-02-01 23:41:40 +00:00
|
|
|
for _, dataFeedName := range dataFeedNames {
|
2018-10-16 22:42:15 +00:00
|
|
|
fileName := filepath.Join(localPath, fmt.Sprintf("%s.json", dataFeedName))
|
|
|
|
|
2016-02-01 23:41:40 +00:00
|
|
|
if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
|
|
|
|
// The hash is known, the disk should contains the feed. Try to read from it.
|
|
|
|
if localPath != "" {
|
|
|
|
if f, err := os.Open(fileName); err == nil {
|
2018-07-12 21:40:05 +00:00
|
|
|
f.Close()
|
|
|
|
dataFeedReaders[dataFeedName] = fileName
|
2016-02-01 23:41:40 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
err := downloadFeed(dataFeedName, fileName)
|
|
|
|
if err != nil {
|
|
|
|
return dataFeedReaders, dataFeedHashes, err
|
|
|
|
}
|
|
|
|
dataFeedReaders[dataFeedName] = fileName
|
|
|
|
}
|
|
|
|
}
|
2018-09-06 19:41:40 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
return dataFeedReaders, dataFeedHashes, nil
|
|
|
|
}
|
2016-02-01 23:41:40 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
func downloadFeed(dataFeedName, fileName string) error {
|
|
|
|
// Download data feed.
|
|
|
|
r, err := httputil.GetWithUserAgent(fmt.Sprintf(dataFeedURL, dataFeedName))
|
|
|
|
if err != nil {
|
|
|
|
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not download NVD data feed")
|
|
|
|
return commonerr.ErrCouldNotDownload
|
|
|
|
}
|
|
|
|
defer r.Body.Close()
|
2018-09-07 21:13:31 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
if !httputil.Status2xx(r) {
|
|
|
|
log.WithFields(log.Fields{"StatusCode": r.StatusCode, "DataFeedName": dataFeedName}).Error("Failed to download NVD data feed")
|
|
|
|
return commonerr.ErrCouldNotDownload
|
|
|
|
}
|
2018-09-07 21:13:31 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
// Un-gzip it.
|
|
|
|
gr, err := gzip.NewReader(r.Body)
|
|
|
|
if err != nil {
|
|
|
|
log.WithError(err).WithFields(log.Fields{"StatusCode": r.StatusCode, "DataFeedName": dataFeedName}).Error("could not read NVD data feed")
|
|
|
|
return commonerr.ErrCouldNotDownload
|
|
|
|
}
|
2018-09-07 21:13:31 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
// Store it to a file at the same time if possible.
|
|
|
|
f, err := os.Create(fileName)
|
|
|
|
if err != nil {
|
|
|
|
log.WithError(err).WithField("Filename", fileName).Warning("could not store NVD data feed to filesystem")
|
|
|
|
return commonerr.ErrFilesystem
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
2018-09-10 18:42:22 +00:00
|
|
|
defer f.Close()
|
2016-02-01 23:41:40 +00:00
|
|
|
|
2018-09-10 18:42:22 +00:00
|
|
|
_, err = io.Copy(f, gr)
|
|
|
|
if err != nil {
|
|
|
|
log.WithError(err).WithField("Filename", fileName).Warning("could not stream NVD data feed to filesystem")
|
|
|
|
return commonerr.ErrFilesystem
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2016-02-01 23:41:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func getHashFromMetaURL(metaURL string) (string, error) {
|
2018-09-06 19:41:40 +00:00
|
|
|
r, err := httputil.GetWithUserAgent(metaURL)
|
2016-02-01 23:41:40 +00:00
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer r.Body.Close()
|
|
|
|
|
2018-09-06 19:41:40 +00:00
|
|
|
if !httputil.Status2xx(r) {
|
2018-09-07 14:47:11 +00:00
|
|
|
return "", errors.New(metaURL + " failed status code: " + string(r.StatusCode))
|
2018-09-06 19:41:40 +00:00
|
|
|
}
|
|
|
|
|
2016-02-01 23:41:40 +00:00
|
|
|
scanner := bufio.NewScanner(r.Body)
|
|
|
|
for scanner.Scan() {
|
|
|
|
line := scanner.Text()
|
|
|
|
if strings.HasPrefix(line, "sha256:") {
|
|
|
|
return strings.TrimPrefix(line, "sha256:"), nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if err := scanner.Err(); err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
return "", errors.New("invalid .meta file format")
|
|
|
|
}
|
2016-11-18 17:08:22 +00:00
|
|
|
|
2017-01-19 18:42:37 +00:00
|
|
|
// SeverityFromCVSS converts the CVSS Score (0.0 - 10.0) into a
|
|
|
|
// database.Severity following the qualitative rating scale available in the
|
|
|
|
// CVSS v3.0 specification (https://www.first.org/cvss/specification-document),
|
|
|
|
// Table 14.
|
|
|
|
//
|
2017-01-15 15:52:13 +00:00
|
|
|
// The Negligible level is set for CVSS scores between [0, 1), replacing the
|
|
|
|
// specified None level, originally used for a score of 0.
|
2017-01-19 18:42:37 +00:00
|
|
|
func SeverityFromCVSS(score float64) database.Severity {
|
2016-11-18 17:08:22 +00:00
|
|
|
switch {
|
|
|
|
case score < 1.0:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.NegligibleSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 3.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.LowSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 6.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.MediumSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score < 8.9:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.HighSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
case score <= 10:
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.CriticalSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
}
|
2017-01-19 18:42:37 +00:00
|
|
|
return database.UnknownSeverity
|
2016-11-18 17:08:22 +00:00
|
|
|
}
|