ext: Parse NVD JSON feed instead of XML

The JSON feed provides some values that are not available in the XML feed, such as CVSSv3.

Kate Murphy 2018-10-16 18:42:15 -04:00
parent 17539bda60
commit aab46f5658
GPG Key ID: DE24040826F4BD73
2 changed files with 45 additions and 27 deletions
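
For orientation, the NVD JSON 1.0 feed that the new structs below decode is shaped roughly like the hand-written fragment in this sketch. The sketch is illustrative only, not code from this commit: the struct and variable names and the sample CVE entry are invented, but the JSON tags mirror the ones introduced in the diff.

// Sketch only: decode a hand-written fragment shaped like the NVD JSON 1.0
// feed using structs equivalent to the ones added in this commit.
package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

type feed struct {
    Entries []entry `json:"CVE_Items"`
}

type entry struct {
    CVE struct {
        Metadata struct {
            ID string `json:"ID"`
        } `json:"CVE_data_meta"`
    } `json:"cve"`
    Impact struct {
        BaseMetricV2 struct {
            CVSSv2 struct {
                BaseScore    float64 `json:"baseScore"`
                AccessVector string  `json:"accessVector"`
            } `json:"cvssV2"`
        } `json:"baseMetricV2"`
    } `json:"impact"`
    PublishedDate string `json:"publishedDate"`
}

const sample = `{
  "CVE_Items": [{
    "cve": {"CVE_data_meta": {"ID": "CVE-2018-0001"}},
    "impact": {"baseMetricV2": {"cvssV2": {"baseScore": 7.5, "accessVector": "NETWORK"}}},
    "publishedDate": "2018-01-01T00:00Z"
  }]
}`

func main() {
    var f feed
    if err := json.NewDecoder(strings.NewReader(sample)).Decode(&f); err != nil {
        panic(err)
    }
    e := f.Entries[0]
    fmt.Println(e.CVE.Metadata.ID, e.Impact.BaseMetricV2.CVSSv2.BaseScore) // CVE-2018-0001 7.5
}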

Changed file 1 of 2:

@@ -22,27 +22,39 @@ import (
 )
 
 type nvd struct {
-    Entries []nvdEntry `xml:"entry"`
+    Entries []nvdEntry `json:"CVE_Items"`
 }
 
 type nvdEntry struct {
-    Name              string  `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
-    CVSS              nvdCVSS `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cvss"`
-    PublishedDateTime string  `xml:"http://scap.nist.gov/schema/vulnerability/0.4 published-datetime"`
+    CVE               nvdCVE    `json:"cve"`
+    Impact            nvdImpact `json:"impact"`
+    PublishedDateTime string    `json:"publishedDate"`
 }
 
-type nvdCVSS struct {
-    BaseMetrics nvdCVSSBaseMetrics `xml:"http://scap.nist.gov/schema/cvss-v2/0.2 base_metrics"`
+type nvdCVE struct {
+    Metadata nvdCVEMetadata `json:"CVE_data_meta"`
 }
 
-type nvdCVSSBaseMetrics struct {
-    Score            float64 `xml:"score"`
-    AccessVector     string  `xml:"access-vector"`
-    AccessComplexity string  `xml:"access-complexity"`
-    Authentication   string  `xml:"authentication"`
-    ConfImpact       string  `xml:"confidentiality-impact"`
-    IntegImpact      string  `xml:"integrity-impact"`
-    AvailImpact      string  `xml:"availability-impact"`
+type nvdCVEMetadata struct {
+    CVEID string `json:"ID"`
+}
+
+type nvdImpact struct {
+    BaseMetricV2 nvdBaseMetricV2 `json:"baseMetricV2"`
+}
+
+type nvdBaseMetricV2 struct {
+    CVSSv2 nvdCVSSv2 `json:"cvssV2"`
+}
+
+type nvdCVSSv2 struct {
+    Score            float64 `json:"baseScore"`
+    AccessVector     string  `json:"accessVector"`
+    AccessComplexity string  `json:"accessComplexity"`
+    Authentication   string  `json:"authentication"`
+    ConfImpact       string  `json:"confidentialityImpact"`
+    IntegImpact      string  `json:"integrityImpact"`
+    AvailImpact      string  `json:"availabilityImpact"`
 }
 
 var vectorValuesToLetters map[string]string
@@ -56,8 +68,8 @@ func init() {
     vectorValuesToLetters["MEDIUM"] = "M"
     vectorValuesToLetters["LOW"] = "L"
     vectorValuesToLetters["NONE"] = "N"
-    vectorValuesToLetters["SINGLE_INSTANCE"] = "S"
-    vectorValuesToLetters["MULTIPLE_INSTANCES"] = "M"
+    vectorValuesToLetters["SINGLE"] = "S"
+    vectorValuesToLetters["MULTIPLE"] = "M"
     vectorValuesToLetters["PARTIAL"] = "P"
     vectorValuesToLetters["COMPLETE"] = "C"
 }
@@ -66,18 +78,23 @@ func (n nvdEntry) Metadata() *NVDMetadata {
     metadata := &NVDMetadata{
         CVSSv2: NVDmetadataCVSSv2{
             PublishedDateTime: n.PublishedDateTime,
-            Vectors:           n.CVSS.BaseMetrics.String(),
-            Score:             n.CVSS.BaseMetrics.Score,
+            Vectors:           n.Impact.BaseMetricV2.CVSSv2.String(),
+            Score:             n.Impact.BaseMetricV2.CVSSv2.Score,
         },
     }
 
     if metadata.CVSSv2.Vectors == "" {
         return nil
     }
     return metadata
 }
 
-func (n nvdCVSSBaseMetrics) String() string {
+func (n nvdEntry) Name() string {
+    return n.CVE.Metadata.CVEID
+}
+
+func (n nvdCVSSv2) String() string {
     var str string
     addVec(&str, "AV", n.AccessVector)
     addVec(&str, "AC", n.AccessComplexity)
@@ -94,7 +111,7 @@ func addVec(str *string, vec, val string) {
         if let, ok := vectorValuesToLetters[val]; ok {
             *str = fmt.Sprintf("%s%s:%s/", *str, vec, let)
         } else {
-            log.WithFields(log.Fields{"value": val, "vector": vec}).Warning("unknown value for CVSSv2 vector")
+            log.WithFields(log.Fields{"value": val, "vector": vec}).Warning("unknown value for CVSS vector")
         }
     }
 }
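
As a side note on the String()/addVec() pair above: the JSON feed spells CVSS v2 components out as enumeration strings, and the vectorValuesToLetters map built in init() turns them into the short vector letters, each component followed by a trailing slash. Below is a standalone sketch of that behaviour, using only the map entries visible in the hunks above; it is an illustration, not the commit's own file.

// Sketch only: rebuild the vector string the way addVec does, with a subset
// of the enumeration values shown in the init() hunk above.
package main

import "fmt"

var vectorValuesToLetters = map[string]string{
    "MEDIUM":   "M",
    "LOW":      "L",
    "NONE":     "N",
    "SINGLE":   "S",
    "MULTIPLE": "M",
    "PARTIAL":  "P",
    "COMPLETE": "C",
}

// addVec mirrors the helper in the diff: it appends "<vec>:<letter>/" when the
// enumeration value is known, so the finished string keeps a trailing slash.
func addVec(str *string, vec, val string) {
    if val != "" {
        if let, ok := vectorValuesToLetters[val]; ok {
            *str = fmt.Sprintf("%s%s:%s/", *str, vec, let)
        }
    }
}

func main() {
    var str string
    addVec(&str, "AC", "MEDIUM")
    addVec(&str, "Au", "SINGLE")
    addVec(&str, "C", "PARTIAL")
    addVec(&str, "I", "PARTIAL")
    addVec(&str, "A", "COMPLETE")
    fmt.Println(str) // AC:M/Au:S/C:P/I:P/A:C/
}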

Changed file 2 of 2:

@@ -19,7 +19,7 @@ package nvd
 import (
     "bufio"
     "compress/gzip"
-    "encoding/xml"
+    "encoding/json"
     "errors"
     "fmt"
     "io"
@@ -39,8 +39,8 @@ import (
 )
 
 const (
-    dataFeedURL     string = "https://nvd.nist.gov/feeds/xml/cve/2.0/nvdcve-2.0-%s.xml.gz"
-    dataFeedMetaURL string = "https://nvd.nist.gov/feeds/xml/cve/2.0/nvdcve-2.0-%s.meta"
+    dataFeedURL     string = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-%s.json.gz"
+    dataFeedMetaURL string = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-%s.meta"
 
     appenderName string = "NVD"
@@ -96,8 +96,9 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
             return commonerr.ErrCouldNotParse
         }
 
+        var nvd nvd
         r := bufio.NewReader(f)
-        if err = xml.NewDecoder(r).Decode(&nvd); err != nil {
+        if err := json.NewDecoder(r).Decode(&nvd); err != nil {
             f.Close()
             log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not decode NVD data feed")
             return commonerr.ErrCouldNotParse
@@ -107,7 +108,7 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
         for _, nvdEntry := range nvd.Entries {
             // Create metadata entry.
             if metadata := nvdEntry.Metadata(); metadata != nil {
-                a.metadata[nvdEntry.Name] = *metadata
+                a.metadata[nvdEntry.Name()] = *metadata
             }
         }
         f.Close()
@@ -154,7 +155,8 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin
     // Create map containing the name and filename for every data feed.
     dataFeedReaders := make(map[string]string)
     for _, dataFeedName := range dataFeedNames {
-        fileName := filepath.Join(localPath, fmt.Sprintf("%s.xml", dataFeedName))
+        fileName := filepath.Join(localPath, fmt.Sprintf("%s.json", dataFeedName))
+
         if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
             // The hash is known, the disk should contains the feed. Try to read from it.
             if localPath != "" {
@@ -177,7 +179,6 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin
 }
 
 func downloadFeed(dataFeedName, fileName string) error {
-    // Download data feed.
     r, err := httputil.GetWithUserAgent(fmt.Sprintf(dataFeedURL, dataFeedName))
     if err != nil {
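
For completeness, the download-and-decode path touched by the dataFeedURL and BuildCache hunks can be exercised in isolation with a sketch like the one below. It is an assumption-laden illustration rather than the commit's code: plain net/http stands in for clair's httputil.GetWithUserAgent helper, and "2018" is just one plausible year-based feed name to substitute into the URL template.

// Sketch only: fetch one NVD JSON feed with the URL template from the diff,
// gunzip it, and decode it with the same bufio + json.NewDecoder pattern that
// BuildCache uses. net/http replaces clair's httputil.GetWithUserAgent here.
package main

import (
    "bufio"
    "compress/gzip"
    "encoding/json"
    "fmt"
    "net/http"
)

const dataFeedURL = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-%s.json.gz"

type feed struct {
    Entries []struct {
        CVE struct {
            Metadata struct {
                ID string `json:"ID"`
            } `json:"CVE_data_meta"`
        } `json:"cve"`
    } `json:"CVE_Items"`
}

func main() {
    // "2018" is an assumed feed name; the real appender iterates over dataFeedNames.
    resp, err := http.Get(fmt.Sprintf(dataFeedURL, "2018"))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    gz, err := gzip.NewReader(bufio.NewReader(resp.Body))
    if err != nil {
        panic(err)
    }

    var nvd feed
    if err := json.NewDecoder(gz).Decode(&nvd); err != nil {
        panic(err)
    }
    fmt.Printf("decoded %d CVE entries\n", len(nvd.Entries))
}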