Add publisher datetime and update NVD feed download

This commit is contained in:
ErikThoreson 2018-07-12 16:40:05 -05:00
parent 158bb31b77
commit df1dd5c149
3 changed files with 33 additions and 63 deletions

View File

@ -1,33 +0,0 @@
// Copyright 2017 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package nvd
import "io"
// NestedReadCloser wraps an io.Reader and implements io.ReadCloser by closing every embed
// io.ReadCloser.
// It allows chaining io.ReadCloser together and still keep the ability to close them all in a
// simple manner.
type NestedReadCloser struct {
io.Reader
NestedReadClosers []io.ReadCloser
}
// Close closes the gzip.Reader and the underlying io.ReadCloser.
func (nrc *NestedReadCloser) Close() {
for _, nestedReadCloser := range nrc.NestedReadClosers {
nestedReadCloser.Close()
}
}

View File

@ -38,8 +38,8 @@ import (
)
const (
dataFeedURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
dataFeedMetaURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"
dataFeedURL string = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
dataFeedMetaURL string = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"
appenderName string = "NVD"
@ -57,8 +57,9 @@ type NVDMetadata struct {
}
type NVDmetadataCVSSv2 struct {
Vectors string
Score float64
PublishedDateTime string
Vectors string
Score float64
}
func init() {
@ -87,9 +88,16 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
a.dataFeedHashes = dataFeedHashes
// Parse data feeds.
for dataFeedName, dataFeedReader := range dataFeedReaders {
for dataFeedName, dataFileName := range dataFeedReaders {
f, err := os.Open(dataFileName)
if err != nil {
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not open NVD data file")
return commonerr.ErrCouldNotParse
}
var nvd nvd
if err = xml.NewDecoder(dataFeedReader).Decode(&nvd); err != nil {
r := bufio.NewReader(f)
if err = xml.NewDecoder(r).Decode(&nvd); err != nil {
f.Close()
log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not decode NVD data feed")
return commonerr.ErrCouldNotParse
}
@ -101,8 +109,7 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
a.metadata[nvdEntry.Name] = *metadata
}
}
dataFeedReader.Close()
f.Close()
}
return nil
@ -124,7 +131,7 @@ func (a *appender) Clean() {
os.RemoveAll(a.localPath)
}
func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]NestedReadCloser, map[string]string, error) {
func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]string, map[string]string, error) {
var dataFeedNames []string
for y := 2002; y <= time.Now().Year(); y++ {
dataFeedNames = append(dataFeedNames, strconv.Itoa(y))
@ -143,19 +150,16 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin
dataFeedHashes[dataFeedName] = hash
}
// Create io.Reader for every data feed.
dataFeedReaders := make(map[string]NestedReadCloser)
// Create map containing the name and filename for every data feed.
dataFeedReaders := make(map[string]string)
for _, dataFeedName := range dataFeedNames {
fileName := localPath + dataFeedName + ".xml"
if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
// The hash is known, the disk should contains the feed. Try to read from it.
if localPath != "" {
if f, err := os.Open(fileName); err == nil {
dataFeedReaders[dataFeedName] = NestedReadCloser{
Reader: f,
NestedReadClosers: []io.ReadCloser{f},
}
f.Close()
dataFeedReaders[dataFeedName] = fileName
continue
}
}
@ -176,20 +180,17 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin
// Store it to a file at the same time if possible.
if f, err := os.Create(fileName); err == nil {
nrc := NestedReadCloser{
Reader: io.TeeReader(gr, f),
NestedReadClosers: []io.ReadCloser{r.Body, gr, f},
_, err = io.Copy(f, gr)
if err != nil {
log.WithError(err).Warning("could not stream NVD data feed to filesystem")
}
dataFeedReaders[dataFeedName] = nrc
dataFeedReaders[dataFeedName] = fileName
f.Close()
} else {
nrc := NestedReadCloser{
Reader: gr,
NestedReadClosers: []io.ReadCloser{gr, r.Body},
}
dataFeedReaders[dataFeedName] = nrc
log.WithError(err).Warning("could not store NVD data feed to filesystem")
}
r.Body.Close()
}
}

View File

@ -26,8 +26,9 @@ type nvd struct {
}
type nvdEntry struct {
Name string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
CVSS nvdCVSS `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cvss"`
Name string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
CVSS nvdCVSS `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cvss"`
PublishedDateTime string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 published-datetime"`
}
type nvdCVSS struct {
@ -64,8 +65,9 @@ func init() {
func (n nvdEntry) Metadata() *NVDMetadata {
metadata := &NVDMetadata{
CVSSv2: NVDmetadataCVSSv2{
Vectors: n.CVSS.BaseMetrics.String(),
Score: n.CVSS.BaseMetrics.Score,
PublishedDateTime: n.PublishedDateTime,
Vectors: n.CVSS.BaseMetrics.String(),
Score: n.CVSS.BaseMetrics.Score,
},
}