adding publisher datetime and updating nvd feed download
parent 158bb31b77
commit df1dd5c149
@@ -1,33 +0,0 @@
-// Copyright 2017 clair authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package nvd
-
-import "io"
-
-// NestedReadCloser wraps an io.Reader and implements io.ReadCloser by closing every embed
-// io.ReadCloser.
-// It allows chaining io.ReadCloser together and still keep the ability to close them all in a
-// simple manner.
-type NestedReadCloser struct {
-	io.Reader
-	NestedReadClosers []io.ReadCloser
-}
-
-// Close closes the gzip.Reader and the underlying io.ReadCloser.
-func (nrc *NestedReadCloser) Close() {
-	for _, nestedReadCloser := range nrc.NestedReadClosers {
-		nestedReadCloser.Close()
-	}
-}
@@ -38,8 +38,8 @@ import (
 )

 const (
-	dataFeedURL     string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
-	dataFeedMetaURL string = "http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"
+	dataFeedURL     string = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
+	dataFeedMetaURL string = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"

 	appenderName string = "NVD"
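Note: the `%s` in these URL templates is filled in with a data feed name (a year string) elsewhere in the appender. A tiny illustration of the expansion; the year value here is chosen arbitrarily:

```go
package main

import "fmt"

const (
	dataFeedURL     = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz"
	dataFeedMetaURL = "https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.meta"
)

func main() {
	// Feeds are named after their year; the name is substituted into the template.
	fmt.Printf(dataFeedURL+"\n", "2017")     // ...nvdcve-2.0-2017.xml.gz
	fmt.Printf(dataFeedMetaURL+"\n", "2017") // ...nvdcve-2.0-2017.meta
}
```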
@@ -57,8 +57,9 @@ type NVDMetadata struct {
 }

 type NVDmetadataCVSSv2 struct {
-	Vectors string
-	Score   float64
+	PublishedDateTime string
+	Vectors           string
+	Score             float64
 }

 func init() {
@@ -87,9 +88,16 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
 	a.dataFeedHashes = dataFeedHashes

 	// Parse data feeds.
-	for dataFeedName, dataFeedReader := range dataFeedReaders {
+	for dataFeedName, dataFileName := range dataFeedReaders {
+		f, err := os.Open(dataFileName)
+		if err != nil {
+			log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not open NVD data file")
+			return commonerr.ErrCouldNotParse
+		}
 		var nvd nvd
-		if err = xml.NewDecoder(dataFeedReader).Decode(&nvd); err != nil {
+		r := bufio.NewReader(f)
+		if err = xml.NewDecoder(r).Decode(&nvd); err != nil {
+			f.Close()
 			log.WithError(err).WithField(logDataFeedName, dataFeedName).Error("could not decode NVD data feed")
 			return commonerr.ErrCouldNotParse
 		}
@@ -101,8 +109,7 @@ func (a *appender) BuildCache(datastore database.Datastore) error {
 				a.metadata[nvdEntry.Name] = *metadata
 			}
 		}
-
-		dataFeedReader.Close()
+		f.Close()
 	}

 	return nil
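In isolation, the new parse step in BuildCache boils down to the pattern below. This is a sketch, not the package's code: `parseFeedFile` and the trimmed-down `nvdFeed`/`nvdEntry` types are hypothetical stand-ins, and error handling is reduced to plain returns.

```go
package main

import (
	"bufio"
	"encoding/xml"
	"fmt"
	"os"
)

// Trimmed-down stand-ins for the package's feed types; only the fields
// touched by this commit are sketched here.
type nvdEntry struct {
	Name              string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
	PublishedDateTime string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 published-datetime"`
}

type nvdFeed struct {
	Entries []nvdEntry `xml:"entry"` // matched by local name for simplicity
}

// parseFeedFile mirrors the new flow: open the stored file, wrap it in a
// buffered reader, decode the XML, and close the file when done.
func parseFeedFile(dataFileName string) (*nvdFeed, error) {
	f, err := os.Open(dataFileName)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var feed nvdFeed
	if err := xml.NewDecoder(bufio.NewReader(f)).Decode(&feed); err != nil {
		return nil, err
	}
	return &feed, nil
}

func main() {
	feed, err := parseFeedFile("nvdcve-2.0-2017.xml")
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	fmt.Println("entries:", len(feed.Entries))
}
```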
@@ -124,7 +131,7 @@ func (a *appender) Clean() {
 	os.RemoveAll(a.localPath)
 }

-func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]NestedReadCloser, map[string]string, error) {
+func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[string]string, map[string]string, error) {
 	var dataFeedNames []string
 	for y := 2002; y <= time.Now().Year(); y++ {
 		dataFeedNames = append(dataFeedNames, strconv.Itoa(y))
@@ -143,19 +150,16 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin
 		dataFeedHashes[dataFeedName] = hash
 	}

-	// Create io.Reader for every data feed.
-	dataFeedReaders := make(map[string]NestedReadCloser)
+	// Create map containing the name and filename for every data feed.
+	dataFeedReaders := make(map[string]string)
 	for _, dataFeedName := range dataFeedNames {
 		fileName := localPath + dataFeedName + ".xml"

 		if h, ok := dataFeedHashes[dataFeedName]; ok && h == dataFeedHashes[dataFeedName] {
 			// The hash is known, the disk should contains the feed. Try to read from it.
 			if localPath != "" {
 				if f, err := os.Open(fileName); err == nil {
-					dataFeedReaders[dataFeedName] = NestedReadCloser{
-						Reader:            f,
-						NestedReadClosers: []io.ReadCloser{f},
-					}
+					f.Close()
+					dataFeedReaders[dataFeedName] = fileName
 					continue
 				}
 			}
@@ -176,20 +180,17 @@ func getDataFeeds(dataFeedHashes map[string]string, localPath string) (map[strin

 			// Store it to a file at the same time if possible.
 			if f, err := os.Create(fileName); err == nil {
-				nrc := NestedReadCloser{
-					Reader:            io.TeeReader(gr, f),
-					NestedReadClosers: []io.ReadCloser{r.Body, gr, f},
+				_, err = io.Copy(f, gr)
+				if err != nil {
+					log.WithError(err).Warning("could not stream NVD data feed to filesystem")
 				}
-				dataFeedReaders[dataFeedName] = nrc
+				dataFeedReaders[dataFeedName] = fileName
+				f.Close()
 			} else {
-				nrc := NestedReadCloser{
-					Reader:            gr,
-					NestedReadClosers: []io.ReadCloser{gr, r.Body},
-				}
-				dataFeedReaders[dataFeedName] = nrc

 				log.WithError(err).Warning("could not store NVD data feed to filesystem")
 			}

+			r.Body.Close()
 		}
 	}
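The download side of getDataFeeds now streams each gzipped feed to disk and hands back file names instead of open readers, which is what made NestedReadCloser unnecessary. A self-contained sketch of that pattern; `downloadFeed` is a hypothetical helper name and error handling is simplified:

```go
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"net/http"
	"os"
)

// downloadFeed fetches one gzipped NVD feed, decompresses it, and stores
// the XML on disk so callers can reopen it later by file name.
func downloadFeed(url, fileName string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	gr, err := gzip.NewReader(resp.Body)
	if err != nil {
		return err
	}
	defer gr.Close()

	f, err := os.Create(fileName)
	if err != nil {
		return err
	}
	defer f.Close()

	// Stream the decompressed feed straight to the file instead of
	// keeping a chain of readers open.
	_, err = io.Copy(f, gr)
	return err
}

func main() {
	url := fmt.Sprintf("https://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%s.xml.gz", "2017")
	if err := downloadFeed(url, os.TempDir()+"/nvd-2017.xml"); err != nil {
		fmt.Println("download failed:", err)
	}
}
```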
@@ -26,8 +26,9 @@ type nvd struct {
 }

 type nvdEntry struct {
 	Name string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
 	CVSS nvdCVSS `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cvss"`
+	PublishedDateTime string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 published-datetime"`
 }

 type nvdCVSS struct {
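For reference, a minimal standalone example of how encoding/xml maps the namespace-qualified published-datetime element onto the new field; the XML fragment is invented for illustration and is not a real feed entry:

```go
package main

import (
	"encoding/xml"
	"fmt"
)

type nvdEntry struct {
	Name              string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 cve-id"`
	PublishedDateTime string `xml:"http://scap.nist.gov/schema/vulnerability/0.4 published-datetime"`
}

func main() {
	// Invented fragment shaped like an NVD 2.0 feed entry.
	data := `<entry xmlns:vuln="http://scap.nist.gov/schema/vulnerability/0.4">
  <vuln:cve-id>CVE-2017-0000</vuln:cve-id>
  <vuln:published-datetime>2017-05-12T14:29:01.187-04:00</vuln:published-datetime>
</entry>`

	var e nvdEntry
	if err := xml.Unmarshal([]byte(data), &e); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(e.Name, e.PublishedDateTime)
	// CVE-2017-0000 2017-05-12T14:29:01.187-04:00
}
```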
@@ -64,8 +65,9 @@ func init() {
 func (n nvdEntry) Metadata() *NVDMetadata {
 	metadata := &NVDMetadata{
 		CVSSv2: NVDmetadataCVSSv2{
-			Vectors: n.CVSS.BaseMetrics.String(),
-			Score:   n.CVSS.BaseMetrics.Score,
+			PublishedDateTime: n.PublishedDateTime,
+			Vectors:           n.CVSS.BaseMetrics.String(),
+			Score:             n.CVSS.BaseMetrics.Score,
 		},
 	}
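PublishedDateTime is carried through as the raw string from the feed; consumers that want a time.Time can parse it themselves. A sketch, assuming the RFC 3339-style layout with fractional seconds that the 2.0 XML feeds use; the sample value is invented:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Invented sample value; time.Parse accepts the fractional seconds
	// even though time.RFC3339 does not spell them out in the layout.
	published := "2017-05-12T14:29:01.187-04:00"

	t, err := time.Parse(time.RFC3339, published)
	if err != nil {
		fmt.Println("could not parse published-datetime:", err)
		return
	}
	fmt.Println(t.UTC())
}
```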