API: drop the v1 API, change the v2 API for Clair v3
parent 6c9a131b09
commit a378cb070c

api/api.go (37 changed lines)
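For orientation, a minimal sketch of what this change means for API consumers: layers are no longer POSTed one by one to the v1 REST endpoints removed below; an image is submitted as a whole ancestry through the gRPC API using the PostAncestryRequest message that appears in the regenerated clairpb code later in this diff. The client and service names (NewAncestryServiceClient, PostAncestry), the import path, and every concrete value are assumptions for illustration and are not confirmed by this commit.

package main

import (
    "context"
    "log"

    "google.golang.org/grpc"

    // Assumed import path for the generated package shown later in this diff.
    "github.com/coreos/clair/api/v2/clairpb"
)

func main() {
    // Plaintext connection for brevity; the server side in this diff can also
    // be configured with TLS and client certificate authentication.
    conn, err := grpc.Dial("localhost:6060", grpc.WithInsecure())
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    // NewAncestryServiceClient / PostAncestry are assumed names for the
    // generated client; only the request and response messages appear in
    // this excerpt.
    client := clairpb.NewAncestryServiceClient(conn)

    req := &clairpb.PostAncestryRequest{
        AncestryName: "registry.example.com/app:1.0",
        Format:       "Docker",
        Layers: []*clairpb.PostAncestryRequest_PostLayer{
            {
                Hash:    "sha256:0123456789abcdef", // layer digest (illustrative)
                Path:    "https://registry.example.com/v2/app/blobs/sha256:0123456789abcdef",
                Headers: map[string]string{"Authorization": "Bearer <token>"},
            },
        },
    }

    if _, err := client.PostAncestry(context.Background(), req); err != nil {
        log.Fatal(err)
    }
}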
@@ -35,11 +35,9 @@ const timeoutResponse = `{"Error":{"Message":"Clair failed to respond within the

// Config is the configuration for the API service.
type Config struct {
    Port                      int
    GrpcPort                  int
    HealthPort                int
    Timeout                   time.Duration
    PaginationKey             string
    CertFile, KeyFile, CAFile string
}

@@ -51,40 +49,7 @@ func RunV2(cfg *Config, store database.Datastore) {
    if tlsConfig != nil {
        log.Info("main API configured with client certificate authentication")
    }
    v2.Run(cfg.GrpcPort, tlsConfig, cfg.PaginationKey, cfg.CertFile, cfg.KeyFile, store)
}

func Run(cfg *Config, store database.Datastore, st *stopper.Stopper) {
    defer st.End()

    // Do not run the API service if there is no config.
    if cfg == nil {
        log.Info("main API service is disabled.")
        return
    }
    log.WithField("port", cfg.Port).Info("starting main API")

    tlsConfig, err := tlsClientConfig(cfg.CAFile)
    if err != nil {
        log.WithError(err).Fatal("could not initialize client cert authentication")
    }
    if tlsConfig != nil {
        log.Info("main API configured with client certificate authentication")
    }

    srv := &graceful.Server{
        Timeout:          0,    // Already handled by our TimeOut middleware
        NoSignalHandling: true, // We want to use our own Stopper
        Server: &http.Server{
            Addr:      ":" + strconv.Itoa(cfg.Port),
            TLSConfig: tlsConfig,
            Handler:   http.TimeoutHandler(newAPIHandler(cfg, store), cfg.Timeout, timeoutResponse),
        },
    }

    listenAndServeWithStopper(srv, st, cfg.CertFile, cfg.KeyFile)

    log.Info("main API stopped")
    v2.Run(cfg.GrpcPort, tlsConfig, cfg.CertFile, cfg.KeyFile, store)
}

func RunHealth(cfg *Config, store database.Datastore, st *stopper.Stopper) {

@@ -16,13 +16,9 @@ package api

import (
    "net/http"
    "strings"

    "github.com/julienschmidt/httprouter"
    log "github.com/sirupsen/logrus"

    "github.com/coreos/clair/api/httputil"
    "github.com/coreos/clair/api/v1"
    "github.com/coreos/clair/database"
)

@@ -30,34 +26,6 @@ import (
// depending on the API version specified in the request URI.
type router map[string]*httprouter.Router

// Let's hope we never have more than 99 API versions.
const apiVersionLength = len("v99")

func newAPIHandler(cfg *Config, store database.Datastore) http.Handler {
    router := make(router)
    router["/v1"] = v1.NewRouter(store, cfg.PaginationKey)
    return router
}

func (rtr router) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    urlStr := r.URL.String()
    var version string
    if len(urlStr) >= apiVersionLength {
        version = urlStr[:apiVersionLength]
    }

    if router, _ := rtr[version]; router != nil {
        // Remove the version number from the request path to let the router do its
        // job but do not update the RequestURI
        r.URL.Path = strings.Replace(r.URL.Path, version, "", 1)
        router.ServeHTTP(w, r)
        return
    }

    log.WithFields(log.Fields{"status": http.StatusNotFound, "method": r.Method, "request uri": r.RequestURI, "remote addr": httputil.GetClientAddr(r)}).Info("Served HTTP request")
    http.NotFound(w, r)
}

func newHealthHandler(store database.Datastore) http.Handler {
    router := httprouter.New()
    router.GET("/health", healthHandler(store))
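As a rough usage sketch, assuming the surrounding wiring in cmd/clair that this diff does not show: the Config kept above now feeds RunV2, which serves the gRPC API. Only fields visible in the hunks above are set, and every value is illustrative.

package main

import (
    "github.com/coreos/clair/api"
    "github.com/coreos/clair/database"
)

func main() {
    // The datastore comes from whichever database driver is configured;
    // its setup is outside the scope of this diff.
    var store database.Datastore

    cfg := &api.Config{
        GrpcPort:   6060,
        HealthPort: 6061,
        CertFile:   "/etc/clair/server.crt",
        KeyFile:    "/etc/clair/server.key",
        CAFile:     "/etc/clair/ca.crt",
    }

    // RunV2 (kept by this commit) serves the gRPC API with the configured
    // TLS material; the v1 HTTP server is what the diff removes.
    api.RunV2(cfg, store)
}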
api/v1/models.go (317 lines removed)

@@ -1,317 +0,0 @@
// Copyright 2017 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v1

import (
    "fmt"

    "github.com/coreos/clair/api/token"
    "github.com/coreos/clair/database"
    "github.com/coreos/clair/ext/versionfmt"
)

type Error struct {
    Message string `json:"Message,omitempty"`
}

type Layer struct {
    Name string `json:"Name,omitempty"`
    NamespaceNames []string `json:"NamespaceNames,omitempty"`
    Path string `json:"Path,omitempty"`
    Headers map[string]string `json:"Headers,omitempty"`
    ParentName string `json:"ParentName,omitempty"`
    Format string `json:"Format,omitempty"`
    IndexedByVersion int `json:"IndexedByVersion,omitempty"`
    Features []Feature `json:"Features,omitempty"`
}

func LayerFromDatabaseModel(dbLayer database.Layer, withFeatures, withVulnerabilities bool) Layer {
    layer := Layer{
        Name:             dbLayer.Name,
        IndexedByVersion: dbLayer.EngineVersion,
    }

    if dbLayer.Parent != nil {
        layer.ParentName = dbLayer.Parent.Name
    }

    for _, ns := range dbLayer.Namespaces {
        layer.NamespaceNames = append(layer.NamespaceNames, ns.Name)
    }

    if withFeatures || withVulnerabilities && dbLayer.Features != nil {
        for _, dbFeatureVersion := range dbLayer.Features {
            feature := Feature{
                Name:          dbFeatureVersion.Feature.Name,
                NamespaceName: dbFeatureVersion.Feature.Namespace.Name,
                VersionFormat: dbFeatureVersion.Feature.Namespace.VersionFormat,
                Version:       dbFeatureVersion.Version,
                AddedBy:       dbFeatureVersion.AddedBy.Name,
            }

            for _, dbVuln := range dbFeatureVersion.AffectedBy {
                vuln := Vulnerability{
                    Name:          dbVuln.Name,
                    NamespaceName: dbVuln.Namespace.Name,
                    Description:   dbVuln.Description,
                    Link:          dbVuln.Link,
                    Severity:      string(dbVuln.Severity),
                    Metadata:      dbVuln.Metadata,
                }

                if dbVuln.FixedBy != versionfmt.MaxVersion {
                    vuln.FixedBy = dbVuln.FixedBy
                }
                feature.Vulnerabilities = append(feature.Vulnerabilities, vuln)
            }
            layer.Features = append(layer.Features, feature)
        }
    }

    return layer
}

type Namespace struct {
    Name string `json:"Name,omitempty"`
    VersionFormat string `json:"VersionFormat,omitempty"`
}

type Vulnerability struct {
    Name string `json:"Name,omitempty"`
    NamespaceName string `json:"NamespaceName,omitempty"`
    Description string `json:"Description,omitempty"`
    Link string `json:"Link,omitempty"`
    Severity string `json:"Severity,omitempty"`
    Metadata map[string]interface{} `json:"Metadata,omitempty"`
    FixedBy string `json:"FixedBy,omitempty"`
    FixedIn []Feature `json:"FixedIn,omitempty"`
}

func (v Vulnerability) DatabaseModel() (database.Vulnerability, error) {
    severity, err := database.NewSeverity(v.Severity)
    if err != nil {
        return database.Vulnerability{}, err
    }

    var dbFeatures []database.FeatureVersion
    for _, feature := range v.FixedIn {
        dbFeature, err := feature.DatabaseModel()
        if err != nil {
            return database.Vulnerability{}, err
        }

        dbFeatures = append(dbFeatures, dbFeature)
    }

    return database.Vulnerability{
        Name:        v.Name,
        Namespace:   database.Namespace{Name: v.NamespaceName},
        Description: v.Description,
        Link:        v.Link,
        Severity:    severity,
        Metadata:    v.Metadata,
        FixedIn:     dbFeatures,
    }, nil
}

func VulnerabilityFromDatabaseModel(dbVuln database.Vulnerability, withFixedIn bool) Vulnerability {
    vuln := Vulnerability{
        Name:          dbVuln.Name,
        NamespaceName: dbVuln.Namespace.Name,
        Description:   dbVuln.Description,
        Link:          dbVuln.Link,
        Severity:      string(dbVuln.Severity),
        Metadata:      dbVuln.Metadata,
    }

    if withFixedIn {
        for _, dbFeatureVersion := range dbVuln.FixedIn {
            vuln.FixedIn = append(vuln.FixedIn, FeatureFromDatabaseModel(dbFeatureVersion))
        }
    }

    return vuln
}

type Feature struct {
    Name string `json:"Name,omitempty"`
    NamespaceName string `json:"NamespaceName,omitempty"`
    VersionFormat string `json:"VersionFormat,omitempty"`
    Version string `json:"Version,omitempty"`
    Vulnerabilities []Vulnerability `json:"Vulnerabilities,omitempty"`
    AddedBy string `json:"AddedBy,omitempty"`
}

func FeatureFromDatabaseModel(dbFeatureVersion database.FeatureVersion) Feature {
    version := dbFeatureVersion.Version
    if version == versionfmt.MaxVersion {
        version = "None"
    }

    return Feature{
        Name:          dbFeatureVersion.Feature.Name,
        NamespaceName: dbFeatureVersion.Feature.Namespace.Name,
        VersionFormat: dbFeatureVersion.Feature.Namespace.VersionFormat,
        Version:       version,
        AddedBy:       dbFeatureVersion.AddedBy.Name,
    }
}

func (f Feature) DatabaseModel() (fv database.FeatureVersion, err error) {
    var version string
    if f.Version == "None" {
        version = versionfmt.MaxVersion
    } else {
        err = versionfmt.Valid(f.VersionFormat, f.Version)
        if err != nil {
            return
        }
        version = f.Version
    }

    fv = database.FeatureVersion{
        Feature: database.Feature{
            Name: f.Name,
            Namespace: database.Namespace{
                Name:          f.NamespaceName,
                VersionFormat: f.VersionFormat,
            },
        },
        Version: version,
    }

    return
}

type Notification struct {
    Name string `json:"Name,omitempty"`
    Created string `json:"Created,omitempty"`
    Notified string `json:"Notified,omitempty"`
    Deleted string `json:"Deleted,omitempty"`
    Limit int `json:"Limit,omitempty"`
    Page string `json:"Page,omitempty"`
    NextPage string `json:"NextPage,omitempty"`
    Old *VulnerabilityWithLayers `json:"Old,omitempty"`
    New *VulnerabilityWithLayers `json:"New,omitempty"`
}

func NotificationFromDatabaseModel(dbNotification database.VulnerabilityNotification, limit int, pageToken string, nextPage database.VulnerabilityNotificationPageNumber, key string) Notification {
    var oldVuln *VulnerabilityWithLayers
    if dbNotification.OldVulnerability != nil {
        v := VulnerabilityWithLayersFromDatabaseModel(*dbNotification.OldVulnerability)
        oldVuln = &v
    }

    var newVuln *VulnerabilityWithLayers
    if dbNotification.NewVulnerability != nil {
        v := VulnerabilityWithLayersFromDatabaseModel(*dbNotification.NewVulnerability)
        newVuln = &v
    }

    var nextPageStr string
    if nextPage != database.NoVulnerabilityNotificationPage {
        nextPageBytes, _ := token.Marshal(nextPage, key)
        nextPageStr = string(nextPageBytes)
    }

    var created, notified, deleted string
    if !dbNotification.Created.IsZero() {
        created = fmt.Sprintf("%d", dbNotification.Created.Unix())
    }
    if !dbNotification.Notified.IsZero() {
        notified = fmt.Sprintf("%d", dbNotification.Notified.Unix())
    }
    if !dbNotification.Deleted.IsZero() {
        deleted = fmt.Sprintf("%d", dbNotification.Deleted.Unix())
    }

    // TODO(jzelinskie): implement "changed" key
    fmt.Println(dbNotification.Deleted.IsZero())
    return Notification{
        Name:     dbNotification.Name,
        Created:  created,
        Notified: notified,
        Deleted:  deleted,
        Limit:    limit,
        Page:     pageToken,
        NextPage: nextPageStr,
        Old:      oldVuln,
        New:      newVuln,
    }
}

type VulnerabilityWithLayers struct {
    Vulnerability *Vulnerability `json:"Vulnerability,omitempty"`

    // This field is guaranteed to be in order only for pagination.
    // Indices from different notifications may not be comparable.
    OrderedLayersIntroducingVulnerability []OrderedLayerName `json:"OrderedLayersIntroducingVulnerability,omitempty"`

    // This field is deprecated.
    LayersIntroducingVulnerability []string `json:"LayersIntroducingVulnerability,omitempty"`
}

type OrderedLayerName struct {
    Index int `json:"Index"`
    LayerName string `json:"LayerName"`
}

func VulnerabilityWithLayersFromDatabaseModel(dbVuln database.Vulnerability) VulnerabilityWithLayers {
    vuln := VulnerabilityFromDatabaseModel(dbVuln, true)

    var layers []string
    var orderedLayers []OrderedLayerName
    for _, layer := range dbVuln.LayersIntroducingVulnerability {
        layers = append(layers, layer.Name)
        orderedLayers = append(orderedLayers, OrderedLayerName{
            Index:     layer.ID,
            LayerName: layer.Name,
        })
    }

    return VulnerabilityWithLayers{
        Vulnerability:                         &vuln,
        OrderedLayersIntroducingVulnerability: orderedLayers,
        LayersIntroducingVulnerability:        layers,
    }
}

type LayerEnvelope struct {
    Layer *Layer `json:"Layer,omitempty"`
    Error *Error `json:"Error,omitempty"`
}

type NamespaceEnvelope struct {
    Namespaces *[]Namespace `json:"Namespaces,omitempty"`
    Error *Error `json:"Error,omitempty"`
}

type VulnerabilityEnvelope struct {
    Vulnerability *Vulnerability `json:"Vulnerability,omitempty"`
    Vulnerabilities *[]Vulnerability `json:"Vulnerabilities,omitempty"`
    NextPage string `json:"NextPage,omitempty"`
    Error *Error `json:"Error,omitempty"`
}

type NotificationEnvelope struct {
    Notification *Notification `json:"Notification,omitempty"`
    Error *Error `json:"Error,omitempty"`
}

type FeatureEnvelope struct {
    Feature *Feature `json:"Feature,omitempty"`
    Features *[]Feature `json:"Features,omitempty"`
    Error *Error `json:"Error,omitempty"`
}
api/v1/router.go (100 lines removed)

@@ -1,100 +0,0 @@
// Copyright 2015 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package v1 implements the first version of the Clair API.
package v1

import (
    "net/http"
    "strconv"
    "time"

    "github.com/julienschmidt/httprouter"
    "github.com/prometheus/client_golang/prometheus"
    log "github.com/sirupsen/logrus"

    "github.com/coreos/clair/api/httputil"
    "github.com/coreos/clair/database"
)

var (
    promResponseDurationMilliseconds = prometheus.NewHistogramVec(prometheus.HistogramOpts{
        Name:    "clair_api_response_duration_milliseconds",
        Help:    "The duration of time it takes to receieve and write a response to an API request",
        Buckets: prometheus.ExponentialBuckets(9.375, 2, 10),
    }, []string{"route", "code"})
)

func init() {
    prometheus.MustRegister(promResponseDurationMilliseconds)
}

type handler func(http.ResponseWriter, *http.Request, httprouter.Params, *context) (route string, status int)

func httpHandler(h handler, ctx *context) httprouter.Handle {
    return func(w http.ResponseWriter, r *http.Request, p httprouter.Params) {
        start := time.Now()
        route, status := h(w, r, p, ctx)
        statusStr := strconv.Itoa(status)
        if status == 0 {
            statusStr = "???"
        }

        promResponseDurationMilliseconds.
            WithLabelValues(route, statusStr).
            Observe(float64(time.Since(start).Nanoseconds()) / float64(time.Millisecond))

        log.WithFields(log.Fields{"remote addr": httputil.GetClientAddr(r), "method": r.Method, "request uri": r.RequestURI, "status": statusStr, "elapsed time": time.Since(start)}).Info("Handled HTTP request")
    }
}

type context struct {
    Store         database.Datastore
    PaginationKey string
}

// NewRouter creates an HTTP router for version 1 of the Clair API.
func NewRouter(store database.Datastore, paginationKey string) *httprouter.Router {
    router := httprouter.New()
    ctx := &context{store, paginationKey}

    // Layers
    router.POST("/layers", httpHandler(postLayer, ctx))
    router.GET("/layers/:layerName", httpHandler(getLayer, ctx))
    router.DELETE("/layers/:layerName", httpHandler(deleteLayer, ctx))

    // Namespaces
    router.GET("/namespaces", httpHandler(getNamespaces, ctx))

    // Vulnerabilities
    router.GET("/namespaces/:namespaceName/vulnerabilities", httpHandler(getVulnerabilities, ctx))
    router.POST("/namespaces/:namespaceName/vulnerabilities", httpHandler(postVulnerability, ctx))
    router.GET("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName", httpHandler(getVulnerability, ctx))
    router.PUT("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName", httpHandler(putVulnerability, ctx))
    router.DELETE("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName", httpHandler(deleteVulnerability, ctx))

    // Fixes
    router.GET("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName/fixes", httpHandler(getFixes, ctx))
    router.PUT("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName/fixes/:fixName", httpHandler(putFix, ctx))
    router.DELETE("/namespaces/:namespaceName/vulnerabilities/:vulnerabilityName/fixes/:fixName", httpHandler(deleteFix, ctx))

    // Notifications
    router.GET("/notifications/:notificationName", httpHandler(getNotification, ctx))
    router.DELETE("/notifications/:notificationName", httpHandler(deleteNotification, ctx))

    // Metrics
    router.GET("/metrics", httpHandler(getMetrics, ctx))

    return router
}
api/v1/routes.go (503 lines removed)

@@ -1,503 +0,0 @@
// Copyright 2015 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v1

import (
    "compress/gzip"
    "encoding/json"
    "io"
    "net/http"
    "strconv"
    "strings"

    "github.com/julienschmidt/httprouter"
    "github.com/prometheus/client_golang/prometheus"
    log "github.com/sirupsen/logrus"

    "github.com/coreos/clair"
    "github.com/coreos/clair/api/token"
    "github.com/coreos/clair/database"
    "github.com/coreos/clair/pkg/commonerr"
    "github.com/coreos/clair/pkg/tarutil"
)

const (
    // These are the route identifiers for prometheus.
    postLayerRoute           = "v1/postLayer"
    getLayerRoute            = "v1/getLayer"
    deleteLayerRoute         = "v1/deleteLayer"
    getNamespacesRoute       = "v1/getNamespaces"
    getVulnerabilitiesRoute  = "v1/getVulnerabilities"
    postVulnerabilityRoute   = "v1/postVulnerability"
    getVulnerabilityRoute    = "v1/getVulnerability"
    putVulnerabilityRoute    = "v1/putVulnerability"
    deleteVulnerabilityRoute = "v1/deleteVulnerability"
    getFixesRoute            = "v1/getFixes"
    putFixRoute              = "v1/putFix"
    deleteFixRoute           = "v1/deleteFix"
    getNotificationRoute     = "v1/getNotification"
    deleteNotificationRoute  = "v1/deleteNotification"
    getMetricsRoute          = "v1/getMetrics"

    // maxBodySize restricts client request bodies to 1MiB.
    maxBodySize int64 = 1048576

    // statusUnprocessableEntity represents the 422 (Unprocessable Entity) status code, which means
    // the server understands the content type of the request entity
    // (hence a 415(Unsupported Media Type) status code is inappropriate), and the syntax of the
    // request entity is correct (thus a 400 (Bad Request) status code is inappropriate) but was
    // unable to process the contained instructions.
    statusUnprocessableEntity = 422
)

func decodeJSON(r *http.Request, v interface{}) error {
    defer r.Body.Close()
    return json.NewDecoder(io.LimitReader(r.Body, maxBodySize)).Decode(v)
}

func writeResponse(w http.ResponseWriter, r *http.Request, status int, resp interface{}) {
    // Headers must be written before the response.
    header := w.Header()
    header.Set("Content-Type", "application/json;charset=utf-8")
    header.Set("Server", "clair")

    // Gzip the response if the client supports it.
    var writer io.Writer = w
    if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
        gzipWriter := gzip.NewWriter(w)
        defer gzipWriter.Close()
        writer = gzipWriter

        header.Set("Content-Encoding", "gzip")
    }

    // Write the response.
    w.WriteHeader(status)
    err := json.NewEncoder(writer).Encode(resp)

    if err != nil {
        switch err.(type) {
        case *json.MarshalerError, *json.UnsupportedTypeError, *json.UnsupportedValueError:
            panic("v1: failed to marshal response: " + err.Error())
        default:
            log.WithError(err).Warning("failed to write response")
        }
    }
}

func postLayer(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    request := LayerEnvelope{}
    err := decodeJSON(r, &request)
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, LayerEnvelope{Error: &Error{err.Error()}})
        return postLayerRoute, http.StatusBadRequest
    }

    if request.Layer == nil {
        writeResponse(w, r, http.StatusBadRequest, LayerEnvelope{Error: &Error{"failed to provide layer"}})
        return postLayerRoute, http.StatusBadRequest
    }

    err = clair.ProcessLayer(ctx.Store, request.Layer.Format, request.Layer.Name, request.Layer.ParentName, request.Layer.Path, request.Layer.Headers)
    if err != nil {
        if err == tarutil.ErrCouldNotExtract ||
            err == tarutil.ErrExtractedFileTooBig ||
            err == clair.ErrUnsupported {
            writeResponse(w, r, statusUnprocessableEntity, LayerEnvelope{Error: &Error{err.Error()}})
            return postLayerRoute, statusUnprocessableEntity
        }

        if _, badreq := err.(*commonerr.ErrBadRequest); badreq {
            writeResponse(w, r, http.StatusBadRequest, LayerEnvelope{Error: &Error{err.Error()}})
            return postLayerRoute, http.StatusBadRequest
        }

        writeResponse(w, r, http.StatusInternalServerError, LayerEnvelope{Error: &Error{err.Error()}})
        return postLayerRoute, http.StatusInternalServerError
    }

    writeResponse(w, r, http.StatusCreated, LayerEnvelope{Layer: &Layer{
        Name:             request.Layer.Name,
        ParentName:       request.Layer.ParentName,
        Path:             request.Layer.Path,
        Headers:          request.Layer.Headers,
        Format:           request.Layer.Format,
        IndexedByVersion: clair.Version,
    }})
    return postLayerRoute, http.StatusCreated
}

func getLayer(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    _, withFeatures := r.URL.Query()["features"]
    _, withVulnerabilities := r.URL.Query()["vulnerabilities"]

    dbLayer, err := ctx.Store.FindLayer(p.ByName("layerName"), withFeatures, withVulnerabilities)
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, LayerEnvelope{Error: &Error{err.Error()}})
        return getLayerRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, LayerEnvelope{Error: &Error{err.Error()}})
        return getLayerRoute, http.StatusInternalServerError
    }

    layer := LayerFromDatabaseModel(dbLayer, withFeatures, withVulnerabilities)

    writeResponse(w, r, http.StatusOK, LayerEnvelope{Layer: &layer})
    return getLayerRoute, http.StatusOK
}

func deleteLayer(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    err := ctx.Store.DeleteLayer(p.ByName("layerName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, LayerEnvelope{Error: &Error{err.Error()}})
        return deleteLayerRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, LayerEnvelope{Error: &Error{err.Error()}})
        return deleteLayerRoute, http.StatusInternalServerError
    }

    w.WriteHeader(http.StatusOK)
    return deleteLayerRoute, http.StatusOK
}

func getNamespaces(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    dbNamespaces, err := ctx.Store.ListNamespaces()
    if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, NamespaceEnvelope{Error: &Error{err.Error()}})
        return getNamespacesRoute, http.StatusInternalServerError
    }
    var namespaces []Namespace
    for _, dbNamespace := range dbNamespaces {
        namespaces = append(namespaces, Namespace{
            Name:          dbNamespace.Name,
            VersionFormat: dbNamespace.VersionFormat,
        })
    }

    writeResponse(w, r, http.StatusOK, NamespaceEnvelope{Namespaces: &namespaces})
    return getNamespacesRoute, http.StatusOK
}

func getVulnerabilities(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    query := r.URL.Query()

    limitStrs, limitExists := query["limit"]
    if !limitExists {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"must provide limit query parameter"}})
        return getVulnerabilitiesRoute, http.StatusBadRequest
    }
    limit, err := strconv.Atoi(limitStrs[0])
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"invalid limit format: " + err.Error()}})
        return getVulnerabilitiesRoute, http.StatusBadRequest
    } else if limit < 0 {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"limit value should not be less than zero"}})
        return getVulnerabilitiesRoute, http.StatusBadRequest
    }

    page := 0
    pageStrs, pageExists := query["page"]
    if pageExists {
        err = token.Unmarshal(pageStrs[0], ctx.PaginationKey, &page)
        if err != nil {
            writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"invalid page format: " + err.Error()}})
            return getNotificationRoute, http.StatusBadRequest
        }
    }

    namespace := p.ByName("namespaceName")
    if namespace == "" {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"namespace should not be empty"}})
        return getNotificationRoute, http.StatusBadRequest
    }

    dbVulns, nextPage, err := ctx.Store.ListVulnerabilities(namespace, limit, page)
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return getVulnerabilityRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return getVulnerabilitiesRoute, http.StatusInternalServerError
    }

    var vulns []Vulnerability
    for _, dbVuln := range dbVulns {
        vuln := VulnerabilityFromDatabaseModel(dbVuln, false)
        vulns = append(vulns, vuln)
    }

    var nextPageStr string
    if nextPage != -1 {
        nextPageBytes, err := token.Marshal(nextPage, ctx.PaginationKey)
        if err != nil {
            writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"failed to marshal token: " + err.Error()}})
            return getNotificationRoute, http.StatusBadRequest
        }
        nextPageStr = string(nextPageBytes)
    }

    writeResponse(w, r, http.StatusOK, VulnerabilityEnvelope{Vulnerabilities: &vulns, NextPage: nextPageStr})
    return getVulnerabilitiesRoute, http.StatusOK
}

func postVulnerability(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    request := VulnerabilityEnvelope{}
    err := decodeJSON(r, &request)
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return postVulnerabilityRoute, http.StatusBadRequest
    }

    if request.Vulnerability == nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"failed to provide vulnerability"}})
        return postVulnerabilityRoute, http.StatusBadRequest
    }

    vuln, err := request.Vulnerability.DatabaseModel()
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return postVulnerabilityRoute, http.StatusBadRequest
    }

    err = ctx.Store.InsertVulnerabilities([]database.Vulnerability{vuln}, true)
    if err != nil {
        switch err.(type) {
        case *commonerr.ErrBadRequest:
            writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
            return postVulnerabilityRoute, http.StatusBadRequest
        default:
            writeResponse(w, r, http.StatusInternalServerError, VulnerabilityEnvelope{Error: &Error{err.Error()}})
            return postVulnerabilityRoute, http.StatusInternalServerError
        }
    }

    writeResponse(w, r, http.StatusCreated, VulnerabilityEnvelope{Vulnerability: request.Vulnerability})
    return postVulnerabilityRoute, http.StatusCreated
}

func getVulnerability(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    _, withFixedIn := r.URL.Query()["fixedIn"]

    dbVuln, err := ctx.Store.FindVulnerability(p.ByName("namespaceName"), p.ByName("vulnerabilityName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return getVulnerabilityRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return getVulnerabilityRoute, http.StatusInternalServerError
    }

    vuln := VulnerabilityFromDatabaseModel(dbVuln, withFixedIn)

    writeResponse(w, r, http.StatusOK, VulnerabilityEnvelope{Vulnerability: &vuln})
    return getVulnerabilityRoute, http.StatusOK
}

func putVulnerability(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    request := VulnerabilityEnvelope{}
    err := decodeJSON(r, &request)
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return putVulnerabilityRoute, http.StatusBadRequest
    }

    if request.Vulnerability == nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"failed to provide vulnerability"}})
        return putVulnerabilityRoute, http.StatusBadRequest
    }

    if len(request.Vulnerability.FixedIn) != 0 {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{"Vulnerability.FixedIn must be empty"}})
        return putVulnerabilityRoute, http.StatusBadRequest
    }

    vuln, err := request.Vulnerability.DatabaseModel()
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return putVulnerabilityRoute, http.StatusBadRequest
    }

    vuln.Namespace.Name = p.ByName("namespaceName")
    vuln.Name = p.ByName("vulnerabilityName")

    err = ctx.Store.InsertVulnerabilities([]database.Vulnerability{vuln}, true)
    if err != nil {
        switch err.(type) {
        case *commonerr.ErrBadRequest:
            writeResponse(w, r, http.StatusBadRequest, VulnerabilityEnvelope{Error: &Error{err.Error()}})
            return putVulnerabilityRoute, http.StatusBadRequest
        default:
            writeResponse(w, r, http.StatusInternalServerError, VulnerabilityEnvelope{Error: &Error{err.Error()}})
            return putVulnerabilityRoute, http.StatusInternalServerError
        }
    }

    writeResponse(w, r, http.StatusOK, VulnerabilityEnvelope{Vulnerability: request.Vulnerability})
    return putVulnerabilityRoute, http.StatusOK
}

func deleteVulnerability(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    err := ctx.Store.DeleteVulnerability(p.ByName("namespaceName"), p.ByName("vulnerabilityName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return deleteVulnerabilityRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, VulnerabilityEnvelope{Error: &Error{err.Error()}})
        return deleteVulnerabilityRoute, http.StatusInternalServerError
    }

    w.WriteHeader(http.StatusOK)
    return deleteVulnerabilityRoute, http.StatusOK
}

func getFixes(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    dbVuln, err := ctx.Store.FindVulnerability(p.ByName("namespaceName"), p.ByName("vulnerabilityName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, FeatureEnvelope{Error: &Error{err.Error()}})
        return getFixesRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, FeatureEnvelope{Error: &Error{err.Error()}})
        return getFixesRoute, http.StatusInternalServerError
    }

    vuln := VulnerabilityFromDatabaseModel(dbVuln, true)
    writeResponse(w, r, http.StatusOK, FeatureEnvelope{Features: &vuln.FixedIn})
    return getFixesRoute, http.StatusOK
}

func putFix(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    request := FeatureEnvelope{}
    err := decodeJSON(r, &request)
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, FeatureEnvelope{Error: &Error{err.Error()}})
        return putFixRoute, http.StatusBadRequest
    }

    if request.Feature == nil {
        writeResponse(w, r, http.StatusBadRequest, FeatureEnvelope{Error: &Error{"failed to provide feature"}})
        return putFixRoute, http.StatusBadRequest
    }

    if request.Feature.Name != p.ByName("fixName") {
        writeResponse(w, r, http.StatusBadRequest, FeatureEnvelope{Error: &Error{"feature name in URL and JSON do not match"}})
        return putFixRoute, http.StatusBadRequest
    }

    dbFix, err := request.Feature.DatabaseModel()
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, FeatureEnvelope{Error: &Error{err.Error()}})
        return putFixRoute, http.StatusBadRequest
    }

    err = ctx.Store.InsertVulnerabilityFixes(p.ByName("vulnerabilityNamespace"), p.ByName("vulnerabilityName"), []database.FeatureVersion{dbFix})
    if err != nil {
        switch err.(type) {
        case *commonerr.ErrBadRequest:
            writeResponse(w, r, http.StatusBadRequest, FeatureEnvelope{Error: &Error{err.Error()}})
            return putFixRoute, http.StatusBadRequest
        default:
            if err == commonerr.ErrNotFound {
                writeResponse(w, r, http.StatusNotFound, FeatureEnvelope{Error: &Error{err.Error()}})
                return putFixRoute, http.StatusNotFound
            }
            writeResponse(w, r, http.StatusInternalServerError, FeatureEnvelope{Error: &Error{err.Error()}})
            return putFixRoute, http.StatusInternalServerError
        }
    }

    writeResponse(w, r, http.StatusOK, FeatureEnvelope{Feature: request.Feature})
    return putFixRoute, http.StatusOK
}

func deleteFix(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    err := ctx.Store.DeleteVulnerabilityFix(p.ByName("vulnerabilityNamespace"), p.ByName("vulnerabilityName"), p.ByName("fixName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, FeatureEnvelope{Error: &Error{err.Error()}})
        return deleteFixRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, FeatureEnvelope{Error: &Error{err.Error()}})
        return deleteFixRoute, http.StatusInternalServerError
    }

    w.WriteHeader(http.StatusOK)
    return deleteFixRoute, http.StatusOK
}

func getNotification(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    query := r.URL.Query()

    limitStrs, limitExists := query["limit"]
    if !limitExists {
        writeResponse(w, r, http.StatusBadRequest, NotificationEnvelope{Error: &Error{"must provide limit query parameter"}})
        return getNotificationRoute, http.StatusBadRequest
    }
    limit, err := strconv.Atoi(limitStrs[0])
    if err != nil {
        writeResponse(w, r, http.StatusBadRequest, NotificationEnvelope{Error: &Error{"invalid limit format: " + err.Error()}})
        return getNotificationRoute, http.StatusBadRequest
    }

    var pageToken string
    page := database.VulnerabilityNotificationFirstPage
    pageStrs, pageExists := query["page"]
    if pageExists {
        err := token.Unmarshal(pageStrs[0], ctx.PaginationKey, &page)
        if err != nil {
            writeResponse(w, r, http.StatusBadRequest, NotificationEnvelope{Error: &Error{"invalid page format: " + err.Error()}})
            return getNotificationRoute, http.StatusBadRequest
        }
        pageToken = pageStrs[0]
    } else {
        pageTokenBytes, err := token.Marshal(page, ctx.PaginationKey)
        if err != nil {
            writeResponse(w, r, http.StatusBadRequest, NotificationEnvelope{Error: &Error{"failed to marshal token: " + err.Error()}})
            return getNotificationRoute, http.StatusBadRequest
        }
        pageToken = string(pageTokenBytes)
    }

    dbNotification, nextPage, err := ctx.Store.GetNotification(p.ByName("notificationName"), limit, page)
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, NotificationEnvelope{Error: &Error{err.Error()}})
        return deleteNotificationRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, NotificationEnvelope{Error: &Error{err.Error()}})
        return getNotificationRoute, http.StatusInternalServerError
    }

    notification := NotificationFromDatabaseModel(dbNotification, limit, pageToken, nextPage, ctx.PaginationKey)

    writeResponse(w, r, http.StatusOK, NotificationEnvelope{Notification: &notification})
    return getNotificationRoute, http.StatusOK
}

func deleteNotification(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    err := ctx.Store.DeleteNotification(p.ByName("notificationName"))
    if err == commonerr.ErrNotFound {
        writeResponse(w, r, http.StatusNotFound, NotificationEnvelope{Error: &Error{err.Error()}})
        return deleteNotificationRoute, http.StatusNotFound
    } else if err != nil {
        writeResponse(w, r, http.StatusInternalServerError, NotificationEnvelope{Error: &Error{err.Error()}})
        return deleteNotificationRoute, http.StatusInternalServerError
    }

    w.WriteHeader(http.StatusOK)
    return deleteNotificationRoute, http.StatusOK
}

func getMetrics(w http.ResponseWriter, r *http.Request, p httprouter.Params, ctx *context) (string, int) {
    prometheus.Handler().ServeHTTP(w, r)
    return getMetricsRoute, 0
}
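The remainder of the diff regenerates the clairpb code. As a rough, illustrative sketch of the new notification shape it defines (field names are taken from the structs below, the values are invented, and the import path is an assumption):

package main

import (
    "fmt"

    // Assumed import path for the generated package shown below.
    "github.com/coreos/clair/api/v2/clairpb"
)

func main() {
    // Vulnerable ancestries are now delivered as explicit pages instead of
    // the v1 "layers introducing vulnerability" lists.
    n := &clairpb.Notification{
        Name:    "example-notification",
        Created: "1500000000",
        New: &clairpb.PagedVulnerableAncestries{
            CurrentPage: "opaque-page-token",
            NextPage:    "", // an empty next_page signals the end of all pages
            Limit:       10,
            Vulnerability: &clairpb.Vulnerability{
                Name:          "CVE-2017-0000",
                NamespaceName: "debian:9",
                Severity:      "Low",
            },
            Ancestries: []*clairpb.IndexedAncestryName{
                {Index: 1, Name: "registry.example.com/app:1.0"},
            },
        },
    }

    fmt.Println(n.GetNew().GetNextPage() == "") // true: this example has a single page
}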
@ -9,20 +9,20 @@ It is generated from these files:
|
||||
|
||||
It has these top-level messages:
|
||||
Vulnerability
|
||||
ClairStatus
|
||||
Feature
|
||||
Ancestry
|
||||
LayersIntroducingVulnerabilty
|
||||
OrderedLayerName
|
||||
Layer
|
||||
Notification
|
||||
Page
|
||||
IndexedAncestryName
|
||||
PagedVulnerableAncestries
|
||||
PostAncestryRequest
|
||||
PostAncestryResponse
|
||||
GetAncestryRequest
|
||||
GetAncestryResponse
|
||||
GetNotificationRequest
|
||||
GetNotificationResponse
|
||||
DeleteNotificationRequest
|
||||
MarkNotificationAsReadRequest
|
||||
*/
|
||||
package clairpb
|
||||
|
||||
@ -31,6 +31,7 @@ import fmt "fmt"
|
||||
import math "math"
|
||||
import _ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
import google_protobuf1 "github.com/golang/protobuf/ptypes/empty"
|
||||
import google_protobuf2 "github.com/golang/protobuf/ptypes/timestamp"
|
||||
|
||||
import (
|
||||
context "golang.org/x/net/context"
|
||||
@ -49,14 +50,16 @@ var _ = math.Inf
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
type Vulnerability struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"`
|
||||
Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"`
|
||||
Link string `protobuf:"bytes,4,opt,name=link" json:"link,omitempty"`
|
||||
Severity string `protobuf:"bytes,5,opt,name=severity" json:"severity,omitempty"`
|
||||
Metadata string `protobuf:"bytes,6,opt,name=metadata" json:"metadata,omitempty"`
|
||||
FixedBy string `protobuf:"bytes,7,opt,name=fixed_by,json=fixedBy" json:"fixed_by,omitempty"`
|
||||
FixedInFeatures []*Feature `protobuf:"bytes,8,rep,name=fixed_in_features,json=fixedInFeatures" json:"fixed_in_features,omitempty"`
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"`
|
||||
Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"`
|
||||
Link string `protobuf:"bytes,4,opt,name=link" json:"link,omitempty"`
|
||||
Severity string `protobuf:"bytes,5,opt,name=severity" json:"severity,omitempty"`
|
||||
Metadata string `protobuf:"bytes,6,opt,name=metadata" json:"metadata,omitempty"`
|
||||
// fixed_by exists when vulnerability is under feature.
|
||||
FixedBy string `protobuf:"bytes,7,opt,name=fixed_by,json=fixedBy" json:"fixed_by,omitempty"`
|
||||
// affected_versions exists when vulnerability is under notification.
|
||||
AffectedVersions []*Feature `protobuf:"bytes,8,rep,name=affected_versions,json=affectedVersions" json:"affected_versions,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Vulnerability) Reset() { *m = Vulnerability{} }
|
||||
@ -113,9 +116,43 @@ func (m *Vulnerability) GetFixedBy() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Vulnerability) GetFixedInFeatures() []*Feature {
|
||||
func (m *Vulnerability) GetAffectedVersions() []*Feature {
|
||||
if m != nil {
|
||||
return m.FixedInFeatures
|
||||
return m.AffectedVersions
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ClairStatus struct {
|
||||
// listers and detectors are processors implemented in this Clair and used to
|
||||
// scan ancestries
|
||||
Listers []string `protobuf:"bytes,1,rep,name=listers" json:"listers,omitempty"`
|
||||
Detectors []string `protobuf:"bytes,2,rep,name=detectors" json:"detectors,omitempty"`
|
||||
LastUpdateTime *google_protobuf2.Timestamp `protobuf:"bytes,3,opt,name=last_update_time,json=lastUpdateTime" json:"last_update_time,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ClairStatus) Reset() { *m = ClairStatus{} }
|
||||
func (m *ClairStatus) String() string { return proto.CompactTextString(m) }
|
||||
func (*ClairStatus) ProtoMessage() {}
|
||||
func (*ClairStatus) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
|
||||
func (m *ClairStatus) GetListers() []string {
|
||||
if m != nil {
|
||||
return m.Listers
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ClairStatus) GetDetectors() []string {
|
||||
if m != nil {
|
||||
return m.Detectors
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ClairStatus) GetLastUpdateTime() *google_protobuf2.Timestamp {
|
||||
if m != nil {
|
||||
return m.LastUpdateTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@ -125,14 +162,13 @@ type Feature struct {
|
||||
NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"`
|
||||
Version string `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"`
|
||||
VersionFormat string `protobuf:"bytes,4,opt,name=version_format,json=versionFormat" json:"version_format,omitempty"`
|
||||
AddedBy string `protobuf:"bytes,5,opt,name=added_by,json=addedBy" json:"added_by,omitempty"`
|
||||
Vulnerabilities []*Vulnerability `protobuf:"bytes,6,rep,name=vulnerabilities" json:"vulnerabilities,omitempty"`
|
||||
Vulnerabilities []*Vulnerability `protobuf:"bytes,5,rep,name=vulnerabilities" json:"vulnerabilities,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Feature) Reset() { *m = Feature{} }
|
||||
func (m *Feature) String() string { return proto.CompactTextString(m) }
|
||||
func (*Feature) ProtoMessage() {}
|
||||
func (*Feature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
func (*Feature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
|
||||
func (m *Feature) GetName() string {
|
||||
if m != nil {
|
||||
@ -162,13 +198,6 @@ func (m *Feature) GetVersionFormat() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Feature) GetAddedBy() string {
|
||||
if m != nil {
|
||||
return m.AddedBy
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Feature) GetVulnerabilities() []*Vulnerability {
|
||||
if m != nil {
|
||||
return m.Vulnerabilities
|
||||
@ -177,15 +206,20 @@ func (m *Feature) GetVulnerabilities() []*Vulnerability {
|
||||
}
|
||||
|
||||
type Ancestry struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
EngineVersion int32 `protobuf:"varint,2,opt,name=engine_version,json=engineVersion" json:"engine_version,omitempty"`
|
||||
Layers []*Layer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"`
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
Features []*Feature `protobuf:"bytes,2,rep,name=features" json:"features,omitempty"`
|
||||
Layers []*Layer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"`
|
||||
// scanned_listers and scanned_detectors are used to scan this ancestry, it
|
||||
// may be different from listers and detectors in ClairStatus since the
|
||||
// ancestry could be scanned by previous version of Clair.
|
||||
ScannedListers []string `protobuf:"bytes,4,rep,name=scanned_listers,json=scannedListers" json:"scanned_listers,omitempty"`
|
||||
ScannedDetectors []string `protobuf:"bytes,5,rep,name=scanned_detectors,json=scannedDetectors" json:"scanned_detectors,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Ancestry) Reset() { *m = Ancestry{} }
|
||||
func (m *Ancestry) String() string { return proto.CompactTextString(m) }
|
||||
func (*Ancestry) ProtoMessage() {}
|
||||
func (*Ancestry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
func (*Ancestry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
|
||||
func (m *Ancestry) GetName() string {
|
||||
if m != nil {
|
||||
@ -194,11 +228,11 @@ func (m *Ancestry) GetName() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Ancestry) GetEngineVersion() int32 {
|
||||
func (m *Ancestry) GetFeatures() []*Feature {
|
||||
if m != nil {
|
||||
return m.EngineVersion
|
||||
return m.Features
|
||||
}
|
||||
return 0
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Ancestry) GetLayers() []*Layer {
|
||||
@ -208,91 +242,49 @@ func (m *Ancestry) GetLayers() []*Layer {
|
||||
return nil
|
||||
}
|
||||
|
||||
type LayersIntroducingVulnerabilty struct {
|
||||
Vulnerability *Vulnerability `protobuf:"bytes,1,opt,name=vulnerability" json:"vulnerability,omitempty"`
|
||||
Layers []*OrderedLayerName `protobuf:"bytes,2,rep,name=layers" json:"layers,omitempty"`
|
||||
}
|
||||
|
||||
func (m *LayersIntroducingVulnerabilty) Reset() { *m = LayersIntroducingVulnerabilty{} }
|
||||
func (m *LayersIntroducingVulnerabilty) String() string { return proto.CompactTextString(m) }
|
||||
func (*LayersIntroducingVulnerabilty) ProtoMessage() {}
|
||||
func (*LayersIntroducingVulnerabilty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
|
||||
func (m *LayersIntroducingVulnerabilty) GetVulnerability() *Vulnerability {
|
||||
func (m *Ancestry) GetScannedListers() []string {
|
||||
if m != nil {
|
||||
return m.Vulnerability
|
||||
return m.ScannedListers
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *LayersIntroducingVulnerabilty) GetLayers() []*OrderedLayerName {
|
||||
func (m *Ancestry) GetScannedDetectors() []string {
|
||||
if m != nil {
|
||||
return m.Layers
|
||||
return m.ScannedDetectors
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type OrderedLayerName struct {
|
||||
Index int32 `protobuf:"varint,1,opt,name=index" json:"index,omitempty"`
|
||||
LayerName string `protobuf:"bytes,2,opt,name=layer_name,json=layerName" json:"layer_name,omitempty"`
|
||||
}
|
||||
|
||||
func (m *OrderedLayerName) Reset() { *m = OrderedLayerName{} }
|
||||
func (m *OrderedLayerName) String() string { return proto.CompactTextString(m) }
|
||||
func (*OrderedLayerName) ProtoMessage() {}
|
||||
func (*OrderedLayerName) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
|
||||
|
||||
func (m *OrderedLayerName) GetIndex() int32 {
|
||||
if m != nil {
|
||||
return m.Index
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *OrderedLayerName) GetLayerName() string {
|
||||
if m != nil {
|
||||
return m.LayerName
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type Layer struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
NamespaceNames []string `protobuf:"bytes,2,rep,name=namespace_names,json=namespaceNames" json:"namespace_names,omitempty"`
|
||||
Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Layer) Reset() { *m = Layer{} }
|
||||
func (m *Layer) String() string { return proto.CompactTextString(m) }
|
||||
func (*Layer) ProtoMessage() {}
|
||||
func (*Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
|
||||
func (*Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
|
||||
|
||||
func (m *Layer) GetName() string {
|
||||
func (m *Layer) GetHash() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
return m.Hash
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Layer) GetNamespaceNames() []string {
|
||||
if m != nil {
|
||||
return m.NamespaceNames
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Notification struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
Created string `protobuf:"bytes,2,opt,name=created" json:"created,omitempty"`
|
||||
Notified string `protobuf:"bytes,3,opt,name=notified" json:"notified,omitempty"`
|
||||
Deleted string `protobuf:"bytes,4,opt,name=deleted" json:"deleted,omitempty"`
|
||||
Limit int32 `protobuf:"varint,5,opt,name=limit" json:"limit,omitempty"`
|
||||
Page *Page `protobuf:"bytes,6,opt,name=page" json:"page,omitempty"`
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
Created string `protobuf:"bytes,2,opt,name=created" json:"created,omitempty"`
|
||||
Notified string `protobuf:"bytes,3,opt,name=notified" json:"notified,omitempty"`
|
||||
Deleted string `protobuf:"bytes,4,opt,name=deleted" json:"deleted,omitempty"`
|
||||
Old *PagedVulnerableAncestries `protobuf:"bytes,5,opt,name=old" json:"old,omitempty"`
|
||||
New *PagedVulnerableAncestries `protobuf:"bytes,6,opt,name=new" json:"new,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Notification) Reset() { *m = Notification{} }
|
||||
func (m *Notification) String() string { return proto.CompactTextString(m) }
|
||||
func (*Notification) ProtoMessage() {}
|
||||
func (*Notification) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
|
||||
func (*Notification) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
|
||||
|
||||
func (m *Notification) GetName() string {
|
||||
if m != nil {
|
||||
@ -322,60 +314,95 @@ func (m *Notification) GetDeleted() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Notification) GetLimit() int32 {
|
||||
if m != nil {
|
||||
return m.Limit
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Notification) GetPage() *Page {
|
||||
if m != nil {
|
||||
return m.Page
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Page struct {
|
||||
ThisToken string `protobuf:"bytes,1,opt,name=this_token,json=thisToken" json:"this_token,omitempty"`
|
||||
NextToken string `protobuf:"bytes,2,opt,name=next_token,json=nextToken" json:"next_token,omitempty"`
|
||||
Old *LayersIntroducingVulnerabilty `protobuf:"bytes,3,opt,name=old" json:"old,omitempty"`
|
||||
New *LayersIntroducingVulnerabilty `protobuf:"bytes,4,opt,name=new" json:"new,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Page) Reset() { *m = Page{} }
|
||||
func (m *Page) String() string { return proto.CompactTextString(m) }
|
||||
func (*Page) ProtoMessage() {}
|
||||
func (*Page) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
|
||||
|
||||
func (m *Page) GetThisToken() string {
|
||||
if m != nil {
|
||||
return m.ThisToken
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Page) GetNextToken() string {
|
||||
if m != nil {
|
||||
return m.NextToken
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Page) GetOld() *LayersIntroducingVulnerabilty {
|
||||
func (m *Notification) GetOld() *PagedVulnerableAncestries {
|
||||
if m != nil {
|
||||
return m.Old
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Page) GetNew() *LayersIntroducingVulnerabilty {
|
||||
func (m *Notification) GetNew() *PagedVulnerableAncestries {
|
||||
if m != nil {
|
||||
return m.New
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type IndexedAncestryName struct {
|
||||
// index is unique to a name among all streams that are streamed simultaneously;
// it is increasing and larger than every index in the previous page of the same stream.
Index int32 `protobuf:"varint,1,opt,name=index" json:"index,omitempty"`
|
||||
Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"`
|
||||
}
|
||||
|
||||
func (m *IndexedAncestryName) Reset() { *m = IndexedAncestryName{} }
|
||||
func (m *IndexedAncestryName) String() string { return proto.CompactTextString(m) }
|
||||
func (*IndexedAncestryName) ProtoMessage() {}
|
||||
func (*IndexedAncestryName) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
|
||||
|
||||
func (m *IndexedAncestryName) GetIndex() int32 {
|
||||
if m != nil {
|
||||
return m.Index
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *IndexedAncestryName) GetName() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type PagedVulnerableAncestries struct {
|
||||
CurrentPage string `protobuf:"bytes,1,opt,name=current_page,json=currentPage" json:"current_page,omitempty"`
|
||||
// if next_page is empty, it signals the end of all pages.
|
||||
NextPage string `protobuf:"bytes,2,opt,name=next_page,json=nextPage" json:"next_page,omitempty"`
|
||||
Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"`
|
||||
Vulnerability *Vulnerability `protobuf:"bytes,4,opt,name=vulnerability" json:"vulnerability,omitempty"`
|
||||
Ancestries []*IndexedAncestryName `protobuf:"bytes,5,rep,name=ancestries" json:"ancestries,omitempty"`
|
||||
}
|
||||
|
||||
func (m *PagedVulnerableAncestries) Reset() { *m = PagedVulnerableAncestries{} }
|
||||
func (m *PagedVulnerableAncestries) String() string { return proto.CompactTextString(m) }
|
||||
func (*PagedVulnerableAncestries) ProtoMessage() {}
|
||||
func (*PagedVulnerableAncestries) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
|
||||
|
||||
func (m *PagedVulnerableAncestries) GetCurrentPage() string {
|
||||
if m != nil {
|
||||
return m.CurrentPage
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *PagedVulnerableAncestries) GetNextPage() string {
|
||||
if m != nil {
|
||||
return m.NextPage
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *PagedVulnerableAncestries) GetLimit() int32 {
|
||||
if m != nil {
|
||||
return m.Limit
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *PagedVulnerableAncestries) GetVulnerability() *Vulnerability {
|
||||
if m != nil {
|
||||
return m.Vulnerability
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *PagedVulnerableAncestries) GetAncestries() []*IndexedAncestryName {
|
||||
if m != nil {
|
||||
return m.Ancestries
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
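
The generated types above replace the old token-based Page with explicit paged vulnerable ancestries. Below is a minimal sketch of how a consumer might walk one page of a notification using only the getters defined in this file; the `noti` value is assumed to come from a GetNotificationResponse.

// collectNewlyAffected gathers the ancestry names on the current page of the
// notification's "new" vulnerability stream. An empty NextPage signals the
// last page; otherwise its value is passed back as new_vulnerability_page in
// the next GetNotificationRequest.
func collectNewlyAffected(noti *Notification) (names []string, nextPage string) {
	page := noti.GetNew()
	if page == nil {
		return nil, ""
	}
	for _, ancestry := range page.GetAncestries() {
		names = append(names, ancestry.GetName())
	}
	return names, page.GetNextPage()
}
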
type PostAncestryRequest struct {
|
||||
AncestryName string `protobuf:"bytes,1,opt,name=ancestry_name,json=ancestryName" json:"ancestry_name,omitempty"`
|
||||
Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"`
|
||||
@ -409,7 +436,7 @@ func (m *PostAncestryRequest) GetLayers() []*PostAncestryRequest_PostLayer {
|
||||
}
|
||||
|
||||
type PostAncestryRequest_PostLayer struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"`
|
||||
Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"`
|
||||
Headers map[string]string `protobuf:"bytes,3,rep,name=headers" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
}
|
||||
@ -421,9 +448,9 @@ func (*PostAncestryRequest_PostLayer) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{8, 0}
|
||||
}
|
||||
|
||||
func (m *PostAncestryRequest_PostLayer) GetName() string {
|
||||
func (m *PostAncestryRequest_PostLayer) GetHash() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
return m.Hash
|
||||
}
|
||||
return ""
|
||||
}
|
||||
@ -443,7 +470,7 @@ func (m *PostAncestryRequest_PostLayer) GetHeaders() map[string]string {
|
||||
}
|
||||
|
||||
type PostAncestryResponse struct {
|
||||
EngineVersion int32 `protobuf:"varint,1,opt,name=engine_version,json=engineVersion" json:"engine_version,omitempty"`
|
||||
Status *ClairStatus `protobuf:"bytes,1,opt,name=status" json:"status,omitempty"`
|
||||
}
|
||||
|
||||
func (m *PostAncestryResponse) Reset() { *m = PostAncestryResponse{} }
|
||||
@ -451,11 +478,11 @@ func (m *PostAncestryResponse) String() string { return proto.Compact
|
||||
func (*PostAncestryResponse) ProtoMessage() {}
|
||||
func (*PostAncestryResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
|
||||
|
||||
func (m *PostAncestryResponse) GetEngineVersion() int32 {
|
||||
func (m *PostAncestryResponse) GetStatus() *ClairStatus {
|
||||
if m != nil {
|
||||
return m.EngineVersion
|
||||
return m.Status
|
||||
}
|
||||
return 0
|
||||
return nil
|
||||
}
|
||||
|
||||
type GetAncestryRequest struct {
|
||||
@ -491,8 +518,8 @@ func (m *GetAncestryRequest) GetWithFeatures() bool {
|
||||
}
|
||||
|
||||
type GetAncestryResponse struct {
|
||||
Ancestry *Ancestry `protobuf:"bytes,1,opt,name=ancestry" json:"ancestry,omitempty"`
|
||||
Features []*Feature `protobuf:"bytes,2,rep,name=features" json:"features,omitempty"`
|
||||
Ancestry *Ancestry `protobuf:"bytes,1,opt,name=ancestry" json:"ancestry,omitempty"`
|
||||
Status *ClairStatus `protobuf:"bytes,2,opt,name=status" json:"status,omitempty"`
|
||||
}
|
||||
|
||||
func (m *GetAncestryResponse) Reset() { *m = GetAncestryResponse{} }
|
||||
@ -507,17 +534,19 @@ func (m *GetAncestryResponse) GetAncestry() *Ancestry {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *GetAncestryResponse) GetFeatures() []*Feature {
|
||||
func (m *GetAncestryResponse) GetStatus() *ClairStatus {
|
||||
if m != nil {
|
||||
return m.Features
|
||||
return m.Status
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
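
For completeness, a hedged sketch of how a caller might exercise the ancestry read path with both flags set; NewAncestryServiceClient is assumed to be the standard constructor emitted by the grpc-go plugin (it is not shown in this hunk), and the connection is assumed to be already established.

// getAncestryWithDetails fetches an ancestry along with its features and their
// vulnerabilities over an existing gRPC connection.
func getAncestryWithDetails(ctx context.Context, conn *grpc.ClientConn, name string) (*GetAncestryResponse, error) {
	client := NewAncestryServiceClient(conn)
	return client.GetAncestry(ctx, &GetAncestryRequest{
		AncestryName:        name,
		WithFeatures:        true,
		WithVulnerabilities: true,
	})
}
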
type GetNotificationRequest struct {
|
||||
Page string `protobuf:"bytes,1,opt,name=page" json:"page,omitempty"`
|
||||
Limit int32 `protobuf:"varint,2,opt,name=limit" json:"limit,omitempty"`
|
||||
Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"`
|
||||
// if the vulnerability_page is empty, it implies the first page.
|
||||
OldVulnerabilityPage string `protobuf:"bytes,1,opt,name=old_vulnerability_page,json=oldVulnerabilityPage" json:"old_vulnerability_page,omitempty"`
|
||||
NewVulnerabilityPage string `protobuf:"bytes,2,opt,name=new_vulnerability_page,json=newVulnerabilityPage" json:"new_vulnerability_page,omitempty"`
|
||||
Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"`
|
||||
Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
|
||||
}
|
||||
|
||||
func (m *GetNotificationRequest) Reset() { *m = GetNotificationRequest{} }
|
||||
@ -525,9 +554,16 @@ func (m *GetNotificationRequest) String() string { return proto.Compa
|
||||
func (*GetNotificationRequest) ProtoMessage() {}
|
||||
func (*GetNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
|
||||
|
||||
func (m *GetNotificationRequest) GetPage() string {
|
||||
func (m *GetNotificationRequest) GetOldVulnerabilityPage() string {
|
||||
if m != nil {
|
||||
return m.Page
|
||||
return m.OldVulnerabilityPage
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *GetNotificationRequest) GetNewVulnerabilityPage() string {
|
||||
if m != nil {
|
||||
return m.NewVulnerabilityPage
|
||||
}
|
||||
return ""
|
||||
}
|
||||
@ -562,16 +598,16 @@ func (m *GetNotificationResponse) GetNotification() *Notification {
|
||||
return nil
|
||||
}
|
||||
|
||||
type DeleteNotificationRequest struct {
|
||||
type MarkNotificationAsReadRequest struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
}
|
||||
|
||||
func (m *DeleteNotificationRequest) Reset() { *m = DeleteNotificationRequest{} }
|
||||
func (m *DeleteNotificationRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*DeleteNotificationRequest) ProtoMessage() {}
|
||||
func (*DeleteNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
|
||||
func (m *MarkNotificationAsReadRequest) Reset() { *m = MarkNotificationAsReadRequest{} }
|
||||
func (m *MarkNotificationAsReadRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*MarkNotificationAsReadRequest) ProtoMessage() {}
|
||||
func (*MarkNotificationAsReadRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
|
||||
|
||||
func (m *DeleteNotificationRequest) GetName() string {
|
||||
func (m *MarkNotificationAsReadRequest) GetName() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
}
|
||||
@ -580,13 +616,13 @@ func (m *DeleteNotificationRequest) GetName() string {
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Vulnerability)(nil), "clairpb.Vulnerability")
|
||||
proto.RegisterType((*ClairStatus)(nil), "clairpb.ClairStatus")
|
||||
proto.RegisterType((*Feature)(nil), "clairpb.Feature")
|
||||
proto.RegisterType((*Ancestry)(nil), "clairpb.Ancestry")
|
||||
proto.RegisterType((*LayersIntroducingVulnerabilty)(nil), "clairpb.LayersIntroducingVulnerabilty")
|
||||
proto.RegisterType((*OrderedLayerName)(nil), "clairpb.OrderedLayerName")
|
||||
proto.RegisterType((*Layer)(nil), "clairpb.Layer")
|
||||
proto.RegisterType((*Notification)(nil), "clairpb.Notification")
|
||||
proto.RegisterType((*Page)(nil), "clairpb.Page")
|
||||
proto.RegisterType((*IndexedAncestryName)(nil), "clairpb.IndexedAncestryName")
|
||||
proto.RegisterType((*PagedVulnerableAncestries)(nil), "clairpb.PagedVulnerableAncestries")
|
||||
proto.RegisterType((*PostAncestryRequest)(nil), "clairpb.PostAncestryRequest")
|
||||
proto.RegisterType((*PostAncestryRequest_PostLayer)(nil), "clairpb.PostAncestryRequest.PostLayer")
|
||||
proto.RegisterType((*PostAncestryResponse)(nil), "clairpb.PostAncestryResponse")
|
||||
@ -594,7 +630,7 @@ func init() {
|
||||
proto.RegisterType((*GetAncestryResponse)(nil), "clairpb.GetAncestryResponse")
|
||||
proto.RegisterType((*GetNotificationRequest)(nil), "clairpb.GetNotificationRequest")
|
||||
proto.RegisterType((*GetNotificationResponse)(nil), "clairpb.GetNotificationResponse")
|
||||
proto.RegisterType((*DeleteNotificationRequest)(nil), "clairpb.DeleteNotificationRequest")
|
||||
proto.RegisterType((*MarkNotificationAsReadRequest)(nil), "clairpb.MarkNotificationAsReadRequest")
|
||||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
@ -706,7 +742,7 @@ var _AncestryService_serviceDesc = grpc.ServiceDesc{
|
||||
|
||||
type NotificationServiceClient interface {
|
||||
GetNotification(ctx context.Context, in *GetNotificationRequest, opts ...grpc.CallOption) (*GetNotificationResponse, error)
|
||||
DeleteNotification(ctx context.Context, in *DeleteNotificationRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error)
|
||||
MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error)
|
||||
}
|
||||
|
||||
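
A short, hedged sketch of the renamed call from a client's point of view; NewNotificationServiceClient is the constructor that the grpc-go plugin normally generates alongside this interface, and the notification name is purely illustrative.

// markNotificationRead acknowledges a notification once its contents have been
// consumed, so Clair can delete it server-side.
func markNotificationRead(ctx context.Context, conn *grpc.ClientConn, name string) error {
	client := NewNotificationServiceClient(conn)
	_, err := client.MarkNotificationAsRead(ctx, &MarkNotificationAsReadRequest{Name: name})
	return err
}
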
type notificationServiceClient struct {
|
||||
@ -726,9 +762,9 @@ func (c *notificationServiceClient) GetNotification(ctx context.Context, in *Get
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *notificationServiceClient) DeleteNotification(ctx context.Context, in *DeleteNotificationRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) {
|
||||
func (c *notificationServiceClient) MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) {
|
||||
out := new(google_protobuf1.Empty)
|
||||
err := grpc.Invoke(ctx, "/clairpb.NotificationService/DeleteNotification", in, out, c.cc, opts...)
|
||||
err := grpc.Invoke(ctx, "/clairpb.NotificationService/MarkNotificationAsRead", in, out, c.cc, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -739,7 +775,7 @@ func (c *notificationServiceClient) DeleteNotification(ctx context.Context, in *
|
||||
|
||||
type NotificationServiceServer interface {
|
||||
GetNotification(context.Context, *GetNotificationRequest) (*GetNotificationResponse, error)
|
||||
DeleteNotification(context.Context, *DeleteNotificationRequest) (*google_protobuf1.Empty, error)
|
||||
MarkNotificationAsRead(context.Context, *MarkNotificationAsReadRequest) (*google_protobuf1.Empty, error)
|
||||
}
|
||||
|
||||
func RegisterNotificationServiceServer(s *grpc.Server, srv NotificationServiceServer) {
|
||||
@ -764,20 +800,20 @@ func _NotificationService_GetNotification_Handler(srv interface{}, ctx context.C
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _NotificationService_DeleteNotification_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(DeleteNotificationRequest)
|
||||
func _NotificationService_MarkNotificationAsRead_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(MarkNotificationAsReadRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(NotificationServiceServer).DeleteNotification(ctx, in)
|
||||
return srv.(NotificationServiceServer).MarkNotificationAsRead(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/clairpb.NotificationService/DeleteNotification",
|
||||
FullMethod: "/clairpb.NotificationService/MarkNotificationAsRead",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(NotificationServiceServer).DeleteNotification(ctx, req.(*DeleteNotificationRequest))
|
||||
return srv.(NotificationServiceServer).MarkNotificationAsRead(ctx, req.(*MarkNotificationAsReadRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
@ -791,8 +827,8 @@ var _NotificationService_serviceDesc = grpc.ServiceDesc{
|
||||
Handler: _NotificationService_GetNotification_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "DeleteNotification",
|
||||
Handler: _NotificationService_DeleteNotification_Handler,
|
||||
MethodName: "MarkNotificationAsRead",
|
||||
Handler: _NotificationService_MarkNotificationAsRead_Handler,
|
||||
},
|
||||
},
|
||||
Streams: []grpc.StreamDesc{},
|
||||
@ -802,71 +838,78 @@ var _NotificationService_serviceDesc = grpc.ServiceDesc{
|
||||
func init() { proto.RegisterFile("clair.proto", fileDescriptor0) }
|
||||
|
||||
var fileDescriptor0 = []byte{
|
||||
// 1042 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x6e, 0x1b, 0x45,
|
||||
0x14, 0xd6, 0xda, 0x71, 0x6c, 0x1f, 0xdb, 0x49, 0x3a, 0x49, 0xd3, 0x8d, 0x93, 0x88, 0x74, 0x11,
|
||||
0xa5, 0xaa, 0xc0, 0x56, 0xd3, 0x9b, 0x12, 0x01, 0x82, 0xa8, 0x6d, 0xa8, 0x04, 0xa5, 0x5a, 0xaa,
|
||||
0x5c, 0x70, 0x63, 0x4d, 0xbc, 0x27, 0xce, 0x28, 0xeb, 0x59, 0xb3, 0x3b, 0x76, 0x62, 0x55, 0xdc,
|
||||
0xf0, 0x04, 0x54, 0x3c, 0x06, 0x2f, 0xc0, 0x15, 0x2f, 0xd1, 0x27, 0x00, 0xf1, 0x16, 0xdc, 0xa0,
|
||||
0xf9, 0xf5, 0xae, 0x63, 0x23, 0x7e, 0xae, 0x3c, 0xe7, 0x7c, 0xe7, 0xe7, 0x3b, 0x3f, 0x33, 0x5e,
|
||||
0x68, 0xf4, 0x63, 0xca, 0xd2, 0xce, 0x28, 0x4d, 0x44, 0x42, 0xaa, 0x4a, 0x18, 0x9d, 0xb5, 0xf7,
|
||||
0x06, 0x49, 0x32, 0x88, 0xb1, 0x4b, 0x47, 0xac, 0x4b, 0x39, 0x4f, 0x04, 0x15, 0x2c, 0xe1, 0x99,
|
||||
0x36, 0x6b, 0xef, 0x1a, 0x54, 0x49, 0x67, 0xe3, 0xf3, 0x2e, 0x0e, 0x47, 0x62, 0xaa, 0xc1, 0xe0,
|
||||
0x4d, 0x09, 0x5a, 0xa7, 0xe3, 0x98, 0x63, 0x4a, 0xcf, 0x58, 0xcc, 0xc4, 0x94, 0x10, 0x58, 0xe1,
|
||||
0x74, 0x88, 0xbe, 0x77, 0xe0, 0xdd, 0xaf, 0x87, 0xea, 0x4c, 0xde, 0x83, 0x35, 0xf9, 0x9b, 0x8d,
|
||||
0x68, 0x1f, 0x7b, 0x0a, 0x2d, 0x29, 0xb4, 0xe5, 0xb4, 0x2f, 0xa4, 0xd9, 0x01, 0x34, 0x22, 0xcc,
|
||||
0xfa, 0x29, 0x1b, 0xc9, 0xfc, 0x7e, 0x59, 0xd9, 0xe4, 0x55, 0x32, 0x78, 0xcc, 0xf8, 0xa5, 0xbf,
|
||||
0xa2, 0x83, 0xcb, 0x33, 0x69, 0x43, 0x2d, 0xc3, 0x09, 0xa6, 0x4c, 0x4c, 0xfd, 0x8a, 0xd2, 0x3b,
|
||||
0x59, 0x62, 0x43, 0x14, 0x34, 0xa2, 0x82, 0xfa, 0xab, 0x1a, 0xb3, 0x32, 0xd9, 0x81, 0xda, 0x39,
|
||||
0xbb, 0xc6, 0xa8, 0x77, 0x36, 0xf5, 0xab, 0x0a, 0xab, 0x2a, 0xf9, 0x78, 0x4a, 0x3e, 0x86, 0x5b,
|
||||
0x1a, 0x62, 0xbc, 0x77, 0x8e, 0x54, 0x8c, 0x53, 0xcc, 0xfc, 0xda, 0x41, 0xf9, 0x7e, 0xe3, 0x70,
|
||||
0xa3, 0x63, 0xba, 0xd6, 0x79, 0xa6, 0x81, 0x70, 0x5d, 0x99, 0x3e, 0xe7, 0x46, 0xce, 0x82, 0xdf,
|
||||
0x3d, 0xa8, 0x1a, 0xe1, 0xff, 0x74, 0xc3, 0x87, 0xea, 0x04, 0xd3, 0x6c, 0xd6, 0x09, 0x2b, 0xca,
|
||||
0x00, 0xe6, 0xd8, 0x3b, 0x4f, 0xd2, 0x21, 0x15, 0xa6, 0x1f, 0x2d, 0xa3, 0x7d, 0xa6, 0x94, 0xb2,
|
||||
0x40, 0x1a, 0x45, 0xba, 0x40, 0xdd, 0x98, 0xaa, 0x92, 0x8f, 0xa7, 0xe4, 0x33, 0x58, 0x9f, 0xe4,
|
||||
0xa6, 0xc6, 0x30, 0xf3, 0x57, 0x55, 0x79, 0xdb, 0xae, 0xbc, 0xc2, 0x54, 0xc3, 0x79, 0xf3, 0x60,
|
||||
0x08, 0xb5, 0xcf, 0x79, 0x1f, 0x33, 0x91, 0x2e, 0x1d, 0x39, 0xf2, 0x01, 0xe3, 0xd8, 0xb3, 0x45,
|
||||
0xc8, 0x22, 0x2b, 0x61, 0x4b, 0x6b, 0x4f, 0x4d, 0x29, 0xf7, 0x60, 0x35, 0xa6, 0x53, 0x4c, 0x33,
|
||||
0xbf, 0xac, 0xf2, 0xaf, 0xb9, 0xfc, 0x5f, 0x4a, 0x75, 0x68, 0xd0, 0xe0, 0x47, 0x0f, 0xf6, 0x95,
|
||||
0x26, 0x7b, 0xce, 0x45, 0x9a, 0x44, 0xe3, 0x3e, 0xe3, 0x83, 0x19, 0x45, 0x21, 0x67, 0xd6, 0xca,
|
||||
0x73, 0x9c, 0x2a, 0x36, 0xcb, 0x0b, 0x2a, 0x1a, 0x93, 0x87, 0x8e, 0x47, 0x49, 0xf1, 0xd8, 0x71,
|
||||
0x6e, 0x5f, 0xa7, 0x11, 0xa6, 0x18, 0xa9, 0xe4, 0x72, 0x2e, 0x8e, 0xd2, 0x09, 0x6c, 0xcc, 0x63,
|
||||
0x64, 0x0b, 0x2a, 0x8c, 0x47, 0x78, 0xad, 0x92, 0x57, 0x42, 0x2d, 0x90, 0x7d, 0x00, 0xe5, 0x93,
|
||||
0x1f, 0x76, 0x3d, 0xb6, 0x4e, 0xc1, 0x13, 0xa8, 0xa8, 0x08, 0x0b, 0xfb, 0xf8, 0x3e, 0xac, 0x17,
|
||||
0x97, 0x45, 0x33, 0xac, 0x87, 0x6b, 0x85, 0x6d, 0xc9, 0x82, 0x9f, 0x3d, 0x68, 0xbe, 0x48, 0x04,
|
||||
0x3b, 0x67, 0x7d, 0x6a, 0xef, 0xca, 0x8d, 0x68, 0x3e, 0x54, 0xfb, 0x29, 0x52, 0x81, 0x91, 0xa1,
|
||||
0x61, 0x45, 0x79, 0x53, 0xb8, 0xf2, 0xc6, 0xc8, 0xac, 0x9b, 0x93, 0xa5, 0x57, 0x84, 0x31, 0x4a,
|
||||
0x2f, 0xbd, 0x68, 0x56, 0x94, 0xf5, 0xc6, 0x6c, 0xc8, 0x84, 0xda, 0xaf, 0x4a, 0xa8, 0x05, 0x72,
|
||||
0x17, 0x56, 0x46, 0x74, 0x80, 0xea, 0xc6, 0x35, 0x0e, 0x5b, 0xae, 0x95, 0x2f, 0xe9, 0x00, 0x43,
|
||||
0x05, 0x05, 0xbf, 0x78, 0xb0, 0x22, 0x45, 0xd9, 0x1b, 0x71, 0xc1, 0xb2, 0x9e, 0x48, 0x2e, 0x91,
|
||||
0x1b, 0xae, 0x75, 0xa9, 0x79, 0x25, 0x15, 0x12, 0xe6, 0x78, 0x2d, 0x0c, 0x6c, 0x5a, 0x27, 0x35,
|
||||
0x1a, 0x7e, 0x0c, 0xe5, 0x24, 0xd6, 0x84, 0x1b, 0x87, 0xf7, 0x8a, 0xbb, 0xb3, 0x6c, 0x53, 0x42,
|
||||
0xe9, 0x22, 0x3d, 0x39, 0x5e, 0xa9, 0x7a, 0xfe, 0x85, 0x27, 0xc7, 0xab, 0xe0, 0x6d, 0x09, 0x36,
|
||||
0x5f, 0x26, 0x99, 0xb0, 0xeb, 0x1f, 0xe2, 0x77, 0x63, 0xcc, 0x04, 0x79, 0x17, 0x5a, 0xd4, 0xa8,
|
||||
0x7a, 0xb9, 0xc6, 0x37, 0xad, 0x52, 0x2d, 0xc8, 0x36, 0xac, 0x9a, 0x2b, 0xab, 0x6b, 0x31, 0x12,
|
||||
0xf9, 0x74, 0xee, 0x1e, 0xcc, 0x18, 0x2d, 0x48, 0xa5, 0x74, 0x85, 0xfb, 0xd1, 0xfe, 0xd5, 0x83,
|
||||
0xba, 0xd3, 0x2e, 0x1c, 0x3d, 0x91, 0x43, 0x11, 0x17, 0x26, 0xaf, 0x3a, 0x93, 0xaf, 0xa0, 0x7a,
|
||||
0x81, 0x34, 0x9a, 0xa5, 0x7d, 0xf4, 0xcf, 0xd2, 0x76, 0xbe, 0xd0, 0x5e, 0x4f, 0xb9, 0x44, 0x6d,
|
||||
0x8c, 0xf6, 0x11, 0x34, 0xf3, 0x00, 0xd9, 0x80, 0xf2, 0x25, 0x4e, 0x0d, 0x0b, 0x79, 0x94, 0xfb,
|
||||
0x32, 0xa1, 0xf1, 0xd8, 0x5e, 0x02, 0x2d, 0x1c, 0x95, 0x1e, 0x7b, 0xc1, 0x27, 0xb0, 0x55, 0x4c,
|
||||
0x99, 0x8d, 0x12, 0x9e, 0x2d, 0x7a, 0x47, 0xbc, 0x05, 0xef, 0x48, 0xf0, 0xc6, 0x03, 0x72, 0x82,
|
||||
0xff, 0x6d, 0x26, 0x0f, 0x61, 0xeb, 0x8a, 0x89, 0x8b, 0xde, 0xfc, 0x8b, 0x28, 0x39, 0xd6, 0xc2,
|
||||
0x4d, 0x89, 0x9d, 0x16, 0x21, 0x19, 0x57, 0xb9, 0xb8, 0x3f, 0x87, 0xb2, 0xb2, 0x6d, 0x4a, 0xa5,
|
||||
0xfb, 0x1f, 0x48, 0x61, 0xb3, 0x40, 0xc9, 0x54, 0xf4, 0x21, 0xd4, 0x6c, 0x7a, 0xf3, 0x46, 0xdd,
|
||||
0x72, 0x5d, 0x77, 0xc6, 0xce, 0x84, 0x7c, 0x00, 0x35, 0x97, 0xa5, 0xb4, 0xe4, 0x2f, 0xc8, 0x59,
|
||||
0x04, 0xa7, 0xb0, 0x7d, 0x82, 0x22, 0xff, 0x0e, 0xd8, 0x56, 0x10, 0x73, 0x29, 0x3d, 0x3b, 0xff,
|
||||
0x01, 0xce, 0xae, 0x6f, 0x29, 0x7f, 0x7d, 0xed, 0xf6, 0x94, 0x67, 0xdb, 0x13, 0xbc, 0x82, 0x3b,
|
||||
0x37, 0xe2, 0x9a, 0x7a, 0x3e, 0x82, 0x26, 0xcf, 0xe9, 0x4d, 0x4d, 0xb7, 0x1d, 0xc9, 0x82, 0x53,
|
||||
0xc1, 0x34, 0xe8, 0xc2, 0xce, 0x13, 0xf5, 0x92, 0x2c, 0x21, 0x3c, 0xbf, 0xc4, 0x87, 0xbf, 0x79,
|
||||
0xb0, 0x6e, 0x7b, 0xf4, 0x0d, 0xa6, 0x13, 0xd6, 0x47, 0x42, 0xa1, 0x99, 0xdf, 0x1c, 0xb2, 0xf7,
|
||||
0x77, 0x3b, 0xdc, 0xde, 0x5f, 0x82, 0xea, 0x62, 0x82, 0xad, 0x1f, 0xde, 0xfe, 0xf1, 0x53, 0x69,
|
||||
0x2d, 0xa8, 0x77, 0xed, 0x00, 0x8e, 0xbc, 0x07, 0xe4, 0x12, 0x1a, 0xb9, 0x49, 0x92, 0x5d, 0x17,
|
||||
0xe3, 0xe6, 0xca, 0xb5, 0xf7, 0x16, 0x83, 0x26, 0xfe, 0x5d, 0x15, 0x7f, 0x97, 0xec, 0xb8, 0xf8,
|
||||
0xdd, 0xd7, 0x85, 0x0d, 0xfd, 0xfe, 0xf0, 0x4f, 0x0f, 0x36, 0xf3, 0xfd, 0xb0, 0x75, 0x66, 0xb0,
|
||||
0x3e, 0x37, 0x02, 0xf2, 0x4e, 0x3e, 0xd7, 0x82, 0x1e, 0xb6, 0x0f, 0x96, 0x1b, 0x18, 0x42, 0xfb,
|
||||
0x8a, 0xd0, 0x1d, 0x72, 0xbb, 0x9b, 0x9f, 0x4c, 0xd6, 0x7d, 0xad, 0xc8, 0x90, 0x04, 0xc8, 0xcd,
|
||||
0x09, 0x91, 0xc0, 0x85, 0x5d, 0x3a, 0xbe, 0xf6, 0x76, 0x47, 0x7f, 0x37, 0x76, 0xec, 0x77, 0x63,
|
||||
0xe7, 0xa9, 0xfc, 0x6e, 0xb4, 0x09, 0x1f, 0x2c, 0x4e, 0x78, 0x5c, 0xff, 0xd6, 0x7e, 0x96, 0x9e,
|
||||
0xad, 0x2a, 0xcf, 0x47, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0x5b, 0x9c, 0x1d, 0xc4, 0xb5, 0x0a,
|
||||
0x00, 0x00,
|
||||
// 1156 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x4d, 0x6f, 0xdb, 0x46,
|
||||
0x13, 0x06, 0x25, 0xcb, 0x92, 0x46, 0xf2, 0xd7, 0x5a, 0x51, 0x68, 0xd9, 0x46, 0x1c, 0xbe, 0x78,
|
||||
0xd3, 0x20, 0x6d, 0x25, 0x54, 0xf6, 0xa1, 0x35, 0xd2, 0x8f, 0xa4, 0x4e, 0xd2, 0x02, 0x49, 0x10,
|
||||
0x30, 0xa9, 0x0f, 0xbd, 0x08, 0x6b, 0x72, 0x64, 0x13, 0xa6, 0x48, 0x96, 0xbb, 0xb2, 0x2c, 0x04,
|
||||
0xbd, 0xb4, 0xc7, 0x9e, 0xda, 0xfe, 0x8f, 0xfe, 0x84, 0x5e, 0x0b, 0xf4, 0x9a, 0x7b, 0x81, 0x02,
|
||||
0xbd, 0xf6, 0x3f, 0x14, 0xbb, 0xdc, 0xa5, 0x48, 0x89, 0x0e, 0x8c, 0xf6, 0x24, 0xce, 0xcc, 0x33,
|
||||
0xbb, 0x33, 0xcf, 0x33, 0x3b, 0x10, 0x34, 0x1c, 0x9f, 0x7a, 0x71, 0x37, 0x8a, 0x43, 0x1e, 0x92,
|
||||
0xaa, 0x34, 0xa2, 0x93, 0xce, 0xce, 0x69, 0x18, 0x9e, 0xfa, 0xd8, 0xa3, 0x91, 0xd7, 0xa3, 0x41,
|
||||
0x10, 0x72, 0xca, 0xbd, 0x30, 0x60, 0x09, 0xac, 0xb3, 0xad, 0xa2, 0xd2, 0x3a, 0x19, 0x0f, 0x7b,
|
||||
0x38, 0x8a, 0xf8, 0x54, 0x05, 0x6f, 0xcd, 0x07, 0xb9, 0x37, 0x42, 0xc6, 0xe9, 0x28, 0x4a, 0x00,
|
||||
0xd6, 0x4f, 0x25, 0x58, 0x39, 0x1e, 0xfb, 0x01, 0xc6, 0xf4, 0xc4, 0xf3, 0x3d, 0x3e, 0x25, 0x04,
|
||||
0x96, 0x02, 0x3a, 0x42, 0xd3, 0xd8, 0x33, 0xee, 0xd6, 0x6d, 0xf9, 0x4d, 0xfe, 0x0f, 0xab, 0xe2,
|
||||
0x97, 0x45, 0xd4, 0xc1, 0x81, 0x8c, 0x96, 0x64, 0x74, 0x25, 0xf5, 0x3e, 0x17, 0xb0, 0x3d, 0x68,
|
||||
0xb8, 0xc8, 0x9c, 0xd8, 0x8b, 0x44, 0x81, 0x66, 0x59, 0x62, 0xb2, 0x2e, 0x71, 0xb8, 0xef, 0x05,
|
||||
0xe7, 0xe6, 0x52, 0x72, 0xb8, 0xf8, 0x26, 0x1d, 0xa8, 0x31, 0xbc, 0xc0, 0xd8, 0xe3, 0x53, 0xb3,
|
||||
0x22, 0xfd, 0xa9, 0x2d, 0x62, 0x23, 0xe4, 0xd4, 0xa5, 0x9c, 0x9a, 0xcb, 0x49, 0x4c, 0xdb, 0x64,
|
||||
0x0b, 0x6a, 0x43, 0xef, 0x12, 0xdd, 0xc1, 0xc9, 0xd4, 0xac, 0xca, 0x58, 0x55, 0xda, 0x0f, 0xa7,
|
||||
0xe4, 0x63, 0xd8, 0xa0, 0xc3, 0x21, 0x3a, 0x1c, 0xdd, 0xc1, 0x05, 0xc6, 0x4c, 0xd0, 0x65, 0xd6,
|
||||
0xf6, 0xca, 0x77, 0x1b, 0xfd, 0xf5, 0xae, 0xa2, 0xb5, 0xfb, 0x18, 0x29, 0x1f, 0xc7, 0x68, 0xaf,
|
||||
0x6b, 0xe8, 0xb1, 0x42, 0x5a, 0x3f, 0x18, 0xd0, 0xf8, 0x5c, 0xa0, 0x5e, 0x72, 0xca, 0xc7, 0x8c,
|
||||
0x98, 0x50, 0xf5, 0x3d, 0xc6, 0x31, 0x66, 0xa6, 0xb1, 0x57, 0x16, 0x17, 0x29, 0x93, 0xec, 0x40,
|
||||
0xdd, 0x45, 0x8e, 0x0e, 0x0f, 0x63, 0x66, 0x96, 0x64, 0x6c, 0xe6, 0x20, 0x47, 0xb0, 0xee, 0x53,
|
||||
0xc6, 0x07, 0xe3, 0xc8, 0xa5, 0x1c, 0x07, 0x82, 0x7b, 0x49, 0x4a, 0xa3, 0xdf, 0xe9, 0x26, 0xc2,
|
||||
0x74, 0xb5, 0x30, 0xdd, 0x57, 0x5a, 0x18, 0x7b, 0x55, 0xe4, 0x7c, 0x25, 0x53, 0x84, 0xd3, 0xfa,
|
||||
0xcd, 0x80, 0xaa, 0xaa, 0xf5, 0xbf, 0x88, 0x63, 0x42, 0x55, 0x51, 0xa1, 0x84, 0xd1, 0xa6, 0x38,
|
||||
0x40, 0x7d, 0x0e, 0x86, 0x61, 0x3c, 0xa2, 0x5c, 0xc9, 0xb3, 0xa2, 0xbc, 0x8f, 0xa5, 0x93, 0x7c,
|
||||
0x06, 0x6b, 0x17, 0x99, 0x49, 0xf1, 0x90, 0x99, 0x15, 0x49, 0x69, 0x3b, 0xa5, 0x34, 0x37, 0x49,
|
||||
0xf6, 0x3c, 0xdc, 0xfa, 0xdd, 0x80, 0xda, 0x83, 0xc0, 0x41, 0xc6, 0xe3, 0xe2, 0x39, 0x7b, 0x0f,
|
||||
0x6a, 0xc3, 0xa4, 0xd3, 0x84, 0xcd, 0x22, 0xb9, 0x52, 0x04, 0xb9, 0x03, 0xcb, 0x3e, 0x9d, 0x0a,
|
||||
0x55, 0xca, 0x12, 0xbb, 0x9a, 0x62, 0x9f, 0x0a, 0xb7, 0xad, 0xa2, 0xe4, 0x1d, 0x58, 0x63, 0x0e,
|
||||
0x0d, 0x02, 0x74, 0x07, 0x5a, 0xc6, 0x25, 0x29, 0xd5, 0xaa, 0x72, 0x3f, 0x55, 0x6a, 0xbe, 0x0b,
|
||||
0x1b, 0x1a, 0x38, 0x53, 0xb5, 0x22, 0xa1, 0xeb, 0x2a, 0x70, 0xa4, 0xfd, 0xd6, 0x36, 0x54, 0xe4,
|
||||
0x35, 0xa2, 0x91, 0x33, 0xca, 0xce, 0x74, 0x23, 0xe2, 0xdb, 0xfa, 0xc3, 0x80, 0xe6, 0xf3, 0x90,
|
||||
0x7b, 0x43, 0xcf, 0xa1, 0x7a, 0xf0, 0x17, 0xba, 0x35, 0xa1, 0xea, 0xc4, 0x48, 0x39, 0xba, 0x4a,
|
||||
0x31, 0x6d, 0x8a, 0xb1, 0x0f, 0x64, 0x36, 0xba, 0x4a, 0xac, 0xd4, 0x16, 0x59, 0x2e, 0xfa, 0x28,
|
||||
0xb2, 0x12, 0x99, 0xb4, 0x49, 0x0e, 0xa0, 0x1c, 0xfa, 0xae, 0x7c, 0x43, 0x8d, 0xbe, 0x95, 0x92,
|
||||
0xf1, 0x82, 0x9e, 0xa2, 0xab, 0x95, 0xf1, 0x51, 0x09, 0xe0, 0x21, 0xb3, 0x05, 0x5c, 0x64, 0x05,
|
||||
0x38, 0x91, 0xaf, 0xeb, 0x9a, 0x59, 0x01, 0x4e, 0xac, 0x4f, 0x61, 0xf3, 0xcb, 0xc0, 0xc5, 0x4b,
|
||||
0x74, 0xb5, 0xa0, 0x72, 0xc8, 0x5a, 0x50, 0xf1, 0x84, 0x5b, 0xf6, 0x59, 0xb1, 0x13, 0x23, 0x6d,
|
||||
0xbe, 0x34, 0x6b, 0xde, 0xfa, 0xdb, 0x80, 0xad, 0x2b, 0xef, 0x20, 0xb7, 0xa1, 0xe9, 0x8c, 0xe3,
|
||||
0x18, 0x03, 0x3e, 0x88, 0xe8, 0xa9, 0xa6, 0xad, 0xa1, 0x7c, 0x22, 0x8f, 0x6c, 0x43, 0x3d, 0xc0,
|
||||
0x4b, 0x15, 0x2f, 0x29, 0x92, 0xf0, 0x32, 0x09, 0xb6, 0xa0, 0xe2, 0x7b, 0x23, 0x8f, 0x4b, 0xf6,
|
||||
0x2a, 0x76, 0x62, 0x90, 0xfb, 0xb0, 0x92, 0x1d, 0xc9, 0xa9, 0x24, 0xf0, 0xea, 0xf9, 0xcd, 0x83,
|
||||
0xc9, 0x7d, 0x00, 0x9a, 0x56, 0xa8, 0x46, 0x7f, 0x27, 0x4d, 0x2d, 0x60, 0xc3, 0xce, 0xe0, 0xad,
|
||||
0x37, 0x25, 0xd8, 0x7c, 0x11, 0x32, 0xae, 0x01, 0x36, 0x7e, 0x33, 0x46, 0xc6, 0xc9, 0xff, 0x60,
|
||||
0x45, 0xa1, 0xa6, 0x83, 0xcc, 0x84, 0x34, 0x69, 0x96, 0xd6, 0x36, 0x2c, 0xab, 0x97, 0x99, 0x34,
|
||||
0xaa, 0x2c, 0xf2, 0xc9, 0xdc, 0x0b, 0xb8, 0x33, 0x93, 0x6f, 0xf1, 0x2a, 0xe9, 0xcb, 0xbd, 0x8c,
|
||||
0xce, 0xaf, 0x06, 0xd4, 0x53, 0x6f, 0xd1, 0x20, 0x0b, 0x5f, 0x44, 0xf9, 0x99, 0x96, 0x4e, 0x7c,
|
||||
0x93, 0x67, 0x50, 0x3d, 0x43, 0xea, 0xce, 0xae, 0xdd, 0xbf, 0xde, 0xb5, 0xdd, 0x2f, 0x92, 0xac,
|
||||
0x47, 0x81, 0x88, 0xea, 0x33, 0x3a, 0x87, 0xd0, 0xcc, 0x06, 0xc8, 0x3a, 0x94, 0xcf, 0x71, 0xaa,
|
||||
0xaa, 0x10, 0x9f, 0x42, 0xcd, 0x0b, 0xea, 0x8f, 0xb5, 0xcc, 0x89, 0x71, 0x58, 0xfa, 0xd0, 0xb0,
|
||||
0x8e, 0xa0, 0x95, 0xbf, 0x92, 0x45, 0x61, 0xc0, 0xc4, 0x22, 0x59, 0x66, 0x72, 0x77, 0xcb, 0x63,
|
||||
0x1a, 0xfd, 0x56, 0x5a, 0x61, 0x66, 0xaf, 0xdb, 0x0a, 0x63, 0xfd, 0x68, 0x00, 0x79, 0x82, 0xff,
|
||||
0x4e, 0x9a, 0x0f, 0xa0, 0x35, 0xf1, 0xf8, 0xd9, 0x60, 0x7e, 0x35, 0x8a, 0x52, 0x6b, 0xf6, 0xa6,
|
||||
0x88, 0x1d, 0xe7, 0x43, 0xe2, 0x5c, 0x99, 0x92, 0xae, 0xba, 0xb2, 0xc4, 0x36, 0x85, 0x53, 0x6d,
|
||||
0x39, 0x66, 0xc5, 0xb0, 0x99, 0x2b, 0x49, 0x35, 0xf6, 0x3e, 0xd4, 0xf4, 0xf5, 0xaa, 0xb5, 0x8d,
|
||||
0xb4, 0xb5, 0x14, 0x9c, 0x42, 0x32, 0x3c, 0x94, 0xae, 0xc1, 0xc3, 0x2f, 0x06, 0xb4, 0x9f, 0x20,
|
||||
0xcf, 0x2e, 0x2e, 0xcd, 0xc5, 0x01, 0xb4, 0x43, 0xdf, 0xcd, 0x75, 0x39, 0xcd, 0x3e, 0xcd, 0x56,
|
||||
0xe8, 0xbb, 0xb9, 0xd7, 0x23, 0x9f, 0xe1, 0x01, 0xb4, 0x03, 0x9c, 0x14, 0x65, 0x25, 0x4a, 0xb6,
|
||||
0x02, 0x9c, 0x2c, 0x66, 0x15, 0x3f, 0x5e, 0xbd, 0x44, 0x96, 0x32, 0x4b, 0xe4, 0x15, 0xdc, 0x5c,
|
||||
0xa8, 0x57, 0x11, 0xf5, 0x11, 0x34, 0x83, 0x8c, 0x5f, 0x91, 0x75, 0x23, 0xed, 0x3f, 0x97, 0x94,
|
||||
0x83, 0x5a, 0xfb, 0xb0, 0xfb, 0x8c, 0xc6, 0xe7, 0x59, 0xc4, 0x03, 0x66, 0x23, 0x75, 0x35, 0x19,
|
||||
0x05, 0xcb, 0xbc, 0xff, 0xa7, 0x01, 0x6b, 0x5a, 0x80, 0x97, 0x18, 0x5f, 0x78, 0x0e, 0x12, 0x0a,
|
||||
0xcd, 0xec, 0x74, 0x92, 0x9d, 0xb7, 0xbd, 0x93, 0xce, 0xee, 0x15, 0xd1, 0xa4, 0x21, 0xab, 0xf5,
|
||||
0xdd, 0x9b, 0xbf, 0x7e, 0x2e, 0xad, 0x5a, 0xf5, 0x9e, 0x56, 0xf7, 0xd0, 0xb8, 0x47, 0xce, 0xa1,
|
||||
0x91, 0x19, 0x13, 0xb2, 0x9d, 0x9e, 0xb1, 0x38, 0xcf, 0x9d, 0x9d, 0xe2, 0xa0, 0x3a, 0xff, 0xb6,
|
||||
0x3c, 0x7f, 0x9b, 0x6c, 0xa5, 0xe7, 0xf7, 0x5e, 0xe7, 0xc6, 0xff, 0xdb, 0xfe, 0xf7, 0x25, 0xd8,
|
||||
0xcc, 0xb2, 0xa2, 0xfb, 0x64, 0xb0, 0x36, 0x27, 0x03, 0xb9, 0x95, 0xbd, 0xab, 0x60, 0xa0, 0x3a,
|
||||
0x7b, 0x57, 0x03, 0x54, 0x41, 0xbb, 0xb2, 0xa0, 0x9b, 0xe4, 0x46, 0x2f, 0xab, 0x0e, 0xeb, 0xbd,
|
||||
0x96, 0xc5, 0x90, 0x09, 0xb4, 0x8b, 0x55, 0x22, 0xb3, 0x2d, 0xf8, 0x56, 0x19, 0x3b, 0xed, 0x85,
|
||||
0x3f, 0x61, 0x8f, 0xc4, 0x5f, 0x67, 0x7d, 0xf1, 0xbd, 0xe2, 0x8b, 0x1f, 0xd6, 0xbf, 0xd6, 0xff,
|
||||
0xcc, 0x4f, 0x96, 0x65, 0xe6, 0xfe, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6e, 0xc1, 0x1d, 0xff,
|
||||
0xb8, 0x0b, 0x00, 0x00,
|
||||
}
|
||||
|
@ -112,8 +112,8 @@ func request_NotificationService_GetNotification_0(ctx context.Context, marshale
|
||||
|
||||
}
|
||||
|
||||
func request_NotificationService_DeleteNotification_0(ctx context.Context, marshaler runtime.Marshaler, client NotificationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq DeleteNotificationRequest
|
||||
func request_NotificationService_MarkNotificationAsRead_0(ctx context.Context, marshaler runtime.Marshaler, client NotificationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq MarkNotificationAsReadRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
var (
|
||||
@ -134,7 +134,7 @@ func request_NotificationService_DeleteNotification_0(ctx context.Context, marsh
|
||||
return nil, metadata, err
|
||||
}
|
||||
|
||||
msg, err := client.DeleteNotification(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
|
||||
msg, err := client.MarkNotificationAsRead(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
@ -301,7 +301,7 @@ func RegisterNotificationServiceHandler(ctx context.Context, mux *runtime.ServeM
|
||||
|
||||
})
|
||||
|
||||
mux.Handle("DELETE", pattern_NotificationService_DeleteNotification_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
mux.Handle("DELETE", pattern_NotificationService_MarkNotificationAsRead_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
defer cancel()
|
||||
if cn, ok := w.(http.CloseNotifier); ok {
|
||||
@ -319,14 +319,14 @@ func RegisterNotificationServiceHandler(ctx context.Context, mux *runtime.ServeM
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := request_NotificationService_DeleteNotification_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
resp, md, err := request_NotificationService_MarkNotificationAsRead_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_NotificationService_DeleteNotification_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
forward_NotificationService_MarkNotificationAsRead_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
@ -336,11 +336,11 @@ func RegisterNotificationServiceHandler(ctx context.Context, mux *runtime.ServeM
|
||||
var (
|
||||
pattern_NotificationService_GetNotification_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 1, 0, 4, 1, 5, 1}, []string{"notifications", "name"}, ""))
|
||||
|
||||
pattern_NotificationService_DeleteNotification_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 1, 0, 4, 1, 5, 1}, []string{"notifications", "name"}, ""))
|
||||
pattern_NotificationService_MarkNotificationAsRead_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 1, 0, 4, 1, 5, 1}, []string{"notifications", "name"}, ""))
|
||||
)
|
||||
|
||||
var (
|
||||
forward_NotificationService_GetNotification_0 = runtime.ForwardResponseMessage
|
||||
|
||||
forward_NotificationService_DeleteNotification_0 = runtime.ForwardResponseMessage
|
||||
forward_NotificationService_MarkNotificationAsRead_0 = runtime.ForwardResponseMessage
|
||||
)
|
||||
|
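
The gateway registration above keeps the HTTP shape of the old DeleteNotification route: a DELETE on /notifications/{name} now forwards to MarkNotificationAsRead. Here is a minimal net/http sketch of calling that route; the base URL is an assumption, and fmt, net/http and net/url are the only imports needed.

// markReadViaGateway acknowledges a notification through the REST gateway
// rather than the gRPC endpoint.
func markReadViaGateway(baseURL, name string) error {
	req, err := http.NewRequest(http.MethodDelete, baseURL+"/notifications/"+url.PathEscape(name), nil)
	if err != nil {
		return err
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %q from gateway", resp.Status)
	}
	return nil
}
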
@ -18,6 +18,7 @@ option go_package = "clairpb";
|
||||
package clairpb;
|
||||
import "google/api/annotations.proto";
|
||||
import "google/protobuf/empty.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
message Vulnerability {
|
||||
string name = 1;
|
||||
@ -26,60 +27,72 @@ message Vulnerability {
|
||||
string link = 4;
|
||||
string severity = 5;
|
||||
string metadata = 6;
|
||||
// fixed_by exists when the vulnerability is nested under a feature.
string fixed_by = 7;
|
||||
repeated Feature fixed_in_features = 8;
|
||||
// affected_versions exists when the vulnerability is nested under a notification.
repeated Feature affected_versions = 8;
|
||||
}
|
||||
|
||||
message Feature {
|
||||
message ClairStatus {
|
||||
// listers and detectors are the processors implemented in this Clair instance
// and used to scan ancestries.
repeated string listers = 1;
|
||||
repeated string detectors = 2;
|
||||
google.protobuf.Timestamp last_update_time = 3;
|
||||
}
|
||||
|
||||
message Feature {
string name = 1;
|
||||
string namespace_name = 2;
|
||||
string version = 3;
|
||||
string version_format = 4;
|
||||
string added_by = 5;
|
||||
repeated Vulnerability vulnerabilities = 6;
|
||||
repeated Vulnerability vulnerabilities = 5;
|
||||
}
|
||||
|
||||
message Ancestry {
|
||||
string name = 1;
|
||||
int32 engine_version = 2;
|
||||
repeated Feature features = 2;
|
||||
repeated Layer layers = 3;
|
||||
}
|
||||
|
||||
message LayersIntroducingVulnerabilty {
|
||||
Vulnerability vulnerability = 1;
|
||||
repeated OrderedLayerName layers = 2;
|
||||
}
|
||||
|
||||
message OrderedLayerName {
|
||||
int32 index = 1;
|
||||
string layer_name = 2;
|
||||
// scanned_listers and scanned_detectors are the processors used to scan this
// ancestry; they may differ from the listers and detectors in ClairStatus,
// since the ancestry could have been scanned by a previous version of Clair.
repeated string scanned_listers = 4;
|
||||
repeated string scanned_detectors = 5;
|
||||
}
|
||||
|
||||
message Layer {
|
||||
string name = 1;
|
||||
repeated string namespace_names = 2;
|
||||
string hash = 1;
|
||||
}
|
||||
|
||||
message Notification {
|
||||
string name = 1;
|
||||
string created = 2;
|
||||
string notified = 3;
|
||||
string deleted = 4;
|
||||
int32 limit = 5;
|
||||
Page page = 6;
|
||||
string deleted = 4;
|
||||
PagedVulnerableAncestries old = 5;
|
||||
PagedVulnerableAncestries new = 6;
|
||||
}
|
||||
|
||||
message Page {
|
||||
string this_token = 1;
|
||||
string next_token = 2;
|
||||
LayersIntroducingVulnerabilty old = 3;
|
||||
LayersIntroducingVulnerabilty new = 4;
|
||||
message IndexedAncestryName {
|
||||
// index is unique to a name among all streams that are streamed simultaneously;
// it is increasing and larger than every index in the previous page of the same stream.
int32 index = 1;
|
||||
string name = 2;
|
||||
}
|
||||
|
||||
message PagedVulnerableAncestries {
|
||||
string current_page = 1;
|
||||
// if next_page is empty, it signals the end of all pages.
|
||||
string next_page = 2;
|
||||
int32 limit = 3;
|
||||
Vulnerability vulnerability = 4;
|
||||
repeated IndexedAncestryName ancestries = 5;
|
||||
}
|
||||
|
||||
message PostAncestryRequest {
|
||||
message PostLayer {
|
||||
string name = 1;
|
||||
string hash = 1;
|
||||
string path = 2;
|
||||
map<string, string> headers = 3;
|
||||
}
|
||||
@ -89,7 +102,7 @@ message PostAncestryRequest {
|
||||
}
|
||||
|
||||
message PostAncestryResponse {
|
||||
int32 engine_version = 1;
|
||||
ClairStatus status = 1;
|
||||
}
|
||||
|
||||
message GetAncestryRequest {
|
||||
@ -100,25 +113,25 @@ message GetAncestryRequest {
|
||||
|
||||
message GetAncestryResponse {
|
||||
Ancestry ancestry = 1;
|
||||
repeated Feature features = 2;
|
||||
ClairStatus status = 2;
|
||||
}
|
||||
|
||||
message GetNotificationRequest {
|
||||
string page = 1;
|
||||
int32 limit = 2;
|
||||
string name = 3;
|
||||
// if the vulnerability_page is empty, it implies the first page.
|
||||
string old_vulnerability_page = 1;
|
||||
string new_vulnerability_page = 2;
|
||||
int32 limit = 3;
|
||||
string name = 4;
|
||||
}
|
||||
|
||||
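
Because the old and new vulnerabilities are paged independently, a poller carries two cursors. A hedged Go sketch of the request side, assuming the generated clairpb types; empty page strings request the first page of each stream, and the limit value is illustrative.

// buildNotificationRequest assembles a paged notification request. Both cursors
// start empty; each response's NextPage value is fed back in until it is empty.
func buildNotificationRequest(name, oldCursor, newCursor string) *clairpb.GetNotificationRequest {
	return &clairpb.GetNotificationRequest{
		Name:                 name,
		Limit:                100,
		OldVulnerabilityPage: oldCursor,
		NewVulnerabilityPage: newCursor,
	}
}
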
message GetNotificationResponse {
|
||||
Notification notification = 1;
|
||||
}
|
||||
|
||||
message DeleteNotificationRequest {
|
||||
message MarkNotificationAsReadRequest {
|
||||
string name = 1;
|
||||
}
|
||||
|
||||
|
||||
|
||||
service AncestryService{
|
||||
rpc PostAncestry(PostAncestryRequest) returns (PostAncestryResponse) {
|
||||
option (google.api.http) = {
|
||||
@ -141,7 +154,7 @@ service NotificationService{
|
||||
};
|
||||
}
|
||||
|
||||
rpc DeleteNotification(DeleteNotificationRequest) returns (google.protobuf.Empty) {
|
||||
rpc MarkNotificationAsRead(MarkNotificationAsReadRequest) returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
delete: "/notifications/{name}"
|
||||
};
|
||||
|
@ -98,7 +98,14 @@
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "page",
|
||||
"name": "old_vulnerability_page",
|
||||
"description": "if the vulnerability_page is empty, it implies the first page.",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "new_vulnerability_page",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"type": "string"
|
||||
@ -116,7 +123,7 @@
|
||||
]
|
||||
},
|
||||
"delete": {
|
||||
"operationId": "DeleteNotification",
|
||||
"operationId": "MarkNotificationAsRead",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "",
|
||||
@ -143,7 +150,7 @@
|
||||
"PostAncestryRequestPostLayer": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"hash": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
@ -163,15 +170,52 @@
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"engine_version": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
"features": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbFeature"
|
||||
}
|
||||
},
|
||||
"layers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbLayer"
|
||||
}
|
||||
},
|
||||
"scanned_listers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "scanned_listers and scanned_detectors are used to scan this ancestry, it\nmay be different from listers and detectors in ClairStatus since the\nancestry could be scanned by previous version of Clair."
|
||||
},
|
||||
"scanned_detectors": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbClairStatus": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"listers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": "listers and detectors are processors implemented in this Clair and used to\nscan ancestries"
|
||||
},
|
||||
"detectors": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"last_update_time": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -190,9 +234,6 @@
|
||||
"version_format": {
|
||||
"type": "string"
|
||||
},
|
||||
"added_by": {
|
||||
"type": "string"
|
||||
},
|
||||
"vulnerabilities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -207,11 +248,8 @@
|
||||
"ancestry": {
|
||||
"$ref": "#/definitions/clairpbAncestry"
|
||||
},
|
||||
"features": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbFeature"
|
||||
}
|
||||
"status": {
|
||||
"$ref": "#/definitions/clairpbClairStatus"
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -223,31 +261,24 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbLayer": {
|
||||
"clairpbIndexedAncestryName": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"index": {
|
||||
"type": "integer",
|
||||
"format": "int32",
|
||||
"description": "index is unique to name in all streams simultaneously streamed, increasing\nand larger than all indexes in previous page in same stream."
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"namespace_names": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbLayersIntroducingVulnerabilty": {
|
||||
"clairpbLayer": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vulnerability": {
|
||||
"$ref": "#/definitions/clairpbVulnerability"
|
||||
},
|
||||
"layers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbOrderedLayerName"
|
||||
}
|
||||
"hash": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -266,41 +297,36 @@
|
||||
"deleted": {
|
||||
"type": "string"
|
||||
},
|
||||
"old": {
|
||||
"$ref": "#/definitions/clairpbPagedVulnerableAncestries"
|
||||
},
|
||||
"new": {
|
||||
"$ref": "#/definitions/clairpbPagedVulnerableAncestries"
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbPagedVulnerableAncestries": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"current_page": {
|
||||
"type": "string"
|
||||
},
|
||||
"next_page": {
|
||||
"type": "string",
|
||||
"description": "if next_page is empty, it signals the end of all pages."
|
||||
},
|
||||
"limit": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
},
|
||||
"page": {
|
||||
"$ref": "#/definitions/clairpbPage"
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbOrderedLayerName": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"index": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
"vulnerability": {
|
||||
"$ref": "#/definitions/clairpbVulnerability"
|
||||
},
|
||||
"layer_name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"clairpbPage": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"this_token": {
|
||||
"type": "string"
|
||||
},
|
||||
"next_token": {
|
||||
"type": "string"
|
||||
},
|
||||
"old": {
|
||||
"$ref": "#/definitions/clairpbLayersIntroducingVulnerabilty"
|
||||
},
|
||||
"new": {
|
||||
"$ref": "#/definitions/clairpbLayersIntroducingVulnerabilty"
|
||||
"ancestries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbIndexedAncestryName"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -324,9 +350,8 @@
|
||||
"clairpbPostAncestryResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"engine_version": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
"status": {
|
||||
"$ref": "#/definitions/clairpbClairStatus"
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -352,13 +377,15 @@
|
||||
"type": "string"
|
||||
},
|
||||
"fixed_by": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"description": "fixed_by exists when vulnerability is under feature."
|
||||
},
|
||||
"fixed_in_features": {
|
||||
"affected_versions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/clairpbFeature"
|
||||
}
|
||||
},
|
||||
"description": "affected_versions exists when vulnerability is under notification."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -18,76 +18,81 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/coreos/clair/api/token"
|
||||
"github.com/coreos/clair/database"
|
||||
"github.com/coreos/clair/ext/versionfmt"
|
||||
)
|
||||
|
||||
func NotificationFromDatabaseModel(dbNotification database.VulnerabilityNotification, limit int, pageToken string, nextPage database.VulnerabilityNotificationPageNumber, key string) (*Notification, error) {
|
||||
var oldVuln *LayersIntroducingVulnerabilty
|
||||
if dbNotification.OldVulnerability != nil {
|
||||
v, err := LayersIntroducingVulnerabiltyFromDatabaseModel(*dbNotification.OldVulnerability)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
oldVuln = v
|
||||
// PagedVulnerableAncestriesFromDatabaseModel converts database
|
||||
// PagedVulnerableAncestries to api PagedVulnerableAncestries and assigns
|
||||
// indexes to ancestries.
|
||||
func PagedVulnerableAncestriesFromDatabaseModel(dbVuln *database.PagedVulnerableAncestries) (*PagedVulnerableAncestries, error) {
|
||||
if dbVuln == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var newVuln *LayersIntroducingVulnerabilty
|
||||
if dbNotification.NewVulnerability != nil {
|
||||
v, err := LayersIntroducingVulnerabiltyFromDatabaseModel(*dbNotification.NewVulnerability)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
newVuln = v
|
||||
}
|
||||
|
||||
var nextPageStr string
|
||||
if nextPage != database.NoVulnerabilityNotificationPage {
|
||||
nextPageBytes, _ := token.Marshal(nextPage, key)
|
||||
nextPageStr = string(nextPageBytes)
|
||||
}
|
||||
|
||||
var created, notified, deleted string
|
||||
if !dbNotification.Created.IsZero() {
|
||||
created = fmt.Sprintf("%d", dbNotification.Created.Unix())
|
||||
}
|
||||
if !dbNotification.Notified.IsZero() {
|
||||
notified = fmt.Sprintf("%d", dbNotification.Notified.Unix())
|
||||
}
|
||||
if !dbNotification.Deleted.IsZero() {
|
||||
deleted = fmt.Sprintf("%d", dbNotification.Deleted.Unix())
|
||||
}
|
||||
|
||||
return &Notification{
|
||||
Name: dbNotification.Name,
|
||||
Created: created,
|
||||
Notified: notified,
|
||||
Deleted: deleted,
|
||||
Limit: int32(limit),
|
||||
Page: &Page{
|
||||
ThisToken: pageToken,
|
||||
NextToken: nextPageStr,
|
||||
Old: oldVuln,
|
||||
New: newVuln,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func LayersIntroducingVulnerabiltyFromDatabaseModel(dbVuln database.Vulnerability) (*LayersIntroducingVulnerabilty, error) {
|
||||
vuln, err := VulnerabilityFromDatabaseModel(dbVuln, true)
|
||||
vuln, err := VulnerabilityFromDatabaseModel(dbVuln.Vulnerability)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var orderedLayers []*OrderedLayerName
|
||||
|
||||
return &LayersIntroducingVulnerabilty{
|
||||
next := ""
|
||||
if !dbVuln.End {
|
||||
next = string(dbVuln.Next)
|
||||
}
|
||||
|
||||
vulnAncestry := PagedVulnerableAncestries{
|
||||
Vulnerability: vuln,
|
||||
Layers: orderedLayers,
|
||||
}, nil
|
||||
CurrentPage: string(dbVuln.Current),
|
||||
NextPage: next,
|
||||
Limit: int32(dbVuln.Limit),
|
||||
}
|
||||
|
||||
for index, ancestryName := range dbVuln.Affected {
|
||||
indexedAncestry := IndexedAncestryName{
|
||||
Name: ancestryName,
|
||||
Index: int32(index),
|
||||
}
|
||||
vulnAncestry.Ancestries = append(vulnAncestry.Ancestries, &indexedAncestry)
|
||||
}
|
||||
|
||||
return &vulnAncestry, nil
|
||||
}
|
||||
|
||||
func VulnerabilityFromDatabaseModel(dbVuln database.Vulnerability, withFixedIn bool) (*Vulnerability, error) {
|
||||
// NotificationFromDatabaseModel converts a database notification, together with
// the paged vulnerable ancestries of its old and new vulnerabilities, into an API notification.
func NotificationFromDatabaseModel(dbNotification database.VulnerabilityNotificationWithVulnerable) (*Notification, error) {
|
||||
var (
|
||||
noti Notification
|
||||
err error
|
||||
)
|
||||
|
||||
noti.Name = dbNotification.Name
|
||||
if !dbNotification.Created.IsZero() {
|
||||
noti.Created = fmt.Sprintf("%d", dbNotification.Created.Unix())
|
||||
}
|
||||
|
||||
if !dbNotification.Notified.IsZero() {
|
||||
noti.Notified = fmt.Sprintf("%d", dbNotification.Notified.Unix())
|
||||
}
|
||||
|
||||
if !dbNotification.Deleted.IsZero() {
|
||||
noti.Deleted = fmt.Sprintf("%d", dbNotification.Deleted.Unix())
|
||||
}
|
||||
|
||||
noti.Old, err = PagedVulnerableAncestriesFromDatabaseModel(dbNotification.Old)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
noti.New, err = PagedVulnerableAncestriesFromDatabaseModel(dbNotification.New)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &noti, nil
}
|
||||
|
||||
func VulnerabilityFromDatabaseModel(dbVuln database.Vulnerability) (*Vulnerability, error) {
|
||||
metaString := ""
|
||||
if dbVuln.Metadata != nil {
|
||||
metadataByte, err := json.Marshal(dbVuln.Metadata)
|
||||
@ -97,69 +102,54 @@ func VulnerabilityFromDatabaseModel(dbVuln database.Vulnerability, withFixedIn b
|
||||
metaString = string(metadataByte)
|
||||
}
|
||||
|
||||
vuln := Vulnerability{
|
||||
return &Vulnerability{
|
||||
Name: dbVuln.Name,
|
||||
NamespaceName: dbVuln.Namespace.Name,
|
||||
Description: dbVuln.Description,
|
||||
Link: dbVuln.Link,
|
||||
Severity: string(dbVuln.Severity),
|
||||
Metadata: metaString,
|
||||
}
|
||||
|
||||
if dbVuln.FixedBy != versionfmt.MaxVersion {
|
||||
vuln.FixedBy = dbVuln.FixedBy
|
||||
}
|
||||
|
||||
if withFixedIn {
|
||||
for _, dbFeatureVersion := range dbVuln.FixedIn {
|
||||
f, err := FeatureFromDatabaseModel(dbFeatureVersion, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
vuln.FixedInFeatures = append(vuln.FixedInFeatures, f)
|
||||
}
|
||||
}
|
||||
|
||||
return &vuln, nil
|
||||
}, nil
|
||||
}
|
||||
|
||||
func LayerFromDatabaseModel(dbLayer database.Layer) *Layer {
|
||||
layer := Layer{
|
||||
Name: dbLayer.Name,
|
||||
}
|
||||
for _, ns := range dbLayer.Namespaces {
|
||||
layer.NamespaceNames = append(layer.NamespaceNames, ns.Name)
|
||||
func VulnerabilityWithFixedInFromDatabaseModel(dbVuln database.VulnerabilityWithFixedIn) (*Vulnerability, error) {
|
||||
vuln, err := VulnerabilityFromDatabaseModel(dbVuln.Vulnerability)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
vuln.FixedBy = dbVuln.FixedInVersion
|
||||
return vuln, nil
|
||||
}
|
||||
|
||||
// AncestryFromDatabaseModel converts database ancestry to api ancestry.
|
||||
func AncestryFromDatabaseModel(dbAncestry database.Ancestry) *Ancestry {
|
||||
ancestry := &Ancestry{
|
||||
Name: dbAncestry.Name,
|
||||
}
|
||||
for _, layer := range dbAncestry.Layers {
|
||||
ancestry.Layers = append(ancestry.Layers, LayerFromDatabaseModel(layer))
|
||||
}
|
||||
return ancestry
|
||||
}
|
||||
|
||||
// LayerFromDatabaseModel converts database layer to api layer.
|
||||
func LayerFromDatabaseModel(dbLayer database.Layer) *Layer {
|
||||
layer := Layer{Hash: dbLayer.Hash}
|
||||
return &layer
|
||||
}
|
||||
|
||||
func FeatureFromDatabaseModel(fv database.FeatureVersion, withVulnerabilities bool) (*Feature, error) {
|
||||
version := fv.Version
|
||||
// NamespacedFeatureFromDatabaseModel converts database namespacedFeature to api Feature.
|
||||
func NamespacedFeatureFromDatabaseModel(feature database.NamespacedFeature) *Feature {
|
||||
version := feature.Feature.Version
|
||||
if version == versionfmt.MaxVersion {
|
||||
version = "None"
|
||||
}
|
||||
f := &Feature{
|
||||
Name: fv.Feature.Name,
|
||||
NamespaceName: fv.Feature.Namespace.Name,
|
||||
VersionFormat: fv.Feature.Namespace.VersionFormat,
|
||||
|
||||
return &Feature{
|
||||
Name: feature.Feature.Name,
|
||||
NamespaceName: feature.Namespace.Name,
|
||||
VersionFormat: feature.Namespace.VersionFormat,
|
||||
Version: version,
|
||||
AddedBy: fv.AddedBy.Name,
|
||||
}
|
||||
|
||||
if withVulnerabilities {
|
||||
for _, dbVuln := range fv.AffectedBy {
|
||||
// VulnerabilityFromDatabaseModel should be called without FixedIn,
|
||||
// Otherwise it might cause infinite loop
|
||||
vul, err := VulnerabilityFromDatabaseModel(dbVuln, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
f.Vulnerabilities = append(f.Vulnerabilities, vul)
|
||||
}
|
||||
}
|
||||
|
||||
return f, nil
|
||||
}
|
||||
|
301
api/v2/rpc.go
@ -17,24 +17,21 @@ package v2
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/golang/protobuf/ptypes"
|
||||
google_protobuf1 "github.com/golang/protobuf/ptypes/empty"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"golang.org/x/net/context"
|
||||
"google.golang.org/grpc/codes"
|
||||
"google.golang.org/grpc/status"
|
||||
|
||||
"github.com/coreos/clair"
|
||||
"github.com/coreos/clair/api/token"
|
||||
pb "github.com/coreos/clair/api/v2/clairpb"
|
||||
"github.com/coreos/clair/database"
|
||||
"github.com/coreos/clair/pkg/commonerr"
|
||||
"github.com/coreos/clair/pkg/tarutil"
|
||||
)
|
||||
|
||||
// NotificationServer implements NotificationService interface for serving RPC.
|
||||
type NotificationServer struct {
|
||||
Store database.Datastore
|
||||
PaginationKey string
|
||||
Store database.Datastore
|
||||
}
|
||||
|
||||
// AncestryServer implements AncestryService interface for serving RPC.
|
||||
@ -46,73 +43,145 @@ type AncestryServer struct {
|
||||
func (s *AncestryServer) PostAncestry(ctx context.Context, req *pb.PostAncestryRequest) (*pb.PostAncestryResponse, error) {
|
||||
ancestryName := req.GetAncestryName()
|
||||
if ancestryName == "" {
|
||||
return nil, status.Error(codes.InvalidArgument, "Failed to provide proper ancestry name")
|
||||
return nil, status.Error(codes.InvalidArgument, "ancestry name should not be empty")
|
||||
}
|
||||
|
||||
layers := req.GetLayers()
|
||||
if len(layers) == 0 {
|
||||
return nil, status.Error(codes.InvalidArgument, "At least one layer should be provided for an ancestry")
|
||||
return nil, status.Error(codes.InvalidArgument, "ancestry should have at least one layer")
|
||||
}
|
||||
|
||||
var currentName, parentName, rootName string
|
||||
for i, layer := range layers {
|
||||
ancestryFormat := req.GetFormat()
|
||||
if ancestryFormat == "" {
|
||||
return nil, status.Error(codes.InvalidArgument, "ancestry format should not be empty")
|
||||
}
|
||||
|
||||
ancestryLayers := []clair.LayerRequest{}
|
||||
for _, layer := range layers {
|
||||
if layer == nil {
|
||||
err := status.Error(codes.InvalidArgument, "Failed to provide layer")
|
||||
return nil, s.rollBackOnError(err, currentName, rootName)
|
||||
err := status.Error(codes.InvalidArgument, "ancestry layer is invalid")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// TODO(keyboardnerd): after altering the database to support ancestry,
|
||||
// we should use the ancestry name and index as key instead of
|
||||
// the amalgamation of ancestry name and index
// Hack: layer name is [ancestryName]-[index] except the tail layer,
|
||||
// tail layer name is [ancestryName]
|
||||
if i == len(layers)-1 {
|
||||
currentName = ancestryName
|
||||
} else {
|
||||
currentName = fmt.Sprintf("%s-%d", ancestryName, i)
|
||||
if layer.GetHash() == "" {
|
||||
return nil, status.Error(codes.InvalidArgument, "ancestry layer hash should not be empty")
|
||||
}
|
||||
|
||||
// if rootName is unset, this is the first iteration over the layers and
|
||||
// the current layer is the root of the ancestry
|
||||
if rootName == "" {
|
||||
rootName = currentName
|
||||
if layer.GetPath() == "" {
|
||||
return nil, status.Error(codes.InvalidArgument, "ancestry layer path should not be empty")
|
||||
}
|
||||
|
||||
err := clair.ProcessLayer(s.Store, req.GetFormat(), currentName, parentName, layer.GetPath(), layer.GetHeaders())
|
||||
if err != nil {
|
||||
return nil, s.rollBackOnError(err, currentName, rootName)
|
||||
}
|
||||
|
||||
// Now that the current layer is processed, set the parentName for the
|
||||
// next iteration.
|
||||
parentName = currentName
|
||||
ancestryLayers = append(ancestryLayers, clair.LayerRequest{
|
||||
Hash: layer.Hash,
|
||||
Headers: layer.Headers,
|
||||
Path: layer.Path,
|
||||
})
|
||||
}
|
||||
|
||||
return &pb.PostAncestryResponse{
|
||||
EngineVersion: clair.Version,
|
||||
}, nil
|
||||
err := clair.ProcessAncestry(s.Store, ancestryFormat, ancestryName, ancestryLayers)
|
||||
if err != nil {
|
||||
return nil, status.Error(codes.Internal, "ancestry is failed to be processed: "+err.Error())
|
||||
}
|
||||
|
||||
clairStatus, err := s.getClairStatus()
|
||||
if err != nil {
|
||||
return nil, status.Error(codes.Internal, err.Error())
|
||||
}
|
||||
|
||||
return &pb.PostAncestryResponse{Status: clairStatus}, nil
|
||||
}
|
||||
|
||||
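
On the caller side, the whole image is now submitted in one request rather than layer by layer. A hedged sketch using the pb types above; the format string, layer digest and blob URL are illustrative assumptions, NewAncestryServiceClient is the usual grpc-go generated constructor, and the pb and grpc imports are assumed.

// postSingleLayerAncestry submits a one-layer ancestry for scanning.
func postSingleLayerAncestry(ctx context.Context, conn *grpc.ClientConn) error {
	client := pb.NewAncestryServiceClient(conn)
	_, err := client.PostAncestry(ctx, &pb.PostAncestryRequest{
		AncestryName: "alpine-3.7",
		Format:       "Docker",
		Layers: []*pb.PostAncestryRequest_PostLayer{{
			Hash:    "sha256:<layer-digest>", // placeholder digest
			Path:    "https://registry.example.com/layers/alpine-3.7-0",
			Headers: map[string]string{"Authorization": "Bearer <registry token>"},
		}},
	})
	return err
}
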
func (s *AncestryServer) getClairStatus() (*pb.ClairStatus, error) {
status := &pb.ClairStatus{
Listers: clair.Processors.Listers,
Detectors: clair.Processors.Detectors,
}

t, firstUpdate, err := clair.GetLastUpdateTime(s.Store)
if err != nil {
return nil, err
}
if firstUpdate {
return status, nil
}

status.LastUpdateTime, err = ptypes.TimestampProto(t)
if err != nil {
return nil, err
}
return status, nil
}

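Editor's note: getClairStatus above reports the configured listers and detectors and, once the first vulnerability update has completed, the last update time as a protobuf timestamp. A small hedged sketch of reading that field back on the consumer side (the inverse of the ptypes.TimestampProto call above); the generated package import path is an assumption.

package example

import (
	"time"

	"github.com/golang/protobuf/ptypes"

	pb "github.com/coreos/clair/api/v2/clairpb" // assumed import path
)

// lastUpdate converts ClairStatus.LastUpdateTime back into a time.Time; it is
// the client-side inverse of the ptypes.TimestampProto call in getClairStatus.
func lastUpdate(s *pb.ClairStatus) (time.Time, bool, error) {
	// The field stays unset until the first updater run has completed.
	if s.GetLastUpdateTime() == nil {
		return time.Time{}, false, nil
	}
	t, err := ptypes.Timestamp(s.GetLastUpdateTime())
	if err != nil {
		return time.Time{}, false, err
	}
	return t, true, nil
}
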
// GetAncestry implements retrieving an ancestry via the Clair gRPC service.
func (s *AncestryServer) GetAncestry(ctx context.Context, req *pb.GetAncestryRequest) (*pb.GetAncestryResponse, error) {
if req.GetAncestryName() == "" {
return nil, status.Errorf(codes.InvalidArgument, "invalid get ancestry request")
return nil, status.Errorf(codes.InvalidArgument, "ancestry name should not be empty")
}

// TODO(keyboardnerd): after altering the database to support ancestry, this
// function is iteratively querying for for r.GetIndex() th parent of the
// requested layer until the indexed layer is found or index is out of bound
// this is a hack and will be replaced with one query
ancestry, features, err := s.getAncestry(req.GetAncestryName(), req.GetWithFeatures(), req.GetWithVulnerabilities())
if err == commonerr.ErrNotFound {
return nil, status.Error(codes.NotFound, err.Error())
} else if err != nil {
tx, err := s.Store.Begin()
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
defer tx.Rollback()

ancestry, _, ok, err := tx.FindAncestry(req.GetAncestryName())
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
} else if !ok {
return nil, status.Error(codes.NotFound, fmt.Sprintf("requested ancestry '%s' is not found", req.GetAncestryName()))
}

pbAncestry := pb.AncestryFromDatabaseModel(ancestry)
if req.GetWithFeatures() || req.GetWithVulnerabilities() {
ancestryWFeature, ok, err := tx.FindAncestryFeatures(ancestry.Name)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

if !ok {
return nil, status.Error(codes.NotFound, fmt.Sprintf("requested ancestry '%s' is not found", req.GetAncestryName()))
}
pbAncestry.ScannedDetectors = ancestryWFeature.ProcessedBy.Detectors
pbAncestry.ScannedListers = ancestryWFeature.ProcessedBy.Listers

if req.GetWithVulnerabilities() {
featureVulnerabilities, err := tx.FindAffectedNamespacedFeatures(ancestryWFeature.Features)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

for _, fv := range featureVulnerabilities {
// Ensure that every feature can be found.
if !fv.Valid {
return nil, status.Error(codes.Internal, "ancestry feature is not found")
}

pbFeature := pb.NamespacedFeatureFromDatabaseModel(fv.NamespacedFeature)
for _, v := range fv.AffectedBy {
pbVuln, err := pb.VulnerabilityWithFixedInFromDatabaseModel(v)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
pbFeature.Vulnerabilities = append(pbFeature.Vulnerabilities, pbVuln)
}

pbAncestry.Features = append(pbAncestry.Features, pbFeature)
}
} else {
for _, f := range ancestryWFeature.Features {
pbAncestry.Features = append(pbAncestry.Features, pb.NamespacedFeatureFromDatabaseModel(f))
}
}
}

clairStatus, err := s.getClairStatus()
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

return &pb.GetAncestryResponse{
Ancestry: ancestry,
Features: features,
Status: clairStatus,
Ancestry: pbAncestry,
}, nil
}

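Editor's note: the rewritten GetAncestry above answers from the new ancestry tables and only loads features and vulnerabilities when the request asks for them (GetWithFeatures / GetWithVulnerabilities). A hedged client-side sketch of that flow; the generated client interface, import path, and field names are assumptions inferred from the handler, and the connection is assumed to be created as in the PostAncestry sketch earlier.

package example

import (
	"context"
	"fmt"

	pb "github.com/coreos/clair/api/v2/clairpb" // assumed import path
)

// showAncestry sketches a consumer of the GetAncestry RPC above. Field names
// follow the request getters used by the handler (GetAncestryName,
// GetWithFeatures, GetWithVulnerabilities).
func showAncestry(ctx context.Context, client pb.AncestryServiceClient) error {
	resp, err := client.GetAncestry(ctx, &pb.GetAncestryRequest{
		AncestryName:        "alpine-3.6",
		WithFeatures:        true,
		WithVulnerabilities: true,
	})
	if err != nil {
		return err
	}

	// Features (and their vulnerabilities) are only populated because the
	// request asked for them; otherwise the handler skips those queries.
	for _, f := range resp.GetAncestry().GetFeatures() {
		fmt.Printf("%v (%d known vulnerabilities)\n", f, len(f.GetVulnerabilities()))
	}
	return nil
}
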
@ -120,36 +189,35 @@ func (s *AncestryServer) GetAncestry(ctx context.Context, req *pb.GetAncestryReq
// service.
func (s *NotificationServer) GetNotification(ctx context.Context, req *pb.GetNotificationRequest) (*pb.GetNotificationResponse, error) {
if req.GetName() == "" {
return nil, status.Error(codes.InvalidArgument, "Failed to provide notification name")
return nil, status.Error(codes.InvalidArgument, "notification name should not be empty")
}

if req.GetLimit() <= 0 {
return nil, status.Error(codes.InvalidArgument, "Failed to provide page limit")
return nil, status.Error(codes.InvalidArgument, "notification page limit should not be empty or less than 1")
}

page := database.VulnerabilityNotificationFirstPage
pageToken := req.GetPage()
if pageToken != "" {
err := token.Unmarshal(pageToken, s.PaginationKey, &page)
if err != nil {
return nil, status.Errorf(codes.InvalidArgument, "Invalid page format %s", err.Error())
}
} else {
pageTokenBytes, err := token.Marshal(page, s.PaginationKey)
if err != nil {
return nil, status.Errorf(codes.InvalidArgument, "Failed to marshal token: %s", err.Error())
}
pageToken = string(pageTokenBytes)
tx, err := s.Store.Begin()
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
defer tx.Rollback()

dbNotification, nextPage, err := s.Store.GetNotification(req.GetName(), int(req.GetLimit()), page)
if err == commonerr.ErrNotFound {
return nil, status.Error(codes.NotFound, err.Error())
} else if err != nil {
dbNotification, ok, err := tx.FindVulnerabilityNotification(
req.GetName(),
int(req.GetLimit()),
database.PageNumber(req.GetOldVulnerabilityPage()),
database.PageNumber(req.GetNewVulnerabilityPage()),
)

if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

notification, err := pb.NotificationFromDatabaseModel(dbNotification, int(req.GetLimit()), pageToken, nextPage, s.PaginationKey)
if !ok {
return nil, status.Error(codes.NotFound, fmt.Sprintf("requested notification '%s' is not found", req.GetName()))
}

notification, err := pb.NotificationFromDatabaseModel(dbNotification)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
@ -157,100 +225,29 @@ func (s *NotificationServer) GetNotification(ctx context.Context, req *pb.GetNot
return &pb.GetNotificationResponse{Notification: notification}, nil
}

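Editor's note: pagination in GetNotification changes here from opaque tokens built with the PaginationKey (token.Marshal / token.Unmarshal) to plain page values carried in the request (GetOldVulnerabilityPage / GetNewVulnerabilityPage) and resolved through tx.FindVulnerabilityNotification inside a datastore session. A hedged client sketch under the new scheme; client constructor and field names are assumptions inferred from the handler, and the connection is assumed to be created as in the PostAncestry sketch earlier.

package example

import (
	"context"
	"fmt"

	pb "github.com/coreos/clair/api/v2/clairpb" // assumed import path
)

// firstNotificationPage sketches a consumer of the GetNotification RPC above.
// The notification name would typically come from Clair's webhook notifier.
func firstNotificationPage(ctx context.Context, client pb.NotificationServiceClient) error {
	resp, err := client.GetNotification(ctx, &pb.GetNotificationRequest{
		Name:  "ec45ec87-bfc8-4129-a1c3-d2b82622175a", // example name
		Limit: 100,
		// OldVulnerabilityPage and NewVulnerabilityPage are left at their
		// zero values here (assumed to request the first page under the new
		// page-based scheme).
	})
	if err != nil {
		return err
	}
	fmt.Println(resp.GetNotification())
	return nil
}
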
// DeleteNotification implements deleting a notification via the Clair gRPC
// MarkNotificationAsRead implements deleting a notification via the Clair gRPC
// service.
func (s *NotificationServer) DeleteNotification(ctx context.Context, req *pb.DeleteNotificationRequest) (*google_protobuf1.Empty, error) {
func (s *NotificationServer) MarkNotificationAsRead(ctx context.Context, req *pb.MarkNotificationAsReadRequest) (*google_protobuf1.Empty, error) {
if req.GetName() == "" {
return nil, status.Error(codes.InvalidArgument, "Failed to provide notification name")
return nil, status.Error(codes.InvalidArgument, "notification name should not be empty")
}

err := s.Store.DeleteNotification(req.GetName())
tx, err := s.Store.Begin()
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

defer tx.Rollback()
err = tx.DeleteNotification(req.GetName())
if err == commonerr.ErrNotFound {
return nil, status.Error(codes.NotFound, err.Error())
return nil, status.Error(codes.NotFound, "requested notification \""+req.GetName()+"\" is not found")
} else if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

if err := tx.Commit(); err != nil {
return nil, status.Error(codes.Internal, err.Error())
}

return &google_protobuf1.Empty{}, nil
}

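Editor's note: the notification handlers above (and the reworked GetAncestry earlier) share the same datastore session pattern: open a session with Store.Begin, defer Rollback unconditionally, perform the reads or writes through the session, and Commit only on the mutating path. A hedged stand-alone sketch of that pattern, using only the calls that appear in this diff (Store.Begin, tx.DeleteNotification, tx.Commit, tx.Rollback); the not-found handling shown in the handler is omitted for brevity.

package example

import (
	"github.com/coreos/clair/database"
)

// markRead sketches the session pattern used by the handlers above: Begin a
// session, defer Rollback, perform the work, then Commit.
func markRead(store database.Datastore, name string) error {
	tx, err := store.Begin()
	if err != nil {
		return err
	}
	// The handlers above defer Rollback unconditionally and Commit at the end
	// of the successful path.
	defer tx.Rollback()

	if err := tx.DeleteNotification(name); err != nil {
		return err
	}
	return tx.Commit()
}
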
// rollBackOnError handles server error and rollback whole ancestry insertion if
// any layer failed to be inserted.
func (s *AncestryServer) rollBackOnError(err error, currentLayerName, rootLayerName string) error {
// if the current layer failed to be inserted and it's the root layer,
// then the ancestry is not yet in the database.
if currentLayerName != rootLayerName {
errrb := s.Store.DeleteLayer(rootLayerName)
if errrb != nil {
return status.Errorf(codes.Internal, errrb.Error())
}
log.WithField("layer name", currentLayerName).Warnf("Can't process %s: roll back the ancestry", currentLayerName)
}

if err == tarutil.ErrCouldNotExtract ||
err == tarutil.ErrExtractedFileTooBig ||
err == clair.ErrUnsupported {
return status.Errorf(codes.InvalidArgument, "unprocessable entity %s", err.Error())
}

if _, badreq := err.(*commonerr.ErrBadRequest); badreq {
return status.Error(codes.InvalidArgument, err.Error())
}

return status.Error(codes.Internal, err.Error())
}

// TODO(keyboardnerd): Remove this Legacy compability code once the database is
// revised.
// getAncestry returns an ancestry from database by getting all parents of a
// layer given the layer name, and the layer's feature list if
// withFeature/withVulnerability is turned on.
func (s *AncestryServer) getAncestry(name string, withFeature bool, withVulnerability bool) (ancestry *pb.Ancestry, features []*pb.Feature, err error) {
var (
layers = []*pb.Layer{}
layer database.Layer
)
ancestry = &pb.Ancestry{}

layer, err = s.Store.FindLayer(name, withFeature, withVulnerability)
if err != nil {
return
}

if withFeature {
for _, fv := range layer.Features {
f, e := pb.FeatureFromDatabaseModel(fv, withVulnerability)
if e != nil {
err = e
return
}

features = append(features, f)
}
}

ancestry.Name = name
ancestry.EngineVersion = int32(layer.EngineVersion)
for name != "" {
layer, err = s.Store.FindLayer(name, false, false)
if err != nil {
return
}

if layer.Parent != nil {
name = layer.Parent.Name
} else {
name = ""
}

layers = append(layers, pb.LayerFromDatabaseModel(layer))
}

// reverse layers to make the root layer at the top
for i, j := 0, len(layers)-1; i < j; i, j = i+1, j-1 {
layers[i], layers[j] = layers[j], layers[i]
}

ancestry.Layers = layers
return
}

@ -48,7 +48,7 @@ func handleShutdown(err error) {
var (
promResponseDurationMilliseconds = prometheus.NewHistogramVec(prometheus.HistogramOpts{
Name: "clair_v2_api_response_duration_milliseconds",
Help: "The duration of time it takes to receieve and write a response to an V2 API request",
Help: "The duration of time it takes to receive and write a response to an V2 API request",
Buckets: prometheus.ExponentialBuckets(9.375, 2, 10),
}, []string{"route", "code"})
)
@ -57,7 +57,7 @@ func init() {
prometheus.MustRegister(promResponseDurationMilliseconds)
}

func newGrpcServer(paginationKey string, store database.Datastore, tlsConfig *tls.Config) *grpc.Server {
func newGrpcServer(store database.Datastore, tlsConfig *tls.Config) *grpc.Server {
grpcOpts := []grpc.ServerOption{
grpc.UnaryInterceptor(grpc_prometheus.UnaryServerInterceptor),
grpc.StreamInterceptor(grpc_prometheus.StreamServerInterceptor),
@ -69,7 +69,7 @@ func newGrpcServer(paginationKey string, store database.Datastore, tlsConfig *tl

grpcServer := grpc.NewServer(grpcOpts...)
pb.RegisterAncestryServiceServer(grpcServer, &AncestryServer{Store: store})
pb.RegisterNotificationServiceServer(grpcServer, &NotificationServer{PaginationKey: paginationKey, Store: store})
pb.RegisterNotificationServiceServer(grpcServer, &NotificationServer{Store: store})
return grpcServer
}

@ -98,11 +98,11 @@ func logHandler(handler http.Handler) http.Handler {
}

log.WithFields(log.Fields{
"remote addr": r.RemoteAddr,
"method": r.Method,
"request uri": r.RequestURI,
"status": statusStr,
"elapsed time": time.Since(start),
"remote addr": r.RemoteAddr,
"method": r.Method,
"request uri": r.RequestURI,
"status": statusStr,
"elapsed time (ms)": float64(time.Since(start).Nanoseconds()) * 1e-6,
}).Info("Handled HTTP request")
})
}
@ -148,7 +148,7 @@ func servePrometheus(mux *http.ServeMux) {
}

// Run initializes grpc and grpc gateway api services on the same port
func Run(GrpcPort int, tlsConfig *tls.Config, PaginationKey, CertFile, KeyFile string, store database.Datastore) {
func Run(GrpcPort int, tlsConfig *tls.Config, CertFile, KeyFile string, store database.Datastore) {
l, err := net.Listen("tcp", fmt.Sprintf("localhost:%d", GrpcPort))
if err != nil {
log.WithError(err).Fatalf("could not bind to port %d", GrpcPort)
@ -175,7 +175,7 @@ func Run(GrpcPort int, tlsConfig *tls.Config, PaginationKey, CertFile, KeyFile s
apiListener = tls.NewListener(tcpMux.Match(cmux.Any()), tlsConfig)
go func() { handleShutdown(tcpMux.Serve()) }()

grpcServer := newGrpcServer(PaginationKey, store, tlsConfig)
grpcServer := newGrpcServer(store, tlsConfig)
gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), tlsConfig)

httpMux.Handle("/", gwmux)
@ -188,7 +188,7 @@ func Run(GrpcPort int, tlsConfig *tls.Config, PaginationKey, CertFile, KeyFile s
apiListener = tcpMux.Match(cmux.Any())
go func() { handleShutdown(tcpMux.Serve()) }()

grpcServer := newGrpcServer(PaginationKey, store, nil)
grpcServer := newGrpcServer(store, nil)
go func() { handleShutdown(grpcServer.Serve(grpcL)) }()

gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), nil)

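Editor's note: with the PaginationKey removed from newGrpcServer, NotificationServer, and Run, starting the v2 service reduces to handing Run a port, optional TLS material, and a datastore. A hedged usage sketch of the updated signature shown above; the v2 import path and example port are assumptions, and the datastore wiring is omitted.

package main

import (
	v2 "github.com/coreos/clair/api/v2" // assumed import path for this package
	"github.com/coreos/clair/database"
)

func main() {
	// The datastore is wired up by the application; it is left nil here only
	// to keep the sketch short.
	var store database.Datastore

	// New signature: Run(GrpcPort, tlsConfig, CertFile, KeyFile, store).
	// A nil tls.Config with empty cert/key paths takes the plain-listener
	// branch shown at the end of the diff.
	v2.Run(6060, nil, "", "", store)
}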