diff --git a/.travis.yml b/.travis.yml index 576da9fc..d636072a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,23 @@ language: go go: - - 1.8 + - "1.10" sudo: required +env: + global: + - PATH=$HOME/.local/bin:$PATH + install: - curl https://glide.sh/get | sh +- mkdir -p $HOME/.local/bin +- curl -o $HOME/.local/bin/prototool -sSL https://github.com/uber/prototool/releases/download/v0.1.0/prototool-$(uname -s)-$(uname -m) +- chmod +x $HOME/.local/bin/prototool script: +- prototool format -d api/v3/clairpb/clair.proto +- prototool lint api/v3/clairpb/clair.proto - go test $(glide novendor | grep -v contrib) dist: trusty diff --git a/Dockerfile b/Dockerfile index 767e4cd1..cebc06c6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM golang:1.8-alpine +FROM golang:1.10-alpine VOLUME /config EXPOSE 6060 6061 @@ -20,7 +20,7 @@ EXPOSE 6060 6061 ADD . /go/src/github.com/coreos/clair/ WORKDIR /go/src/github.com/coreos/clair/ -RUN apk add --no-cache git bzr rpm xz dumb-init && \ +RUN apk add --no-cache git rpm xz dumb-init && \ go install -v github.com/coreos/clair/cmd/clair && \ mv /go/bin/clair /clair && \ rm -rf /go /usr/local/go diff --git a/Documentation/integrations.md b/Documentation/integrations.md index a75235bb..289fbeba 100644 --- a/Documentation/integrations.md +++ b/Documentation/integrations.md @@ -10,6 +10,8 @@ This document tracks projects that integrate with Clair. [Join the community](ht [Yair](https://github.com/yfoelling/yair): a lightweight command-line for working with clair with many different outputs. Mainly designed for usage in a CI Job. +[Paclair](https://github.com/yebinama/paclair): a Python3 CLI tool to interact with Clair (easily configurable to access private registries). + [Clairctl](https://github.com/jgsqware/clairctl): a lightweight command-line tool for working locally with Clair and generate HTML report. [Clair-SQS](https://github.com/zalando-incubator/clair-sqs): a container containing Clair and additional processes that integrate Clair with [Amazon SQS][sqs]. diff --git a/Documentation/running-clair.md b/Documentation/running-clair.md index d7dee43d..500d115c 100644 --- a/Documentation/running-clair.md +++ b/Documentation/running-clair.md @@ -44,7 +44,7 @@ A [PostgreSQL 9.4+] database instance is required for all instructions. #### Kubernetes (Helm) If you don't have a local Kubernetes cluster already, check out [minikube]. -This assumes you've already ran `helm init` and you have access to a currently running instance of Tiller. +This assumes you've already ran `helm init`, you have access to a currently running instance of Tiller and that you are running the latest version of helm. 
[minikube]: https://github.com/kubernetes/minikube @@ -53,6 +53,7 @@ git clone https://github.com/coreos/clair cd clair/contrib/helm cp clair/values.yaml ~/my_custom_values.yaml vi ~/my_custom_values.yaml +helm dependency update clair helm install clair -f ~/my_custom_values.yaml ``` @@ -86,14 +87,12 @@ To build Clair, you need to latest stable version of [Go] and a working [Go envi In addition, Clair requires some additional binaries be installed on the system [$PATH] as runtime dependencies: * [git] -* [bzr] * [rpm] * [xz] [Go]: https://github.com/golang/go/releases [Go environment]: https://golang.org/doc/code.html [git]: https://git-scm.com -[bzr]: http://bazaar.canonical.com/en [rpm]: http://www.rpm.org [xz]: http://tukaani.org/xz [$PATH]: https://en.wikipedia.org/wiki/PATH_(variable) diff --git a/README.md b/README.md index 2fe179cf..2cd4987a 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ Thus, the project was named `Clair` after the French term which translates to *c ## Contact - IRC: #[clair](irc://irc.freenode.org:6667/#clair) on freenode.org -- Bugs: [issues](https://github.com/coreos/etcd/issues) +- Bugs: [issues](https://github.com/coreos/clair/issues) ## Contributing diff --git a/api/v3/clairpb/Dockerfile b/api/v3/clairpb/Dockerfile new file mode 100644 index 00000000..a8547e29 --- /dev/null +++ b/api/v3/clairpb/Dockerfile @@ -0,0 +1,7 @@ +FROM golang:alpine + +RUN apk add --update --no-cache git bash protobuf-dev + +RUN go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway +RUN go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger +RUN go get -u github.com/golang/protobuf/protoc-gen-go diff --git a/api/v3/clairpb/Makefile b/api/v3/clairpb/Makefile deleted file mode 100644 index 8c193baa..00000000 --- a/api/v3/clairpb/Makefile +++ /dev/null @@ -1,17 +0,0 @@ -all: - protoc -I/usr/local/include -I. \ - -I${GOPATH}/src \ - -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ - --go_out=plugins=grpc:. \ - clair.proto - protoc -I/usr/local/include -I. \ - -I${GOPATH}/src \ - -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ - --grpc-gateway_out=logtostderr=true:. \ - clair.proto - protoc -I/usr/local/include -I. \ - -I${GOPATH}/src \ - -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ - --swagger_out=logtostderr=true:. \ - clair.proto - go generate . \ No newline at end of file diff --git a/api/v3/clairpb/clair.pb.go b/api/v3/clairpb/clair.pb.go index 19816099..2dd77344 100644 --- a/api/v3/clairpb/clair.pb.go +++ b/api/v3/clairpb/clair.pb.go @@ -1,37 +1,34 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: clair.proto +// source: api/v3/clairpb/clair.proto /* Package clairpb is a generated protocol buffer package. 
It is generated from these files: - clair.proto + api/v3/clairpb/clair.proto It has these top-level messages: Vulnerability - ClairStatus Feature - Ancestry Layer - Notification - IndexedAncestryName - PagedVulnerableAncestries - PostAncestryRequest - PostAncestryResponse + ClairStatus GetAncestryRequest GetAncestryResponse + PostAncestryRequest + PostAncestryResponse GetNotificationRequest GetNotificationResponse + PagedVulnerableAncestries MarkNotificationAsReadRequest + MarkNotificationAsReadResponse */ package clairpb import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/timestamp" import _ "google.golang.org/genproto/googleapis/api/annotations" -import google_protobuf1 "github.com/golang/protobuf/ptypes/empty" -import google_protobuf2 "github.com/golang/protobuf/ptypes/timestamp" import ( context "golang.org/x/net/context" @@ -50,15 +47,23 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Vulnerability struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The name of the vulnerability. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The name of the namespace in which the vulnerability was detected. NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"` - Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` - Link string `protobuf:"bytes,4,opt,name=link" json:"link,omitempty"` - Severity string `protobuf:"bytes,5,opt,name=severity" json:"severity,omitempty"` - Metadata string `protobuf:"bytes,6,opt,name=metadata" json:"metadata,omitempty"` - // fixed_by exists when vulnerability is under feature. + // A description of the vulnerability according to the source for the namespace. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // A link to the vulnerability according to the source for the namespace. + Link string `protobuf:"bytes,4,opt,name=link" json:"link,omitempty"` + // How dangerous the vulnerability is. + Severity string `protobuf:"bytes,5,opt,name=severity" json:"severity,omitempty"` + // Namespace agnostic metadata about the vulnerability. + Metadata string `protobuf:"bytes,6,opt,name=metadata" json:"metadata,omitempty"` + // The feature that fixes this vulnerability. + // This field only exists when a vulnerability is a part of a Feature. FixedBy string `protobuf:"bytes,7,opt,name=fixed_by,json=fixedBy" json:"fixed_by,omitempty"` - // affected_versions exists when vulnerability is under notification. + // The Features that are affected by the vulnerability. + // This field only exists when a vulnerability is a part of a Notification. 
AffectedVersions []*Feature `protobuf:"bytes,8,rep,name=affected_versions,json=affectedVersions" json:"affected_versions,omitempty"` } @@ -123,52 +128,23 @@ func (m *Vulnerability) GetAffectedVersions() []*Feature { return nil } -type ClairStatus struct { - // listers and detectors are processors implemented in this Clair and used to - // scan ancestries - Listers []string `protobuf:"bytes,1,rep,name=listers" json:"listers,omitempty"` - Detectors []string `protobuf:"bytes,2,rep,name=detectors" json:"detectors,omitempty"` - LastUpdateTime *google_protobuf2.Timestamp `protobuf:"bytes,3,opt,name=last_update_time,json=lastUpdateTime" json:"last_update_time,omitempty"` -} - -func (m *ClairStatus) Reset() { *m = ClairStatus{} } -func (m *ClairStatus) String() string { return proto.CompactTextString(m) } -func (*ClairStatus) ProtoMessage() {} -func (*ClairStatus) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *ClairStatus) GetListers() []string { - if m != nil { - return m.Listers - } - return nil -} - -func (m *ClairStatus) GetDetectors() []string { - if m != nil { - return m.Detectors - } - return nil -} - -func (m *ClairStatus) GetLastUpdateTime() *google_protobuf2.Timestamp { - if m != nil { - return m.LastUpdateTime - } - return nil -} - type Feature struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"` - Version string `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"` - VersionFormat string `protobuf:"bytes,4,opt,name=version_format,json=versionFormat" json:"version_format,omitempty"` + // The name of the feature. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The name of the namespace in which the feature is detected. + NamespaceName string `protobuf:"bytes,2,opt,name=namespace_name,json=namespaceName" json:"namespace_name,omitempty"` + // The specific version of this feature. + Version string `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"` + // The format used to parse version numbers for the feature. + VersionFormat string `protobuf:"bytes,4,opt,name=version_format,json=versionFormat" json:"version_format,omitempty"` + // The list of vulnerabilities that affect the feature. Vulnerabilities []*Vulnerability `protobuf:"bytes,5,rep,name=vulnerabilities" json:"vulnerabilities,omitempty"` } func (m *Feature) Reset() { *m = Feature{} } func (m *Feature) String() string { return proto.CompactTextString(m) } func (*Feature) ProtoMessage() {} -func (*Feature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (*Feature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *Feature) GetName() string { if m != nil { @@ -205,214 +181,187 @@ func (m *Feature) GetVulnerabilities() []*Vulnerability { return nil } -type Ancestry struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Features []*Feature `protobuf:"bytes,2,rep,name=features" json:"features,omitempty"` - Layers []*Layer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"` - // scanned_listers and scanned_detectors are used to scan this ancestry, it - // may be different from listers and detectors in ClairStatus since the - // ancestry could be scanned by previous version of Clair. 
- ScannedListers []string `protobuf:"bytes,4,rep,name=scanned_listers,json=scannedListers" json:"scanned_listers,omitempty"` - ScannedDetectors []string `protobuf:"bytes,5,rep,name=scanned_detectors,json=scannedDetectors" json:"scanned_detectors,omitempty"` +type Layer struct { + // The sha256 tarsum for the layer. + Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"` } -func (m *Ancestry) Reset() { *m = Ancestry{} } -func (m *Ancestry) String() string { return proto.CompactTextString(m) } -func (*Ancestry) ProtoMessage() {} -func (*Ancestry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *Layer) Reset() { *m = Layer{} } +func (m *Layer) String() string { return proto.CompactTextString(m) } +func (*Layer) ProtoMessage() {} +func (*Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (m *Ancestry) GetName() string { +func (m *Layer) GetHash() string { if m != nil { - return m.Name + return m.Hash } return "" } -func (m *Ancestry) GetFeatures() []*Feature { - if m != nil { - return m.Features - } - return nil +type ClairStatus struct { + // The configured list of feature listers used to scan an ancestry. + Listers []string `protobuf:"bytes,1,rep,name=listers" json:"listers,omitempty"` + // The configured list of namespace detectors used to scan an ancestry. + Detectors []string `protobuf:"bytes,2,rep,name=detectors" json:"detectors,omitempty"` + // The time at which the updater last ran. + LastUpdateTime *google_protobuf.Timestamp `protobuf:"bytes,3,opt,name=last_update_time,json=lastUpdateTime" json:"last_update_time,omitempty"` } -func (m *Ancestry) GetLayers() []*Layer { +func (m *ClairStatus) Reset() { *m = ClairStatus{} } +func (m *ClairStatus) String() string { return proto.CompactTextString(m) } +func (*ClairStatus) ProtoMessage() {} +func (*ClairStatus) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *ClairStatus) GetListers() []string { if m != nil { - return m.Layers + return m.Listers } return nil } -func (m *Ancestry) GetScannedListers() []string { +func (m *ClairStatus) GetDetectors() []string { if m != nil { - return m.ScannedListers + return m.Detectors } return nil } -func (m *Ancestry) GetScannedDetectors() []string { +func (m *ClairStatus) GetLastUpdateTime() *google_protobuf.Timestamp { if m != nil { - return m.ScannedDetectors + return m.LastUpdateTime } return nil } -type Layer struct { - Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"` +type GetAncestryRequest struct { + // The name of the desired ancestry. + AncestryName string `protobuf:"bytes,1,opt,name=ancestry_name,json=ancestryName" json:"ancestry_name,omitempty"` + // Whether to include vulnerabilities or not in the response. + WithVulnerabilities bool `protobuf:"varint,2,opt,name=with_vulnerabilities,json=withVulnerabilities" json:"with_vulnerabilities,omitempty"` + // Whether to include features or not in the response. 
+ WithFeatures bool `protobuf:"varint,3,opt,name=with_features,json=withFeatures" json:"with_features,omitempty"` } -func (m *Layer) Reset() { *m = Layer{} } -func (m *Layer) String() string { return proto.CompactTextString(m) } -func (*Layer) ProtoMessage() {} -func (*Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (m *GetAncestryRequest) Reset() { *m = GetAncestryRequest{} } +func (m *GetAncestryRequest) String() string { return proto.CompactTextString(m) } +func (*GetAncestryRequest) ProtoMessage() {} +func (*GetAncestryRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } -func (m *Layer) GetHash() string { +func (m *GetAncestryRequest) GetAncestryName() string { if m != nil { - return m.Hash + return m.AncestryName } return "" } -type Notification struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Created string `protobuf:"bytes,2,opt,name=created" json:"created,omitempty"` - Notified string `protobuf:"bytes,3,opt,name=notified" json:"notified,omitempty"` - Deleted string `protobuf:"bytes,4,opt,name=deleted" json:"deleted,omitempty"` - Old *PagedVulnerableAncestries `protobuf:"bytes,5,opt,name=old" json:"old,omitempty"` - New *PagedVulnerableAncestries `protobuf:"bytes,6,opt,name=new" json:"new,omitempty"` -} - -func (m *Notification) Reset() { *m = Notification{} } -func (m *Notification) String() string { return proto.CompactTextString(m) } -func (*Notification) ProtoMessage() {} -func (*Notification) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } - -func (m *Notification) GetName() string { +func (m *GetAncestryRequest) GetWithVulnerabilities() bool { if m != nil { - return m.Name + return m.WithVulnerabilities } - return "" + return false } -func (m *Notification) GetCreated() string { +func (m *GetAncestryRequest) GetWithFeatures() bool { if m != nil { - return m.Created + return m.WithFeatures } - return "" + return false } -func (m *Notification) GetNotified() string { - if m != nil { - return m.Notified - } - return "" +type GetAncestryResponse struct { + // The ancestry requested. + Ancestry *GetAncestryResponse_Ancestry `protobuf:"bytes,1,opt,name=ancestry" json:"ancestry,omitempty"` + // The status of Clair at the time of the request. + Status *ClairStatus `protobuf:"bytes,2,opt,name=status" json:"status,omitempty"` } -func (m *Notification) GetDeleted() string { - if m != nil { - return m.Deleted - } - return "" -} +func (m *GetAncestryResponse) Reset() { *m = GetAncestryResponse{} } +func (m *GetAncestryResponse) String() string { return proto.CompactTextString(m) } +func (*GetAncestryResponse) ProtoMessage() {} +func (*GetAncestryResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } -func (m *Notification) GetOld() *PagedVulnerableAncestries { +func (m *GetAncestryResponse) GetAncestry() *GetAncestryResponse_Ancestry { if m != nil { - return m.Old + return m.Ancestry } return nil } -func (m *Notification) GetNew() *PagedVulnerableAncestries { +func (m *GetAncestryResponse) GetStatus() *ClairStatus { if m != nil { - return m.New + return m.Status } return nil } -type IndexedAncestryName struct { - // index is unique to name in all streams simultaneously streamed, increasing - // and larger than all indexes in previous page in same stream. 
- Index int32 `protobuf:"varint,1,opt,name=index" json:"index,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` +type GetAncestryResponse_Ancestry struct { + // The name of the desired ancestry. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The list of features present in the ancestry. + // This will only be provided if requested. + Features []*Feature `protobuf:"bytes,2,rep,name=features" json:"features,omitempty"` + // The layers present in the ancestry. + Layers []*Layer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"` + // The configured list of feature listers used to scan this ancestry. + ScannedListers []string `protobuf:"bytes,4,rep,name=scanned_listers,json=scannedListers" json:"scanned_listers,omitempty"` + // The configured list of namespace detectors used to scan an ancestry. + ScannedDetectors []string `protobuf:"bytes,5,rep,name=scanned_detectors,json=scannedDetectors" json:"scanned_detectors,omitempty"` } -func (m *IndexedAncestryName) Reset() { *m = IndexedAncestryName{} } -func (m *IndexedAncestryName) String() string { return proto.CompactTextString(m) } -func (*IndexedAncestryName) ProtoMessage() {} -func (*IndexedAncestryName) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } - -func (m *IndexedAncestryName) GetIndex() int32 { - if m != nil { - return m.Index - } - return 0 -} +func (m *GetAncestryResponse_Ancestry) Reset() { *m = GetAncestryResponse_Ancestry{} } +func (m *GetAncestryResponse_Ancestry) String() string { return proto.CompactTextString(m) } +func (*GetAncestryResponse_Ancestry) ProtoMessage() {} +func (*GetAncestryResponse_Ancestry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 0} } -func (m *IndexedAncestryName) GetName() string { +func (m *GetAncestryResponse_Ancestry) GetName() string { if m != nil { return m.Name } return "" } -type PagedVulnerableAncestries struct { - CurrentPage string `protobuf:"bytes,1,opt,name=current_page,json=currentPage" json:"current_page,omitempty"` - // if next_page is empty, it signals the end of all pages. 
- NextPage string `protobuf:"bytes,2,opt,name=next_page,json=nextPage" json:"next_page,omitempty"` - Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"` - Vulnerability *Vulnerability `protobuf:"bytes,4,opt,name=vulnerability" json:"vulnerability,omitempty"` - Ancestries []*IndexedAncestryName `protobuf:"bytes,5,rep,name=ancestries" json:"ancestries,omitempty"` -} - -func (m *PagedVulnerableAncestries) Reset() { *m = PagedVulnerableAncestries{} } -func (m *PagedVulnerableAncestries) String() string { return proto.CompactTextString(m) } -func (*PagedVulnerableAncestries) ProtoMessage() {} -func (*PagedVulnerableAncestries) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } - -func (m *PagedVulnerableAncestries) GetCurrentPage() string { - if m != nil { - return m.CurrentPage - } - return "" -} - -func (m *PagedVulnerableAncestries) GetNextPage() string { +func (m *GetAncestryResponse_Ancestry) GetFeatures() []*Feature { if m != nil { - return m.NextPage + return m.Features } - return "" + return nil } -func (m *PagedVulnerableAncestries) GetLimit() int32 { +func (m *GetAncestryResponse_Ancestry) GetLayers() []*Layer { if m != nil { - return m.Limit + return m.Layers } - return 0 + return nil } -func (m *PagedVulnerableAncestries) GetVulnerability() *Vulnerability { +func (m *GetAncestryResponse_Ancestry) GetScannedListers() []string { if m != nil { - return m.Vulnerability + return m.ScannedListers } return nil } -func (m *PagedVulnerableAncestries) GetAncestries() []*IndexedAncestryName { +func (m *GetAncestryResponse_Ancestry) GetScannedDetectors() []string { if m != nil { - return m.Ancestries + return m.ScannedDetectors } return nil } type PostAncestryRequest struct { - AncestryName string `protobuf:"bytes,1,opt,name=ancestry_name,json=ancestryName" json:"ancestry_name,omitempty"` - Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` - Layers []*PostAncestryRequest_PostLayer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"` + // The name of the ancestry being scanned. + // If scanning OCI images, this should be the hash of the manifest. + AncestryName string `protobuf:"bytes,1,opt,name=ancestry_name,json=ancestryName" json:"ancestry_name,omitempty"` + // The format of the image being uploaded. + Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` + // The layers to be scanned for this particular ancestry. + Layers []*PostAncestryRequest_PostLayer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"` } func (m *PostAncestryRequest) Reset() { *m = PostAncestryRequest{} } func (m *PostAncestryRequest) String() string { return proto.CompactTextString(m) } func (*PostAncestryRequest) ProtoMessage() {} -func (*PostAncestryRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } +func (*PostAncestryRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } func (m *PostAncestryRequest) GetAncestryName() string { if m != nil { @@ -436,8 +385,11 @@ func (m *PostAncestryRequest) GetLayers() []*PostAncestryRequest_PostLayer { } type PostAncestryRequest_PostLayer struct { - Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"` - Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` + // The hash of the layer. + Hash string `protobuf:"bytes,1,opt,name=hash" json:"hash,omitempty"` + // The location of the layer (URL or filepath). 
+ Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` + // Any HTTP Headers that need to be used if requesting a layer over HTTP(S). Headers map[string]string `protobuf:"bytes,3,rep,name=headers" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` } @@ -445,7 +397,7 @@ func (m *PostAncestryRequest_PostLayer) Reset() { *m = PostAncestryReque func (m *PostAncestryRequest_PostLayer) String() string { return proto.CompactTextString(m) } func (*PostAncestryRequest_PostLayer) ProtoMessage() {} func (*PostAncestryRequest_PostLayer) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{8, 0} + return fileDescriptor0, []int{6, 0} } func (m *PostAncestryRequest_PostLayer) GetHash() string { @@ -470,13 +422,14 @@ func (m *PostAncestryRequest_PostLayer) GetHeaders() map[string]string { } type PostAncestryResponse struct { + // The status of Clair at the time of the request. Status *ClairStatus `protobuf:"bytes,1,opt,name=status" json:"status,omitempty"` } func (m *PostAncestryResponse) Reset() { *m = PostAncestryResponse{} } func (m *PostAncestryResponse) String() string { return proto.CompactTextString(m) } func (*PostAncestryResponse) ProtoMessage() {} -func (*PostAncestryResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } +func (*PostAncestryResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } func (m *PostAncestryResponse) GetStatus() *ClairStatus { if m != nil { @@ -485,127 +438,229 @@ func (m *PostAncestryResponse) GetStatus() *ClairStatus { return nil } -type GetAncestryRequest struct { - AncestryName string `protobuf:"bytes,1,opt,name=ancestry_name,json=ancestryName" json:"ancestry_name,omitempty"` - WithVulnerabilities bool `protobuf:"varint,2,opt,name=with_vulnerabilities,json=withVulnerabilities" json:"with_vulnerabilities,omitempty"` - WithFeatures bool `protobuf:"varint,3,opt,name=with_features,json=withFeatures" json:"with_features,omitempty"` +type GetNotificationRequest struct { + // The current page of previous vulnerabilities for the ancestry. + // This will be empty when it is the first page. + OldVulnerabilityPage string `protobuf:"bytes,1,opt,name=old_vulnerability_page,json=oldVulnerabilityPage" json:"old_vulnerability_page,omitempty"` + // The current page of vulnerabilities for the ancestry. + // This will be empty when it is the first page. + NewVulnerabilityPage string `protobuf:"bytes,2,opt,name=new_vulnerability_page,json=newVulnerabilityPage" json:"new_vulnerability_page,omitempty"` + // The requested maximum number of results per page. + Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"` + // The name of the notification being requested. 
+ Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` } -func (m *GetAncestryRequest) Reset() { *m = GetAncestryRequest{} } -func (m *GetAncestryRequest) String() string { return proto.CompactTextString(m) } -func (*GetAncestryRequest) ProtoMessage() {} -func (*GetAncestryRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } +func (m *GetNotificationRequest) Reset() { *m = GetNotificationRequest{} } +func (m *GetNotificationRequest) String() string { return proto.CompactTextString(m) } +func (*GetNotificationRequest) ProtoMessage() {} +func (*GetNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } -func (m *GetAncestryRequest) GetAncestryName() string { +func (m *GetNotificationRequest) GetOldVulnerabilityPage() string { if m != nil { - return m.AncestryName + return m.OldVulnerabilityPage } return "" } -func (m *GetAncestryRequest) GetWithVulnerabilities() bool { +func (m *GetNotificationRequest) GetNewVulnerabilityPage() string { if m != nil { - return m.WithVulnerabilities + return m.NewVulnerabilityPage } - return false + return "" } -func (m *GetAncestryRequest) GetWithFeatures() bool { +func (m *GetNotificationRequest) GetLimit() int32 { if m != nil { - return m.WithFeatures + return m.Limit } - return false + return 0 } -type GetAncestryResponse struct { - Ancestry *Ancestry `protobuf:"bytes,1,opt,name=ancestry" json:"ancestry,omitempty"` - Status *ClairStatus `protobuf:"bytes,2,opt,name=status" json:"status,omitempty"` +func (m *GetNotificationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" } -func (m *GetAncestryResponse) Reset() { *m = GetAncestryResponse{} } -func (m *GetAncestryResponse) String() string { return proto.CompactTextString(m) } -func (*GetAncestryResponse) ProtoMessage() {} -func (*GetAncestryResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } +type GetNotificationResponse struct { + // The notification as requested. + Notification *GetNotificationResponse_Notification `protobuf:"bytes,1,opt,name=notification" json:"notification,omitempty"` +} + +func (m *GetNotificationResponse) Reset() { *m = GetNotificationResponse{} } +func (m *GetNotificationResponse) String() string { return proto.CompactTextString(m) } +func (*GetNotificationResponse) ProtoMessage() {} +func (*GetNotificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } -func (m *GetAncestryResponse) GetAncestry() *Ancestry { +func (m *GetNotificationResponse) GetNotification() *GetNotificationResponse_Notification { if m != nil { - return m.Ancestry + return m.Notification } return nil } -func (m *GetAncestryResponse) GetStatus() *ClairStatus { +type GetNotificationResponse_Notification struct { + // The name of the requested notification. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The time at which the notification was created. + Created string `protobuf:"bytes,2,opt,name=created" json:"created,omitempty"` + // The time at which the notification was last sent out. + Notified string `protobuf:"bytes,3,opt,name=notified" json:"notified,omitempty"` + // The time at which a notification has been deleted. + Deleted string `protobuf:"bytes,4,opt,name=deleted" json:"deleted,omitempty"` + // The previous vulnerability and a paginated view of the ancestries it affects. 
+ Old *PagedVulnerableAncestries `protobuf:"bytes,5,opt,name=old" json:"old,omitempty"` + // The newly updated vulnerability and a paginated view of the ancestries it affects. + New *PagedVulnerableAncestries `protobuf:"bytes,6,opt,name=new" json:"new,omitempty"` +} + +func (m *GetNotificationResponse_Notification) Reset() { *m = GetNotificationResponse_Notification{} } +func (m *GetNotificationResponse_Notification) String() string { return proto.CompactTextString(m) } +func (*GetNotificationResponse_Notification) ProtoMessage() {} +func (*GetNotificationResponse_Notification) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{9, 0} +} + +func (m *GetNotificationResponse_Notification) GetName() string { if m != nil { - return m.Status + return m.Name + } + return "" +} + +func (m *GetNotificationResponse_Notification) GetCreated() string { + if m != nil { + return m.Created + } + return "" +} + +func (m *GetNotificationResponse_Notification) GetNotified() string { + if m != nil { + return m.Notified + } + return "" +} + +func (m *GetNotificationResponse_Notification) GetDeleted() string { + if m != nil { + return m.Deleted + } + return "" +} + +func (m *GetNotificationResponse_Notification) GetOld() *PagedVulnerableAncestries { + if m != nil { + return m.Old } return nil } -type GetNotificationRequest struct { - // if the vulnerability_page is empty, it implies the first page. - OldVulnerabilityPage string `protobuf:"bytes,1,opt,name=old_vulnerability_page,json=oldVulnerabilityPage" json:"old_vulnerability_page,omitempty"` - NewVulnerabilityPage string `protobuf:"bytes,2,opt,name=new_vulnerability_page,json=newVulnerabilityPage" json:"new_vulnerability_page,omitempty"` - Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"` - Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` +func (m *GetNotificationResponse_Notification) GetNew() *PagedVulnerableAncestries { + if m != nil { + return m.New + } + return nil } -func (m *GetNotificationRequest) Reset() { *m = GetNotificationRequest{} } -func (m *GetNotificationRequest) String() string { return proto.CompactTextString(m) } -func (*GetNotificationRequest) ProtoMessage() {} -func (*GetNotificationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } +type PagedVulnerableAncestries struct { + // The identifier for the current page. + CurrentPage string `protobuf:"bytes,1,opt,name=current_page,json=currentPage" json:"current_page,omitempty"` + // The token used to request the next page. + // This will be empty when there are no more pages. + NextPage string `protobuf:"bytes,2,opt,name=next_page,json=nextPage" json:"next_page,omitempty"` + // The requested maximum number of results per page. + Limit int32 `protobuf:"varint,3,opt,name=limit" json:"limit,omitempty"` + // The vulnerability that affects a given set of ancestries. + Vulnerability *Vulnerability `protobuf:"bytes,4,opt,name=vulnerability" json:"vulnerability,omitempty"` + // The ancestries affected by a vulnerability. 
+ Ancestries []*PagedVulnerableAncestries_IndexedAncestryName `protobuf:"bytes,5,rep,name=ancestries" json:"ancestries,omitempty"` +} -func (m *GetNotificationRequest) GetOldVulnerabilityPage() string { +func (m *PagedVulnerableAncestries) Reset() { *m = PagedVulnerableAncestries{} } +func (m *PagedVulnerableAncestries) String() string { return proto.CompactTextString(m) } +func (*PagedVulnerableAncestries) ProtoMessage() {} +func (*PagedVulnerableAncestries) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *PagedVulnerableAncestries) GetCurrentPage() string { if m != nil { - return m.OldVulnerabilityPage + return m.CurrentPage } return "" } -func (m *GetNotificationRequest) GetNewVulnerabilityPage() string { +func (m *PagedVulnerableAncestries) GetNextPage() string { if m != nil { - return m.NewVulnerabilityPage + return m.NextPage } return "" } -func (m *GetNotificationRequest) GetLimit() int32 { +func (m *PagedVulnerableAncestries) GetLimit() int32 { if m != nil { return m.Limit } return 0 } -func (m *GetNotificationRequest) GetName() string { +func (m *PagedVulnerableAncestries) GetVulnerability() *Vulnerability { if m != nil { - return m.Name + return m.Vulnerability } - return "" + return nil } -type GetNotificationResponse struct { - Notification *Notification `protobuf:"bytes,1,opt,name=notification" json:"notification,omitempty"` +func (m *PagedVulnerableAncestries) GetAncestries() []*PagedVulnerableAncestries_IndexedAncestryName { + if m != nil { + return m.Ancestries + } + return nil } -func (m *GetNotificationResponse) Reset() { *m = GetNotificationResponse{} } -func (m *GetNotificationResponse) String() string { return proto.CompactTextString(m) } -func (*GetNotificationResponse) ProtoMessage() {} -func (*GetNotificationResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } +type PagedVulnerableAncestries_IndexedAncestryName struct { + // The index is an ever increasing number associated with the particular ancestry. + // This is useful if you're processing notifications, and need to keep track of the progress of paginating the results. + Index int32 `protobuf:"varint,1,opt,name=index" json:"index,omitempty"` + // The name of the ancestry. + Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` +} + +func (m *PagedVulnerableAncestries_IndexedAncestryName) Reset() { + *m = PagedVulnerableAncestries_IndexedAncestryName{} +} +func (m *PagedVulnerableAncestries_IndexedAncestryName) String() string { + return proto.CompactTextString(m) +} +func (*PagedVulnerableAncestries_IndexedAncestryName) ProtoMessage() {} +func (*PagedVulnerableAncestries_IndexedAncestryName) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{10, 0} +} -func (m *GetNotificationResponse) GetNotification() *Notification { +func (m *PagedVulnerableAncestries_IndexedAncestryName) GetIndex() int32 { if m != nil { - return m.Notification + return m.Index } - return nil + return 0 +} + +func (m *PagedVulnerableAncestries_IndexedAncestryName) GetName() string { + if m != nil { + return m.Name + } + return "" } type MarkNotificationAsReadRequest struct { + // The name of the Notification that has been processed. 
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` } func (m *MarkNotificationAsReadRequest) Reset() { *m = MarkNotificationAsReadRequest{} } func (m *MarkNotificationAsReadRequest) String() string { return proto.CompactTextString(m) } func (*MarkNotificationAsReadRequest) ProtoMessage() {} -func (*MarkNotificationAsReadRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } +func (*MarkNotificationAsReadRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } func (m *MarkNotificationAsReadRequest) GetName() string { if m != nil { @@ -614,23 +669,32 @@ func (m *MarkNotificationAsReadRequest) GetName() string { return "" } +type MarkNotificationAsReadResponse struct { +} + +func (m *MarkNotificationAsReadResponse) Reset() { *m = MarkNotificationAsReadResponse{} } +func (m *MarkNotificationAsReadResponse) String() string { return proto.CompactTextString(m) } +func (*MarkNotificationAsReadResponse) ProtoMessage() {} +func (*MarkNotificationAsReadResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + func init() { - proto.RegisterType((*Vulnerability)(nil), "clairpb.Vulnerability") - proto.RegisterType((*ClairStatus)(nil), "clairpb.ClairStatus") - proto.RegisterType((*Feature)(nil), "clairpb.Feature") - proto.RegisterType((*Ancestry)(nil), "clairpb.Ancestry") - proto.RegisterType((*Layer)(nil), "clairpb.Layer") - proto.RegisterType((*Notification)(nil), "clairpb.Notification") - proto.RegisterType((*IndexedAncestryName)(nil), "clairpb.IndexedAncestryName") - proto.RegisterType((*PagedVulnerableAncestries)(nil), "clairpb.PagedVulnerableAncestries") - proto.RegisterType((*PostAncestryRequest)(nil), "clairpb.PostAncestryRequest") - proto.RegisterType((*PostAncestryRequest_PostLayer)(nil), "clairpb.PostAncestryRequest.PostLayer") - proto.RegisterType((*PostAncestryResponse)(nil), "clairpb.PostAncestryResponse") - proto.RegisterType((*GetAncestryRequest)(nil), "clairpb.GetAncestryRequest") - proto.RegisterType((*GetAncestryResponse)(nil), "clairpb.GetAncestryResponse") - proto.RegisterType((*GetNotificationRequest)(nil), "clairpb.GetNotificationRequest") - proto.RegisterType((*GetNotificationResponse)(nil), "clairpb.GetNotificationResponse") - proto.RegisterType((*MarkNotificationAsReadRequest)(nil), "clairpb.MarkNotificationAsReadRequest") + proto.RegisterType((*Vulnerability)(nil), "coreos.clair.Vulnerability") + proto.RegisterType((*Feature)(nil), "coreos.clair.Feature") + proto.RegisterType((*Layer)(nil), "coreos.clair.Layer") + proto.RegisterType((*ClairStatus)(nil), "coreos.clair.ClairStatus") + proto.RegisterType((*GetAncestryRequest)(nil), "coreos.clair.GetAncestryRequest") + proto.RegisterType((*GetAncestryResponse)(nil), "coreos.clair.GetAncestryResponse") + proto.RegisterType((*GetAncestryResponse_Ancestry)(nil), "coreos.clair.GetAncestryResponse.Ancestry") + proto.RegisterType((*PostAncestryRequest)(nil), "coreos.clair.PostAncestryRequest") + proto.RegisterType((*PostAncestryRequest_PostLayer)(nil), "coreos.clair.PostAncestryRequest.PostLayer") + proto.RegisterType((*PostAncestryResponse)(nil), "coreos.clair.PostAncestryResponse") + proto.RegisterType((*GetNotificationRequest)(nil), "coreos.clair.GetNotificationRequest") + proto.RegisterType((*GetNotificationResponse)(nil), "coreos.clair.GetNotificationResponse") + proto.RegisterType((*GetNotificationResponse_Notification)(nil), "coreos.clair.GetNotificationResponse.Notification") + proto.RegisterType((*PagedVulnerableAncestries)(nil), 
"coreos.clair.PagedVulnerableAncestries") + proto.RegisterType((*PagedVulnerableAncestries_IndexedAncestryName)(nil), "coreos.clair.PagedVulnerableAncestries.IndexedAncestryName") + proto.RegisterType((*MarkNotificationAsReadRequest)(nil), "coreos.clair.MarkNotificationAsReadRequest") + proto.RegisterType((*MarkNotificationAsReadResponse)(nil), "coreos.clair.MarkNotificationAsReadResponse") } // Reference imports to suppress errors if they are not otherwise used. @@ -644,8 +708,10 @@ const _ = grpc.SupportPackageIsVersion4 // Client API for AncestryService service type AncestryServiceClient interface { - PostAncestry(ctx context.Context, in *PostAncestryRequest, opts ...grpc.CallOption) (*PostAncestryResponse, error) + // The RPC used to read the results of scanning for a particular ancestry. GetAncestry(ctx context.Context, in *GetAncestryRequest, opts ...grpc.CallOption) (*GetAncestryResponse, error) + // The RPC used to create a new scan of an ancestry. + PostAncestry(ctx context.Context, in *PostAncestryRequest, opts ...grpc.CallOption) (*PostAncestryResponse, error) } type ancestryServiceClient struct { @@ -656,18 +722,18 @@ func NewAncestryServiceClient(cc *grpc.ClientConn) AncestryServiceClient { return &ancestryServiceClient{cc} } -func (c *ancestryServiceClient) PostAncestry(ctx context.Context, in *PostAncestryRequest, opts ...grpc.CallOption) (*PostAncestryResponse, error) { - out := new(PostAncestryResponse) - err := grpc.Invoke(ctx, "/clairpb.AncestryService/PostAncestry", in, out, c.cc, opts...) +func (c *ancestryServiceClient) GetAncestry(ctx context.Context, in *GetAncestryRequest, opts ...grpc.CallOption) (*GetAncestryResponse, error) { + out := new(GetAncestryResponse) + err := grpc.Invoke(ctx, "/coreos.clair.AncestryService/GetAncestry", in, out, c.cc, opts...) if err != nil { return nil, err } return out, nil } -func (c *ancestryServiceClient) GetAncestry(ctx context.Context, in *GetAncestryRequest, opts ...grpc.CallOption) (*GetAncestryResponse, error) { - out := new(GetAncestryResponse) - err := grpc.Invoke(ctx, "/clairpb.AncestryService/GetAncestry", in, out, c.cc, opts...) +func (c *ancestryServiceClient) PostAncestry(ctx context.Context, in *PostAncestryRequest, opts ...grpc.CallOption) (*PostAncestryResponse, error) { + out := new(PostAncestryResponse) + err := grpc.Invoke(ctx, "/coreos.clair.AncestryService/PostAncestry", in, out, c.cc, opts...) if err != nil { return nil, err } @@ -677,72 +743,76 @@ func (c *ancestryServiceClient) GetAncestry(ctx context.Context, in *GetAncestry // Server API for AncestryService service type AncestryServiceServer interface { - PostAncestry(context.Context, *PostAncestryRequest) (*PostAncestryResponse, error) + // The RPC used to read the results of scanning for a particular ancestry. GetAncestry(context.Context, *GetAncestryRequest) (*GetAncestryResponse, error) + // The RPC used to create a new scan of an ancestry. 
+ PostAncestry(context.Context, *PostAncestryRequest) (*PostAncestryResponse, error) } func RegisterAncestryServiceServer(s *grpc.Server, srv AncestryServiceServer) { s.RegisterService(&_AncestryService_serviceDesc, srv) } -func _AncestryService_PostAncestry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PostAncestryRequest) +func _AncestryService_GetAncestry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAncestryRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(AncestryServiceServer).PostAncestry(ctx, in) + return srv.(AncestryServiceServer).GetAncestry(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/clairpb.AncestryService/PostAncestry", + FullMethod: "/coreos.clair.AncestryService/GetAncestry", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AncestryServiceServer).PostAncestry(ctx, req.(*PostAncestryRequest)) + return srv.(AncestryServiceServer).GetAncestry(ctx, req.(*GetAncestryRequest)) } return interceptor(ctx, in, info, handler) } -func _AncestryService_GetAncestry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetAncestryRequest) +func _AncestryService_PostAncestry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PostAncestryRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(AncestryServiceServer).GetAncestry(ctx, in) + return srv.(AncestryServiceServer).PostAncestry(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/clairpb.AncestryService/GetAncestry", + FullMethod: "/coreos.clair.AncestryService/PostAncestry", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AncestryServiceServer).GetAncestry(ctx, req.(*GetAncestryRequest)) + return srv.(AncestryServiceServer).PostAncestry(ctx, req.(*PostAncestryRequest)) } return interceptor(ctx, in, info, handler) } var _AncestryService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "clairpb.AncestryService", + ServiceName: "coreos.clair.AncestryService", HandlerType: (*AncestryServiceServer)(nil), Methods: []grpc.MethodDesc{ - { - MethodName: "PostAncestry", - Handler: _AncestryService_PostAncestry_Handler, - }, { MethodName: "GetAncestry", Handler: _AncestryService_GetAncestry_Handler, }, + { + MethodName: "PostAncestry", + Handler: _AncestryService_PostAncestry_Handler, + }, }, Streams: []grpc.StreamDesc{}, - Metadata: "clair.proto", + Metadata: "api/v3/clairpb/clair.proto", } // Client API for NotificationService service type NotificationServiceClient interface { + // The RPC used to get a particularly Notification. GetNotification(ctx context.Context, in *GetNotificationRequest, opts ...grpc.CallOption) (*GetNotificationResponse, error) - MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + // The RPC used to mark a Notification as read after it has been processed. 
+ MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*MarkNotificationAsReadResponse, error) } type notificationServiceClient struct { @@ -755,16 +825,16 @@ func NewNotificationServiceClient(cc *grpc.ClientConn) NotificationServiceClient func (c *notificationServiceClient) GetNotification(ctx context.Context, in *GetNotificationRequest, opts ...grpc.CallOption) (*GetNotificationResponse, error) { out := new(GetNotificationResponse) - err := grpc.Invoke(ctx, "/clairpb.NotificationService/GetNotification", in, out, c.cc, opts...) + err := grpc.Invoke(ctx, "/coreos.clair.NotificationService/GetNotification", in, out, c.cc, opts...) if err != nil { return nil, err } return out, nil } -func (c *notificationServiceClient) MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { - out := new(google_protobuf1.Empty) - err := grpc.Invoke(ctx, "/clairpb.NotificationService/MarkNotificationAsRead", in, out, c.cc, opts...) +func (c *notificationServiceClient) MarkNotificationAsRead(ctx context.Context, in *MarkNotificationAsReadRequest, opts ...grpc.CallOption) (*MarkNotificationAsReadResponse, error) { + out := new(MarkNotificationAsReadResponse) + err := grpc.Invoke(ctx, "/coreos.clair.NotificationService/MarkNotificationAsRead", in, out, c.cc, opts...) if err != nil { return nil, err } @@ -774,8 +844,10 @@ func (c *notificationServiceClient) MarkNotificationAsRead(ctx context.Context, // Server API for NotificationService service type NotificationServiceServer interface { + // The RPC used to get a particularly Notification. GetNotification(context.Context, *GetNotificationRequest) (*GetNotificationResponse, error) - MarkNotificationAsRead(context.Context, *MarkNotificationAsReadRequest) (*google_protobuf1.Empty, error) + // The RPC used to mark a Notification as read after it has been processed. 
+ MarkNotificationAsRead(context.Context, *MarkNotificationAsReadRequest) (*MarkNotificationAsReadResponse, error) } func RegisterNotificationServiceServer(s *grpc.Server, srv NotificationServiceServer) { @@ -792,7 +864,7 @@ func _NotificationService_GetNotification_Handler(srv interface{}, ctx context.C } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/clairpb.NotificationService/GetNotification", + FullMethod: "/coreos.clair.NotificationService/GetNotification", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(NotificationServiceServer).GetNotification(ctx, req.(*GetNotificationRequest)) @@ -810,7 +882,7 @@ func _NotificationService_MarkNotificationAsRead_Handler(srv interface{}, ctx co } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/clairpb.NotificationService/MarkNotificationAsRead", + FullMethod: "/coreos.clair.NotificationService/MarkNotificationAsRead", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(NotificationServiceServer).MarkNotificationAsRead(ctx, req.(*MarkNotificationAsReadRequest)) @@ -819,7 +891,7 @@ func _NotificationService_MarkNotificationAsRead_Handler(srv interface{}, ctx co } var _NotificationService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "clairpb.NotificationService", + ServiceName: "coreos.clair.NotificationService", HandlerType: (*NotificationServiceServer)(nil), Methods: []grpc.MethodDesc{ { @@ -832,84 +904,86 @@ var _NotificationService_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "clair.proto", + Metadata: "api/v3/clairpb/clair.proto", } -func init() { proto.RegisterFile("clair.proto", fileDescriptor0) } +func init() { proto.RegisterFile("api/v3/clairpb/clair.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 1156 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x4d, 0x6f, 0xdb, 0x46, - 0x13, 0x06, 0x25, 0xcb, 0x92, 0x46, 0xf2, 0xd7, 0x5a, 0x51, 0x68, 0xd9, 0x46, 0x1c, 0xbe, 0x78, - 0xd3, 0x20, 0x6d, 0x25, 0x54, 0xf6, 0xa1, 0x35, 0xd2, 0x8f, 0xa4, 0x4e, 0xd2, 0x02, 0x49, 0x10, - 0x30, 0xa9, 0x0f, 0xbd, 0x08, 0x6b, 0x72, 0x64, 0x13, 0xa6, 0x48, 0x96, 0xbb, 0xb2, 0x2c, 0x04, - 0xbd, 0xb4, 0xc7, 0x9e, 0xda, 0xfe, 0x8f, 0xfe, 0x84, 0x5e, 0x0b, 0xf4, 0x9a, 0x7b, 0x81, 0x02, - 0xbd, 0xf6, 0x3f, 0x14, 0xbb, 0xdc, 0xa5, 0x48, 0x89, 0x0e, 0x8c, 0xf6, 0x24, 0xce, 0xcc, 0x33, - 0xbb, 0x33, 0xcf, 0x33, 0x3b, 0x10, 0x34, 0x1c, 0x9f, 0x7a, 0x71, 0x37, 0x8a, 0x43, 0x1e, 0x92, - 0xaa, 0x34, 0xa2, 0x93, 0xce, 0xce, 0x69, 0x18, 0x9e, 0xfa, 0xd8, 0xa3, 0x91, 0xd7, 0xa3, 0x41, - 0x10, 0x72, 0xca, 0xbd, 0x30, 0x60, 0x09, 0xac, 0xb3, 0xad, 0xa2, 0xd2, 0x3a, 0x19, 0x0f, 0x7b, - 0x38, 0x8a, 0xf8, 0x54, 0x05, 0x6f, 0xcd, 0x07, 0xb9, 0x37, 0x42, 0xc6, 0xe9, 0x28, 0x4a, 0x00, - 0xd6, 0x4f, 0x25, 0x58, 0x39, 0x1e, 0xfb, 0x01, 0xc6, 0xf4, 0xc4, 0xf3, 0x3d, 0x3e, 0x25, 0x04, - 0x96, 0x02, 0x3a, 0x42, 0xd3, 0xd8, 0x33, 0xee, 0xd6, 0x6d, 0xf9, 0x4d, 0xfe, 0x0f, 0xab, 0xe2, - 0x97, 0x45, 0xd4, 0xc1, 0x81, 0x8c, 0x96, 0x64, 0x74, 0x25, 0xf5, 0x3e, 0x17, 0xb0, 0x3d, 0x68, - 0xb8, 0xc8, 0x9c, 0xd8, 0x8b, 0x44, 0x81, 0x66, 0x59, 0x62, 0xb2, 0x2e, 0x71, 0xb8, 0xef, 0x05, - 0xe7, 0xe6, 0x52, 0x72, 0xb8, 0xf8, 0x26, 0x1d, 0xa8, 0x31, 0xbc, 0xc0, 0xd8, 0xe3, 0x53, 0xb3, - 0x22, 0xfd, 0xa9, 0x2d, 0x62, 0x23, 0xe4, 0xd4, 0xa5, 0x9c, 0x9a, 0xcb, 0x49, 0x4c, 0xdb, 0x64, - 0x0b, 0x6a, 0x43, 0xef, 0x12, 0xdd, 0xc1, 0xc9, 0xd4, 0xac, 0xca, 0x58, 0x55, 0xda, 0x0f, 0xa7, - 0xe4, 
0x63, 0xd8, 0xa0, 0xc3, 0x21, 0x3a, 0x1c, 0xdd, 0xc1, 0x05, 0xc6, 0x4c, 0xd0, 0x65, 0xd6, - 0xf6, 0xca, 0x77, 0x1b, 0xfd, 0xf5, 0xae, 0xa2, 0xb5, 0xfb, 0x18, 0x29, 0x1f, 0xc7, 0x68, 0xaf, - 0x6b, 0xe8, 0xb1, 0x42, 0x5a, 0x3f, 0x18, 0xd0, 0xf8, 0x5c, 0xa0, 0x5e, 0x72, 0xca, 0xc7, 0x8c, - 0x98, 0x50, 0xf5, 0x3d, 0xc6, 0x31, 0x66, 0xa6, 0xb1, 0x57, 0x16, 0x17, 0x29, 0x93, 0xec, 0x40, - 0xdd, 0x45, 0x8e, 0x0e, 0x0f, 0x63, 0x66, 0x96, 0x64, 0x6c, 0xe6, 0x20, 0x47, 0xb0, 0xee, 0x53, - 0xc6, 0x07, 0xe3, 0xc8, 0xa5, 0x1c, 0x07, 0x82, 0x7b, 0x49, 0x4a, 0xa3, 0xdf, 0xe9, 0x26, 0xc2, - 0x74, 0xb5, 0x30, 0xdd, 0x57, 0x5a, 0x18, 0x7b, 0x55, 0xe4, 0x7c, 0x25, 0x53, 0x84, 0xd3, 0xfa, - 0xcd, 0x80, 0xaa, 0xaa, 0xf5, 0xbf, 0x88, 0x63, 0x42, 0x55, 0x51, 0xa1, 0x84, 0xd1, 0xa6, 0x38, - 0x40, 0x7d, 0x0e, 0x86, 0x61, 0x3c, 0xa2, 0x5c, 0xc9, 0xb3, 0xa2, 0xbc, 0x8f, 0xa5, 0x93, 0x7c, - 0x06, 0x6b, 0x17, 0x99, 0x49, 0xf1, 0x90, 0x99, 0x15, 0x49, 0x69, 0x3b, 0xa5, 0x34, 0x37, 0x49, - 0xf6, 0x3c, 0xdc, 0xfa, 0xdd, 0x80, 0xda, 0x83, 0xc0, 0x41, 0xc6, 0xe3, 0xe2, 0x39, 0x7b, 0x0f, - 0x6a, 0xc3, 0xa4, 0xd3, 0x84, 0xcd, 0x22, 0xb9, 0x52, 0x04, 0xb9, 0x03, 0xcb, 0x3e, 0x9d, 0x0a, - 0x55, 0xca, 0x12, 0xbb, 0x9a, 0x62, 0x9f, 0x0a, 0xb7, 0xad, 0xa2, 0xe4, 0x1d, 0x58, 0x63, 0x0e, - 0x0d, 0x02, 0x74, 0x07, 0x5a, 0xc6, 0x25, 0x29, 0xd5, 0xaa, 0x72, 0x3f, 0x55, 0x6a, 0xbe, 0x0b, - 0x1b, 0x1a, 0x38, 0x53, 0xb5, 0x22, 0xa1, 0xeb, 0x2a, 0x70, 0xa4, 0xfd, 0xd6, 0x36, 0x54, 0xe4, - 0x35, 0xa2, 0x91, 0x33, 0xca, 0xce, 0x74, 0x23, 0xe2, 0xdb, 0xfa, 0xc3, 0x80, 0xe6, 0xf3, 0x90, - 0x7b, 0x43, 0xcf, 0xa1, 0x7a, 0xf0, 0x17, 0xba, 0x35, 0xa1, 0xea, 0xc4, 0x48, 0x39, 0xba, 0x4a, - 0x31, 0x6d, 0x8a, 0xb1, 0x0f, 0x64, 0x36, 0xba, 0x4a, 0xac, 0xd4, 0x16, 0x59, 0x2e, 0xfa, 0x28, - 0xb2, 0x12, 0x99, 0xb4, 0x49, 0x0e, 0xa0, 0x1c, 0xfa, 0xae, 0x7c, 0x43, 0x8d, 0xbe, 0x95, 0x92, - 0xf1, 0x82, 0x9e, 0xa2, 0xab, 0x95, 0xf1, 0x51, 0x09, 0xe0, 0x21, 0xb3, 0x05, 0x5c, 0x64, 0x05, - 0x38, 0x91, 0xaf, 0xeb, 0x9a, 0x59, 0x01, 0x4e, 0xac, 0x4f, 0x61, 0xf3, 0xcb, 0xc0, 0xc5, 0x4b, - 0x74, 0xb5, 0xa0, 0x72, 0xc8, 0x5a, 0x50, 0xf1, 0x84, 0x5b, 0xf6, 0x59, 0xb1, 0x13, 0x23, 0x6d, - 0xbe, 0x34, 0x6b, 0xde, 0xfa, 0xdb, 0x80, 0xad, 0x2b, 0xef, 0x20, 0xb7, 0xa1, 0xe9, 0x8c, 0xe3, - 0x18, 0x03, 0x3e, 0x88, 0xe8, 0xa9, 0xa6, 0xad, 0xa1, 0x7c, 0x22, 0x8f, 0x6c, 0x43, 0x3d, 0xc0, - 0x4b, 0x15, 0x2f, 0x29, 0x92, 0xf0, 0x32, 0x09, 0xb6, 0xa0, 0xe2, 0x7b, 0x23, 0x8f, 0x4b, 0xf6, - 0x2a, 0x76, 0x62, 0x90, 0xfb, 0xb0, 0x92, 0x1d, 0xc9, 0xa9, 0x24, 0xf0, 0xea, 0xf9, 0xcd, 0x83, - 0xc9, 0x7d, 0x00, 0x9a, 0x56, 0xa8, 0x46, 0x7f, 0x27, 0x4d, 0x2d, 0x60, 0xc3, 0xce, 0xe0, 0xad, - 0x37, 0x25, 0xd8, 0x7c, 0x11, 0x32, 0xae, 0x01, 0x36, 0x7e, 0x33, 0x46, 0xc6, 0xc9, 0xff, 0x60, - 0x45, 0xa1, 0xa6, 0x83, 0xcc, 0x84, 0x34, 0x69, 0x96, 0xd6, 0x36, 0x2c, 0xab, 0x97, 0x99, 0x34, - 0xaa, 0x2c, 0xf2, 0xc9, 0xdc, 0x0b, 0xb8, 0x33, 0x93, 0x6f, 0xf1, 0x2a, 0xe9, 0xcb, 0xbd, 0x8c, - 0xce, 0xaf, 0x06, 0xd4, 0x53, 0x6f, 0xd1, 0x20, 0x0b, 0x5f, 0x44, 0xf9, 0x99, 0x96, 0x4e, 0x7c, - 0x93, 0x67, 0x50, 0x3d, 0x43, 0xea, 0xce, 0xae, 0xdd, 0xbf, 0xde, 0xb5, 0xdd, 0x2f, 0x92, 0xac, - 0x47, 0x81, 0x88, 0xea, 0x33, 0x3a, 0x87, 0xd0, 0xcc, 0x06, 0xc8, 0x3a, 0x94, 0xcf, 0x71, 0xaa, - 0xaa, 0x10, 0x9f, 0x42, 0xcd, 0x0b, 0xea, 0x8f, 0xb5, 0xcc, 0x89, 0x71, 0x58, 0xfa, 0xd0, 0xb0, - 0x8e, 0xa0, 0x95, 0xbf, 0x92, 0x45, 0x61, 0xc0, 0xc4, 0x22, 0x59, 0x66, 0x72, 0x77, 0xcb, 0x63, - 0x1a, 0xfd, 0x56, 0x5a, 0x61, 0x66, 0xaf, 0xdb, 0x0a, 0x63, 0xfd, 0x68, 0x00, 0x79, 0x82, 0xff, - 0x4e, 0x9a, 0x0f, 0xa0, 0x35, 
0xf1, 0xf8, 0xd9, 0x60, 0x7e, 0x35, 0x8a, 0x52, 0x6b, 0xf6, 0xa6, - 0x88, 0x1d, 0xe7, 0x43, 0xe2, 0x5c, 0x99, 0x92, 0xae, 0xba, 0xb2, 0xc4, 0x36, 0x85, 0x53, 0x6d, - 0x39, 0x66, 0xc5, 0xb0, 0x99, 0x2b, 0x49, 0x35, 0xf6, 0x3e, 0xd4, 0xf4, 0xf5, 0xaa, 0xb5, 0x8d, - 0xb4, 0xb5, 0x14, 0x9c, 0x42, 0x32, 0x3c, 0x94, 0xae, 0xc1, 0xc3, 0x2f, 0x06, 0xb4, 0x9f, 0x20, - 0xcf, 0x2e, 0x2e, 0xcd, 0xc5, 0x01, 0xb4, 0x43, 0xdf, 0xcd, 0x75, 0x39, 0xcd, 0x3e, 0xcd, 0x56, - 0xe8, 0xbb, 0xb9, 0xd7, 0x23, 0x9f, 0xe1, 0x01, 0xb4, 0x03, 0x9c, 0x14, 0x65, 0x25, 0x4a, 0xb6, - 0x02, 0x9c, 0x2c, 0x66, 0x15, 0x3f, 0x5e, 0xbd, 0x44, 0x96, 0x32, 0x4b, 0xe4, 0x15, 0xdc, 0x5c, - 0xa8, 0x57, 0x11, 0xf5, 0x11, 0x34, 0x83, 0x8c, 0x5f, 0x91, 0x75, 0x23, 0xed, 0x3f, 0x97, 0x94, - 0x83, 0x5a, 0xfb, 0xb0, 0xfb, 0x8c, 0xc6, 0xe7, 0x59, 0xc4, 0x03, 0x66, 0x23, 0x75, 0x35, 0x19, - 0x05, 0xcb, 0xbc, 0xff, 0xa7, 0x01, 0x6b, 0x5a, 0x80, 0x97, 0x18, 0x5f, 0x78, 0x0e, 0x12, 0x0a, - 0xcd, 0xec, 0x74, 0x92, 0x9d, 0xb7, 0xbd, 0x93, 0xce, 0xee, 0x15, 0xd1, 0xa4, 0x21, 0xab, 0xf5, - 0xdd, 0x9b, 0xbf, 0x7e, 0x2e, 0xad, 0x5a, 0xf5, 0x9e, 0x56, 0xf7, 0xd0, 0xb8, 0x47, 0xce, 0xa1, - 0x91, 0x19, 0x13, 0xb2, 0x9d, 0x9e, 0xb1, 0x38, 0xcf, 0x9d, 0x9d, 0xe2, 0xa0, 0x3a, 0xff, 0xb6, - 0x3c, 0x7f, 0x9b, 0x6c, 0xa5, 0xe7, 0xf7, 0x5e, 0xe7, 0xc6, 0xff, 0xdb, 0xfe, 0xf7, 0x25, 0xd8, - 0xcc, 0xb2, 0xa2, 0xfb, 0x64, 0xb0, 0x36, 0x27, 0x03, 0xb9, 0x95, 0xbd, 0xab, 0x60, 0xa0, 0x3a, - 0x7b, 0x57, 0x03, 0x54, 0x41, 0xbb, 0xb2, 0xa0, 0x9b, 0xe4, 0x46, 0x2f, 0xab, 0x0e, 0xeb, 0xbd, - 0x96, 0xc5, 0x90, 0x09, 0xb4, 0x8b, 0x55, 0x22, 0xb3, 0x2d, 0xf8, 0x56, 0x19, 0x3b, 0xed, 0x85, - 0x3f, 0x61, 0x8f, 0xc4, 0x5f, 0x67, 0x7d, 0xf1, 0xbd, 0xe2, 0x8b, 0x1f, 0xd6, 0xbf, 0xd6, 0xff, - 0xcc, 0x4f, 0x96, 0x65, 0xe6, 0xfe, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6e, 0xc1, 0x1d, 0xff, - 0xb8, 0x0b, 0x00, 0x00, + // 1191 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x4f, 0x73, 0xdb, 0x44, + 0x14, 0x1f, 0x29, 0x71, 0xec, 0x3c, 0x3b, 0x7f, 0xba, 0x76, 0x53, 0x45, 0x69, 0x8b, 0x23, 0xe8, + 0xb4, 0x93, 0x30, 0xf6, 0xc4, 0xe1, 0x50, 0xc2, 0x81, 0x49, 0xda, 0xa6, 0x74, 0xa6, 0x74, 0x3a, + 0x2a, 0xe4, 0x00, 0x07, 0xcf, 0x5a, 0x7a, 0x4e, 0x34, 0x91, 0x25, 0xa3, 0x5d, 0x3b, 0xf1, 0x74, + 0x7a, 0xe1, 0xca, 0x09, 0x38, 0xf0, 0x19, 0xb8, 0xf0, 0x25, 0xb8, 0x72, 0x02, 0x8e, 0x70, 0x63, + 0x06, 0xbe, 0x00, 0x77, 0x66, 0x57, 0x2b, 0x45, 0x72, 0xd4, 0x24, 0xa5, 0x27, 0xeb, 0xfd, 0xde, + 0x9f, 0x7d, 0x7f, 0x7e, 0x6f, 0x37, 0x01, 0x93, 0x0e, 0xbd, 0xf6, 0x78, 0xbb, 0xed, 0xf8, 0xd4, + 0x8b, 0x86, 0xbd, 0xf8, 0xb7, 0x35, 0x8c, 0x42, 0x1e, 0x92, 0x9a, 0x13, 0x46, 0x18, 0xb2, 0x96, + 0xc4, 0xcc, 0x77, 0x0e, 0xc3, 0xf0, 0xd0, 0xc7, 0xb6, 0xd4, 0xf5, 0x46, 0xfd, 0x36, 0xf7, 0x06, + 0xc8, 0x38, 0x1d, 0x0c, 0x63, 0x73, 0xf3, 0xa6, 0x32, 0x10, 0x11, 0x69, 0x10, 0x84, 0x9c, 0x72, + 0x2f, 0x0c, 0x58, 0xac, 0xb5, 0x7e, 0xd0, 0x61, 0xe1, 0x60, 0xe4, 0x07, 0x18, 0xd1, 0x9e, 0xe7, + 0x7b, 0x7c, 0x42, 0x08, 0xcc, 0x06, 0x74, 0x80, 0x86, 0xd6, 0xd4, 0xee, 0xcd, 0xdb, 0xf2, 0x9b, + 0xdc, 0x81, 0x45, 0xf1, 0xcb, 0x86, 0xd4, 0xc1, 0xae, 0xd4, 0xea, 0x52, 0xbb, 0x90, 0xa2, 0xcf, + 0x84, 0x59, 0x13, 0xaa, 0x2e, 0x32, 0x27, 0xf2, 0x86, 0xe2, 0x08, 0x63, 0x46, 0xda, 0x64, 0x21, + 0x11, 0xdc, 0xf7, 0x82, 0x63, 0x63, 0x36, 0x0e, 0x2e, 0xbe, 0x89, 0x09, 0x15, 0x86, 0x63, 0x8c, + 0x3c, 0x3e, 0x31, 0x4a, 0x12, 0x4f, 0x65, 0xa1, 0x1b, 0x20, 0xa7, 0x2e, 0xe5, 0xd4, 0x98, 0x8b, + 0x75, 0x89, 0x4c, 0x56, 0xa1, 0xd2, 0xf7, 0x4e, 0xd1, 0xed, 0xf6, 0x26, 0x46, 
0x59, 0xea, 0xca, + 0x52, 0xde, 0x9b, 0x90, 0x3d, 0xb8, 0x46, 0xfb, 0x7d, 0x74, 0x38, 0xba, 0xdd, 0x31, 0x46, 0x4c, + 0x14, 0x6c, 0x54, 0x9a, 0x33, 0xf7, 0xaa, 0x9d, 0xeb, 0xad, 0x6c, 0xfb, 0x5a, 0xfb, 0x48, 0xf9, + 0x28, 0x42, 0x7b, 0x39, 0xb1, 0x3f, 0x50, 0xe6, 0xd6, 0x2f, 0x1a, 0x94, 0x95, 0xf6, 0x6d, 0x7a, + 0x62, 0x40, 0x59, 0x65, 0xa0, 0xfa, 0x91, 0x88, 0x22, 0x80, 0xfa, 0xec, 0xf6, 0xc3, 0x68, 0x40, + 0xb9, 0xea, 0xca, 0x82, 0x42, 0xf7, 0x25, 0x48, 0x1e, 0xc1, 0xd2, 0x38, 0x33, 0x20, 0x0f, 0x99, + 0x51, 0x92, 0x95, 0xac, 0xe5, 0x2b, 0xc9, 0x4d, 0xd1, 0x9e, 0xf6, 0xb1, 0xd6, 0xa0, 0xf4, 0x94, + 0x4e, 0x30, 0x12, 0xb5, 0x1c, 0x51, 0x76, 0x94, 0xd4, 0x22, 0xbe, 0xad, 0x6f, 0x34, 0xa8, 0x3e, + 0x10, 0x51, 0x5e, 0x70, 0xca, 0x47, 0x4c, 0x24, 0xed, 0x7b, 0x8c, 0x63, 0xc4, 0x0c, 0xad, 0x39, + 0x23, 0x92, 0x56, 0x22, 0xb9, 0x09, 0xf3, 0x2e, 0x72, 0x74, 0x78, 0x18, 0x31, 0x43, 0x97, 0xba, + 0x33, 0x80, 0x3c, 0x84, 0x65, 0x9f, 0x32, 0xde, 0x1d, 0x0d, 0x5d, 0xca, 0xb1, 0x2b, 0xa8, 0x28, + 0xab, 0xae, 0x76, 0xcc, 0x56, 0x4c, 0xc3, 0x56, 0xc2, 0xd3, 0xd6, 0x67, 0x09, 0x4f, 0xed, 0x45, + 0xe1, 0xf3, 0xb9, 0x74, 0x11, 0xa0, 0xf5, 0xad, 0x06, 0xe4, 0x31, 0xf2, 0xdd, 0xc0, 0x41, 0xc6, + 0xa3, 0x89, 0x8d, 0x5f, 0x8d, 0x90, 0x71, 0xf2, 0x2e, 0x2c, 0x50, 0x05, 0x75, 0x33, 0xd3, 0xa8, + 0x25, 0xa0, 0x6c, 0xf7, 0x16, 0x34, 0x4e, 0x3c, 0x7e, 0xd4, 0x9d, 0x6e, 0x99, 0x98, 0x4d, 0xc5, + 0xae, 0x0b, 0xdd, 0x41, 0x5e, 0x25, 0xe2, 0x4a, 0x97, 0x7e, 0x3c, 0x6c, 0x26, 0x33, 0xae, 0xd8, + 0x35, 0x01, 0x2a, 0x02, 0x30, 0xeb, 0x6f, 0x1d, 0xea, 0xb9, 0x9c, 0xd8, 0x30, 0x0c, 0x18, 0x92, + 0x7d, 0xa8, 0x24, 0xe7, 0xcb, 0x7c, 0xaa, 0x9d, 0x8d, 0xfc, 0x58, 0x0a, 0x9c, 0x5a, 0x29, 0x90, + 0xfa, 0x92, 0x2d, 0x98, 0x63, 0xb2, 0xf7, 0x32, 0xd3, 0x6a, 0x67, 0x35, 0x1f, 0x25, 0x33, 0x1c, + 0x5b, 0x19, 0x9a, 0xbf, 0x6b, 0x50, 0x49, 0x22, 0x15, 0x32, 0x74, 0x0b, 0x2a, 0x69, 0x4d, 0xfa, + 0x45, 0xe4, 0x4f, 0xcd, 0xc8, 0x26, 0xcc, 0xf9, 0x82, 0x25, 0xa2, 0x09, 0xc2, 0xa1, 0x9e, 0x77, + 0x90, 0x0c, 0xb2, 0x95, 0x09, 0xb9, 0x0b, 0x4b, 0xcc, 0xa1, 0x41, 0x80, 0x6e, 0x37, 0x61, 0xcb, + 0xac, 0x64, 0xc4, 0xa2, 0x82, 0x9f, 0x2a, 0xd2, 0x6c, 0xc2, 0xb5, 0xc4, 0xf0, 0x8c, 0x3c, 0x25, + 0x69, 0xba, 0xac, 0x14, 0x0f, 0x13, 0xdc, 0xfa, 0x53, 0x87, 0xfa, 0xf3, 0x90, 0xfd, 0xbf, 0xf1, + 0xaf, 0xc0, 0x9c, 0xda, 0xa5, 0x78, 0x19, 0x95, 0x44, 0x1e, 0x4c, 0xd5, 0xb5, 0x99, 0xaf, 0xab, + 0xe0, 0x3c, 0x89, 0xe5, 0xea, 0x35, 0x7f, 0xd6, 0x60, 0x3e, 0x45, 0x8b, 0xf6, 0x48, 0x60, 0x43, + 0xca, 0x8f, 0xd4, 0xe1, 0xf2, 0x9b, 0xd8, 0x50, 0x3e, 0x42, 0xea, 0x9e, 0x9d, 0x7d, 0xff, 0x0d, + 0xce, 0x6e, 0x7d, 0x12, 0xbb, 0x3e, 0x0a, 0x84, 0x36, 0x09, 0x64, 0xee, 0x40, 0x2d, 0xab, 0x20, + 0xcb, 0x30, 0x73, 0x8c, 0x13, 0x95, 0x8a, 0xf8, 0x24, 0x0d, 0x28, 0x8d, 0xa9, 0x3f, 0x4a, 0x2e, + 0xa5, 0x58, 0xd8, 0xd1, 0xef, 0x6b, 0xd6, 0x13, 0x68, 0xe4, 0x8f, 0x54, 0x4c, 0x3e, 0x63, 0xa0, + 0x76, 0x45, 0x06, 0x5a, 0x3f, 0x69, 0xb0, 0xf2, 0x18, 0xf9, 0xb3, 0x90, 0x7b, 0x7d, 0xcf, 0x91, + 0xef, 0x4a, 0x32, 0xad, 0x0f, 0x60, 0x25, 0xf4, 0xdd, 0xdc, 0x1a, 0x4e, 0xba, 0x43, 0x7a, 0x98, + 0x8c, 0xad, 0x11, 0xfa, 0x6e, 0xee, 0xc6, 0x7a, 0x4e, 0x0f, 0x51, 0x78, 0x05, 0x78, 0x52, 0xe4, + 0x15, 0x97, 0xd1, 0x08, 0xf0, 0xe4, 0xbc, 0x57, 0x03, 0x4a, 0xbe, 0x37, 0xf0, 0xb8, 0x5c, 0xdc, + 0x92, 0x1d, 0x0b, 0xe9, 0x46, 0xcc, 0x9e, 0x6d, 0x84, 0xf5, 0x87, 0x0e, 0x37, 0xce, 0x25, 0xac, + 0xea, 0x3f, 0x80, 0x5a, 0x90, 0xc1, 0x55, 0x17, 0x3a, 0xe7, 0xb6, 0xb9, 0xc8, 0xb9, 0x95, 0x03, + 0x73, 0x71, 0xcc, 0x7f, 0x34, 0xa8, 0x65, 0xd5, 0x85, 0xab, 0x6a, 0x40, 0xd9, 0x89, 0x90, 0x72, + 0x74, 
0x55, 0xa5, 0x89, 0x28, 0x5e, 0xc0, 0x38, 0x1c, 0xba, 0xea, 0x01, 0x49, 0x65, 0xe1, 0xe5, + 0xa2, 0x8f, 0xc2, 0x2b, 0xae, 0x32, 0x11, 0xc9, 0x87, 0x30, 0x13, 0xfa, 0xae, 0x7c, 0x4e, 0xab, + 0x9d, 0xbb, 0x53, 0x84, 0xa3, 0x87, 0x98, 0xf6, 0xde, 0x47, 0x45, 0x04, 0x0f, 0x99, 0x2d, 0x7c, + 0x84, 0x6b, 0x80, 0x27, 0xf2, 0xb5, 0x7d, 0x13, 0xd7, 0x00, 0x4f, 0xac, 0x5f, 0x75, 0x58, 0x7d, + 0xad, 0x09, 0x59, 0x87, 0x9a, 0x33, 0x8a, 0x22, 0x0c, 0x78, 0x96, 0x08, 0x55, 0x85, 0xc9, 0x49, + 0xae, 0xc1, 0x7c, 0x80, 0xa7, 0x3c, 0x3b, 0xf2, 0x8a, 0x00, 0x2e, 0x18, 0xf3, 0x2e, 0x2c, 0xe4, + 0xe8, 0x22, 0x3b, 0x71, 0xc9, 0xe3, 0x98, 0xf7, 0x20, 0x5f, 0x02, 0xd0, 0x34, 0x4d, 0xf5, 0xb8, + 0x7e, 0x74, 0xc5, 0xc2, 0x5b, 0x4f, 0x02, 0x17, 0x4f, 0xd1, 0xdd, 0xcd, 0xdc, 0x42, 0x76, 0x26, + 0x9c, 0xf9, 0x31, 0xd4, 0x0b, 0x4c, 0x44, 0x31, 0x9e, 0x80, 0x65, 0x17, 0x4a, 0x76, 0x2c, 0xa4, + 0xd4, 0xd0, 0x33, 0x9c, 0xdd, 0x86, 0x5b, 0x9f, 0xd2, 0xe8, 0x38, 0x4b, 0xa1, 0x5d, 0x66, 0x23, + 0x75, 0x93, 0x55, 0x2b, 0xe0, 0x93, 0xd5, 0x84, 0xdb, 0xaf, 0x73, 0x8a, 0x19, 0xdb, 0xf9, 0x57, + 0x83, 0xa5, 0x24, 0xa3, 0x17, 0x18, 0x8d, 0x3d, 0x07, 0xc9, 0x08, 0xaa, 0x99, 0xe7, 0x8a, 0x34, + 0x2f, 0x78, 0xc9, 0xe4, 0xd1, 0xe6, 0xfa, 0xa5, 0x6f, 0x9d, 0xb5, 0xfe, 0xf5, 0x6f, 0x7f, 0x7d, + 0xaf, 0xaf, 0x91, 0xd5, 0x76, 0x72, 0x51, 0xb7, 0x5f, 0xe6, 0xee, 0xf1, 0x57, 0xe4, 0x18, 0x6a, + 0xd9, 0x1b, 0x89, 0xac, 0x5f, 0x7a, 0x41, 0x9a, 0xd6, 0x45, 0x26, 0xea, 0xe4, 0x86, 0x3c, 0x79, + 0xd1, 0x9a, 0x4f, 0x4f, 0xde, 0xd1, 0x36, 0x3a, 0x3f, 0xea, 0x50, 0xcf, 0xb6, 0x25, 0xa9, 0xfd, + 0x15, 0x2c, 0x4d, 0x2d, 0x37, 0x79, 0xef, 0x92, 0xdd, 0x8f, 0x53, 0xb9, 0x73, 0xa5, 0x1b, 0xc2, + 0xba, 0x25, 0xb3, 0xb9, 0x41, 0xae, 0xb7, 0xb3, 0xb7, 0x03, 0x6b, 0xbf, 0x8c, 0x7b, 0xf0, 0x9d, + 0x06, 0x2b, 0xc5, 0x13, 0x23, 0x53, 0x6f, 0xd5, 0x85, 0x64, 0x30, 0xdf, 0xbf, 0x9a, 0x71, 0x3e, + 0xa9, 0x8d, 0xe2, 0xa4, 0xf6, 0x6e, 0x43, 0xdd, 0x09, 0x07, 0xf9, 0x88, 0xc3, 0xde, 0x17, 0x65, + 0xf5, 0x5f, 0x49, 0x6f, 0x4e, 0xfe, 0x31, 0xb7, 0xfd, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe6, + 0x24, 0x28, 0x46, 0xae, 0x0c, 0x00, 0x00, } diff --git a/api/v3/clairpb/clair.pb.gw.go b/api/v3/clairpb/clair.pb.gw.go index 21bfdf12..5e4cd05c 100644 --- a/api/v3/clairpb/clair.pb.gw.go +++ b/api/v3/clairpb/clair.pb.gw.go @@ -1,6 +1,5 @@ -// Code generated by protoc-gen-grpc-gateway -// source: clair.proto -// DO NOT EDIT! +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: api/v3/clairpb/clair.proto /* Package clairpb is a reverse proxy. 
@@ -29,19 +28,6 @@ var _ status.Status var _ = runtime.String var _ = utilities.NewDoubleArray -func request_AncestryService_PostAncestry_0(ctx context.Context, marshaler runtime.Marshaler, client AncestryServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq PostAncestryRequest - var metadata runtime.ServerMetadata - - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.PostAncestry(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - var ( filter_AncestryService_GetAncestry_0 = &utilities.DoubleArray{Encoding: map[string]int{"ancestry_name": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} ) @@ -65,7 +51,7 @@ func request_AncestryService_GetAncestry_0(ctx context.Context, marshaler runtim protoReq.AncestryName, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "ancestry_name", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_AncestryService_GetAncestry_0); err != nil { @@ -77,6 +63,21 @@ func request_AncestryService_GetAncestry_0(ctx context.Context, marshaler runtim } +func request_AncestryService_PostAncestry_0(ctx context.Context, marshaler runtime.Marshaler, client AncestryServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostAncestryRequest + var metadata runtime.ServerMetadata + + if req.ContentLength > 0 { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + } + + msg, err := client.PostAncestry(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + var ( filter_NotificationService_GetNotification_0 = &utilities.DoubleArray{Encoding: map[string]int{"name": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} ) @@ -100,7 +101,7 @@ func request_NotificationService_GetNotification_0(ctx context.Context, marshale protoReq.Name, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_NotificationService_GetNotification_0); err != nil { @@ -131,7 +132,7 @@ func request_NotificationService_MarkNotificationAsRead_0(ctx context.Context, m protoReq.Name, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } msg, err := client.MarkNotificationAsRead(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -167,10 +168,18 @@ func RegisterAncestryServiceHandlerFromEndpoint(ctx context.Context, mux *runtim // RegisterAncestryServiceHandler registers the http handlers for service AncestryService to "mux". // The handlers forward requests to the grpc endpoint over "conn". 
func RegisterAncestryServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewAncestryServiceClient(conn) + return RegisterAncestryServiceHandlerClient(ctx, mux, NewAncestryServiceClient(conn)) +} - mux.Handle("POST", pattern_AncestryService_PostAncestry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) +// RegisterAncestryServiceHandler registers the http handlers for service AncestryService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "AncestryServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AncestryServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "AncestryServiceClient" to call the correct interceptors. +func RegisterAncestryServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AncestryServiceClient) error { + + mux.Handle("GET", pattern_AncestryService_GetAncestry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -187,19 +196,19 @@ func RegisterAncestryServiceHandler(ctx context.Context, mux *runtime.ServeMux, runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AncestryService_PostAncestry_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AncestryService_GetAncestry_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AncestryService_PostAncestry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AncestryService_GetAncestry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - mux.Handle("GET", pattern_AncestryService_GetAncestry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + mux.Handle("POST", pattern_AncestryService_PostAncestry_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -216,14 +225,14 @@ func RegisterAncestryServiceHandler(ctx context.Context, mux *runtime.ServeMux, runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AncestryService_GetAncestry_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AncestryService_PostAncestry_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AncestryService_GetAncestry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AncestryService_PostAncestry_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) @@ -231,15 +240,15 @@ func RegisterAncestryServiceHandler(ctx context.Context, mux *runtime.ServeMux, } var ( - pattern_AncestryService_PostAncestry_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"ancestry"}, "")) - pattern_AncestryService_GetAncestry_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 1, 0, 4, 1, 5, 1}, []string{"ancestry", "ancestry_name"}, "")) + + pattern_AncestryService_PostAncestry_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"ancestry"}, "")) ) var ( - forward_AncestryService_PostAncestry_0 = runtime.ForwardResponseMessage - forward_AncestryService_GetAncestry_0 = runtime.ForwardResponseMessage + + forward_AncestryService_PostAncestry_0 = runtime.ForwardResponseMessage ) // RegisterNotificationServiceHandlerFromEndpoint is same as RegisterNotificationServiceHandler but @@ -270,10 +279,18 @@ func RegisterNotificationServiceHandlerFromEndpoint(ctx context.Context, mux *ru // RegisterNotificationServiceHandler registers the http handlers for service NotificationService to "mux". // The handlers forward requests to the grpc endpoint over "conn". func RegisterNotificationServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewNotificationServiceClient(conn) + return RegisterNotificationServiceHandlerClient(ctx, mux, NewNotificationServiceClient(conn)) +} + +// RegisterNotificationServiceHandler registers the http handlers for service NotificationService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "NotificationServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "NotificationServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "NotificationServiceClient" to call the correct interceptors. +func RegisterNotificationServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client NotificationServiceClient) error { mux.Handle("GET", pattern_NotificationService_GetNotification_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -302,7 +319,7 @@ func RegisterNotificationServiceHandler(ctx context.Context, mux *runtime.ServeM }) mux.Handle("DELETE", pattern_NotificationService_MarkNotificationAsRead_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { diff --git a/api/v3/clairpb/clair.proto b/api/v3/clairpb/clair.proto index 8d704230..4dcce1b3 100644 --- a/api/v3/clairpb/clair.proto +++ b/api/v3/clairpb/clair.proto @@ -1,4 +1,4 @@ -// Copyright 2017 clair authors +// Copyright 2018 clair authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -13,150 +13,197 @@ // limitations under the License. 
syntax = "proto3"; -option go_package = "clairpb"; -package clairpb; -import "google/api/annotations.proto"; -import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -message Vulnerability { - string name = 1; - string namespace_name = 2; - string description = 3; - string link = 4; - string severity = 5; - string metadata = 6; - // fixed_by exists when vulnerability is under feature. - string fixed_by = 7; - // affected_versions exists when vulnerability is under notification. - repeated Feature affected_versions = 8; -} - -message ClairStatus { - // listers and detectors are processors implemented in this Clair and used to - // scan ancestries - repeated string listers = 1; - repeated string detectors = 2; - google.protobuf.Timestamp last_update_time = 3; -} +import "google/api/annotations.proto"; -message Feature{ - string name = 1; - string namespace_name = 2; - string version = 3; - string version_format = 4; - repeated Vulnerability vulnerabilities = 5; -} +package coreos.clair; -message Ancestry { - string name = 1; - repeated Feature features = 2; - repeated Layer layers = 3; +option go_package = "clairpb"; +option java_package = "com.coreos.clair.pb"; - // scanned_listers and scanned_detectors are used to scan this ancestry, it - // may be different from listers and detectors in ClairStatus since the - // ancestry could be scanned by previous version of Clair. - repeated string scanned_listers = 4; - repeated string scanned_detectors = 5; +message Vulnerability { + // The name of the vulnerability. + string name = 1; + // The name of the namespace in which the vulnerability was detected. + string namespace_name = 2; + // A description of the vulnerability according to the source for the namespace. + string description = 3; + // A link to the vulnerability according to the source for the namespace. + string link = 4; + // How dangerous the vulnerability is. + string severity = 5; + // Namespace agnostic metadata about the vulnerability. + string metadata = 6; + // The feature that fixes this vulnerability. + // This field only exists when a vulnerability is a part of a Feature. + string fixed_by = 7; + // The Features that are affected by the vulnerability. + // This field only exists when a vulnerability is a part of a Notification. + repeated Feature affected_versions = 8; +} + +message Feature { + // The name of the feature. + string name = 1; + // The name of the namespace in which the feature is detected. + string namespace_name = 2; + // The specific version of this feature. + string version = 3; + // The format used to parse version numbers for the feature. + string version_format = 4; + // The list of vulnerabilities that affect the feature. + repeated Vulnerability vulnerabilities = 5; } message Layer { - string hash = 1; + // The sha256 tarsum for the layer. + string hash = 1; +} + +service AncestryService { + // The RPC used to read the results of scanning for a particular ancestry. + rpc GetAncestry(GetAncestryRequest) returns (GetAncestryResponse) { + option (google.api.http) = { get: "/ancestry/{ancestry_name}" }; + } + // The RPC used to create a new scan of an ancestry. 
+  rpc PostAncestry(PostAncestryRequest) returns (PostAncestryResponse) {
+    option (google.api.http) = {
+      post: "/ancestry"
+      body: "*"
+    };
+  }
 }

-message Notification {
-  string name = 1;
-  string created = 2;
-  string notified = 3;
-  string deleted = 4;
-  PagedVulnerableAncestries old = 5;
-  PagedVulnerableAncestries new = 6;
+message ClairStatus {
+  // The configured list of feature listers used to scan an ancestry.
+  repeated string listers = 1;
+  // The configured list of namespace detectors used to scan an ancestry.
+  repeated string detectors = 2;
+  // The time at which the updater last ran.
+  google.protobuf.Timestamp last_update_time = 3;
 }

-message IndexedAncestryName {
-  // index is unique to name in all streams simultaneously streamed, increasing
-  // and larger than all indexes in previous page in same stream.
-  int32 index = 1;
-  string name = 2;
+message GetAncestryRequest {
+  // The name of the desired ancestry.
+  string ancestry_name = 1;
+  // Whether to include vulnerabilities or not in the response.
+  bool with_vulnerabilities = 2;
+  // Whether to include features or not in the response.
+  bool with_features = 3;
 }

-message PagedVulnerableAncestries {
-  string current_page = 1;
-  // if next_page is empty, it signals the end of all pages.
-  string next_page = 2;
-  int32 limit = 3;
-  Vulnerability vulnerability = 4;
-  repeated IndexedAncestryName ancestries = 5;
+message GetAncestryResponse {
+  message Ancestry {
+    // The name of the desired ancestry.
+    string name = 1;
+    // The list of features present in the ancestry.
+    // This will only be provided if requested.
+    repeated Feature features = 2;
+    // The layers present in the ancestry.
+    repeated Layer layers = 3;
+    // The configured list of feature listers used to scan this ancestry.
+    repeated string scanned_listers = 4;
+    // The configured list of namespace detectors used to scan an ancestry.
+    repeated string scanned_detectors = 5;
+  }
+  // The ancestry requested.
+  Ancestry ancestry = 1;
+  // The status of Clair at the time of the request.
+  ClairStatus status = 2;
 }

 message PostAncestryRequest {
-  message PostLayer {
-    string hash = 1;
-    string path = 2;
-    map<string, string> headers = 3;
-  }
-  string ancestry_name = 1;
-  string format = 2;
-  repeated PostLayer layers = 3;
+  message PostLayer {
+    // The hash of the layer.
+    string hash = 1;
+    // The location of the layer (URL or filepath).
+    string path = 2;
+    // Any HTTP Headers that need to be used if requesting a layer over HTTP(S).
+    map<string, string> headers = 3;
+  }
+  // The name of the ancestry being scanned.
+  // If scanning OCI images, this should be the hash of the manifest.
+  string ancestry_name = 1;
+  // The format of the image being uploaded.
+  string format = 2;
+  // The layers to be scanned for this particular ancestry.
+  repeated PostLayer layers = 3;
 }

 message PostAncestryResponse {
-  ClairStatus status = 1;
-}
-
-message GetAncestryRequest {
-  string ancestry_name = 1;
-  bool with_vulnerabilities = 2;
-  bool with_features = 3;
+  // The status of Clair at the time of the request.
+  ClairStatus status = 1;
 }

-message GetAncestryResponse {
-  Ancestry ancestry = 1;
-  ClairStatus status = 2;
+service NotificationService {
+  // The RPC used to get a particularly Notification.
+  rpc GetNotification(GetNotificationRequest) returns (GetNotificationResponse) {
+    option (google.api.http) = { get: "/notifications/{name}" };
+  }
+  // The RPC used to mark a Notification as read after it has been processed.
+ rpc MarkNotificationAsRead(MarkNotificationAsReadRequest) returns (MarkNotificationAsReadResponse) { + option (google.api.http) = { delete: "/notifications/{name}" }; + } } message GetNotificationRequest { - // if the vulnerability_page is empty, it implies the first page. - string old_vulnerability_page = 1; - string new_vulnerability_page = 2; - int32 limit = 3; - string name = 4; + // The current page of previous vulnerabilities for the ancestry. + // This will be empty when it is the first page. + string old_vulnerability_page = 1; + // The current page of vulnerabilities for the ancestry. + // This will be empty when it is the first page. + string new_vulnerability_page = 2; + // The requested maximum number of results per page. + int32 limit = 3; + // The name of the notification being requested. + string name = 4; } message GetNotificationResponse { - Notification notification = 1; + message Notification { + // The name of the requested notification. + string name = 1; + // The time at which the notification was created. + string created = 2; + // The time at which the notification was last sent out. + string notified = 3; + // The time at which a notification has been deleted. + string deleted = 4; + // The previous vulnerability and a paginated view of the ancestries it affects. + PagedVulnerableAncestries old = 5; + // The newly updated vulnerability and a paginated view of the ancestries it affects. + PagedVulnerableAncestries new = 6; + } + // The notification as requested. + Notification notification = 1; } -message MarkNotificationAsReadRequest { - string name = 1; +message PagedVulnerableAncestries { + message IndexedAncestryName { + // The index is an ever increasing number associated with the particular ancestry. + // This is useful if you're processing notifications, and need to keep track of the progress of paginating the results. + int32 index = 1; + // The name of the ancestry. + string name = 2; + } + // The identifier for the current page. + string current_page = 1; + // The token used to request the next page. + // This will be empty when there are no more pages. + string next_page = 2; + // The requested maximum number of results per page. + int32 limit = 3; + // The vulnerability that affects a given set of ancestries. + Vulnerability vulnerability = 4; + // The ancestries affected by a vulnerability. + repeated IndexedAncestryName ancestries = 5; } -service AncestryService{ - rpc PostAncestry(PostAncestryRequest) returns (PostAncestryResponse) { - option (google.api.http) = { - post: "/ancestry" - body: "*" - }; - } - - rpc GetAncestry(GetAncestryRequest) returns (GetAncestryResponse) { - option (google.api.http) = { - get: "/ancestry/{ancestry_name}" - }; - } +message MarkNotificationAsReadRequest { + // The name of the Notification that has been processed. 
+ string name = 1; } -service NotificationService{ - rpc GetNotification(GetNotificationRequest) returns (GetNotificationResponse) { - option (google.api.http) = { - get: "/notifications/{name}" - }; - } - - rpc MarkNotificationAsRead(MarkNotificationAsReadRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/notifications/{name}" - }; - } -} \ No newline at end of file +message MarkNotificationAsReadResponse {} diff --git a/api/v3/clairpb/clair.swagger.json b/api/v3/clairpb/clair.swagger.json index 3e54a8a2..64c8b413 100644 --- a/api/v3/clairpb/clair.swagger.json +++ b/api/v3/clairpb/clair.swagger.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "title": "clair.proto", + "title": "api/v3/clairpb/clair.proto", "version": "version not set" }, "schemes": [ @@ -17,12 +17,13 @@ "paths": { "/ancestry": { "post": { + "summary": "The RPC used to create a new scan of an ancestry.", "operationId": "PostAncestry", "responses": { "200": { "description": "", "schema": { - "$ref": "#/definitions/clairpbPostAncestryResponse" + "$ref": "#/definitions/clairPostAncestryResponse" } } }, @@ -32,7 +33,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/clairpbPostAncestryRequest" + "$ref": "#/definitions/clairPostAncestryRequest" } } ], @@ -43,12 +44,13 @@ }, "/ancestry/{ancestry_name}": { "get": { + "summary": "The RPC used to read the results of scanning for a particular ancestry.", "operationId": "GetAncestry", "responses": { "200": { "description": "", "schema": { - "$ref": "#/definitions/clairpbGetAncestryResponse" + "$ref": "#/definitions/clairGetAncestryResponse" } } }, @@ -61,6 +63,7 @@ }, { "name": "with_vulnerabilities", + "description": "Whether to include vulnerabilities or not in the response.", "in": "query", "required": false, "type": "boolean", @@ -68,6 +71,7 @@ }, { "name": "with_features", + "description": "Whether to include features or not in the response.", "in": "query", "required": false, "type": "boolean", @@ -81,12 +85,13 @@ }, "/notifications/{name}": { "get": { + "summary": "The RPC used to get a particularly Notification.", "operationId": "GetNotification", "responses": { "200": { "description": "", "schema": { - "$ref": "#/definitions/clairpbGetNotificationResponse" + "$ref": "#/definitions/clairGetNotificationResponse" } } }, @@ -99,19 +104,21 @@ }, { "name": "old_vulnerability_page", - "description": "if the vulnerability_page is empty, it implies the first page.", + "description": "The current page of previous vulnerabilities for the ancestry.\nThis will be empty when it is the first page.", "in": "query", "required": false, "type": "string" }, { "name": "new_vulnerability_page", + "description": "The current page of vulnerabilities for the ancestry.\nThis will be empty when it is the first page.", "in": "query", "required": false, "type": "string" }, { "name": "limit", + "description": "The requested maximum number of results per page.", "in": "query", "required": false, "type": "integer", @@ -123,12 +130,13 @@ ] }, "delete": { + "summary": "The RPC used to mark a Notification as read after it has been processed.", "operationId": "MarkNotificationAsRead", "responses": { "200": { "description": "", "schema": { - "$ref": "#/definitions/protobufEmpty" + "$ref": "#/definitions/clairMarkNotificationAsReadResponse" } } }, @@ -147,57 +155,107 @@ } }, "definitions": { - "PostAncestryRequestPostLayer": { - "type": "object", - "properties": { - "hash": { - "type": "string" - }, - "path": { - "type": "string" - }, - "headers": { - 
"type": "object", - "additionalProperties": { - "type": "string" - } - } - } - }, - "clairpbAncestry": { + "GetAncestryResponseAncestry": { "type": "object", "properties": { "name": { - "type": "string" + "type": "string", + "description": "The name of the desired ancestry." }, "features": { "type": "array", "items": { - "$ref": "#/definitions/clairpbFeature" - } + "$ref": "#/definitions/clairFeature" + }, + "description": "The list of features present in the ancestry.\nThis will only be provided if requested." }, "layers": { "type": "array", "items": { - "$ref": "#/definitions/clairpbLayer" - } + "$ref": "#/definitions/clairLayer" + }, + "description": "The layers present in the ancestry." }, "scanned_listers": { "type": "array", "items": { "type": "string" }, - "description": "scanned_listers and scanned_detectors are used to scan this ancestry, it\nmay be different from listers and detectors in ClairStatus since the\nancestry could be scanned by previous version of Clair." + "description": "The configured list of feature listers used to scan this ancestry." }, "scanned_detectors": { "type": "array", "items": { "type": "string" - } + }, + "description": "The configured list of namespace detectors used to scan an ancestry." + } + } + }, + "GetNotificationResponseNotification": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the requested notification." + }, + "created": { + "type": "string", + "description": "The time at which the notification was created." + }, + "notified": { + "type": "string", + "description": "The time at which the notification was last sent out." + }, + "deleted": { + "type": "string", + "description": "The time at which a notification has been deleted." + }, + "old": { + "$ref": "#/definitions/clairPagedVulnerableAncestries", + "description": "The previous vulnerability and a paginated view of the ancestries it affects." + }, + "new": { + "$ref": "#/definitions/clairPagedVulnerableAncestries", + "description": "The newly updated vulnerability and a paginated view of the ancestries it affects." + } + } + }, + "PagedVulnerableAncestriesIndexedAncestryName": { + "type": "object", + "properties": { + "index": { + "type": "integer", + "format": "int32", + "description": "The index is an ever increasing number associated with the particular ancestry.\nThis is useful if you're processing notifications, and need to keep track of the progress of paginating the results." + }, + "name": { + "type": "string", + "description": "The name of the ancestry." + } + } + }, + "PostAncestryRequestPostLayer": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "description": "The hash of the layer." + }, + "path": { + "type": "string", + "description": "The location of the layer (URL or filepath)." + }, + "headers": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Any HTTP Headers that need to be used if requesting a layer over HTTP(S)." } } }, - "clairpbClairStatus": { + "clairClairStatus": { "type": "object", "properties": { "listers": { @@ -205,194 +263,181 @@ "items": { "type": "string" }, - "title": "listers and detectors are processors implemented in this Clair and used to\nscan ancestries" + "description": "The configured list of feature listers used to scan an ancestry." }, "detectors": { "type": "array", "items": { "type": "string" - } + }, + "description": "The configured list of namespace detectors used to scan an ancestry." 
}, "last_update_time": { "type": "string", - "format": "date-time" + "format": "date-time", + "description": "The time at which the updater last ran." } } }, - "clairpbFeature": { + "clairFeature": { "type": "object", "properties": { "name": { - "type": "string" + "type": "string", + "description": "The name of the feature." }, "namespace_name": { - "type": "string" + "type": "string", + "description": "The name of the namespace in which the feature is detected." }, "version": { - "type": "string" + "type": "string", + "description": "The specific version of this feature." }, "version_format": { - "type": "string" + "type": "string", + "description": "The format used to parse version numbers for the feature." }, "vulnerabilities": { "type": "array", "items": { - "$ref": "#/definitions/clairpbVulnerability" - } + "$ref": "#/definitions/clairVulnerability" + }, + "description": "The list of vulnerabilities that affect the feature." } } }, - "clairpbGetAncestryResponse": { + "clairGetAncestryResponse": { "type": "object", "properties": { "ancestry": { - "$ref": "#/definitions/clairpbAncestry" + "$ref": "#/definitions/GetAncestryResponseAncestry", + "description": "The ancestry requested." }, "status": { - "$ref": "#/definitions/clairpbClairStatus" + "$ref": "#/definitions/clairClairStatus", + "description": "The status of Clair at the time of the request." } } }, - "clairpbGetNotificationResponse": { + "clairGetNotificationResponse": { "type": "object", "properties": { "notification": { - "$ref": "#/definitions/clairpbNotification" + "$ref": "#/definitions/GetNotificationResponseNotification", + "description": "The notification as requested." } } }, - "clairpbIndexedAncestryName": { - "type": "object", - "properties": { - "index": { - "type": "integer", - "format": "int32", - "description": "index is unique to name in all streams simultaneously streamed, increasing\nand larger than all indexes in previous page in same stream." - }, - "name": { - "type": "string" - } - } - }, - "clairpbLayer": { + "clairLayer": { "type": "object", "properties": { "hash": { - "type": "string" + "type": "string", + "description": "The sha256 tarsum for the layer." } } }, - "clairpbNotification": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "created": { - "type": "string" - }, - "notified": { - "type": "string" - }, - "deleted": { - "type": "string" - }, - "old": { - "$ref": "#/definitions/clairpbPagedVulnerableAncestries" - }, - "new": { - "$ref": "#/definitions/clairpbPagedVulnerableAncestries" - } - } + "clairMarkNotificationAsReadResponse": { + "type": "object" }, - "clairpbPagedVulnerableAncestries": { + "clairPagedVulnerableAncestries": { "type": "object", "properties": { "current_page": { - "type": "string" + "type": "string", + "description": "The identifier for the current page." }, "next_page": { "type": "string", - "description": "if next_page is empty, it signals the end of all pages." + "description": "The token used to request the next page.\nThis will be empty when there are no more pages." }, "limit": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The requested maximum number of results per page." }, "vulnerability": { - "$ref": "#/definitions/clairpbVulnerability" + "$ref": "#/definitions/clairVulnerability", + "description": "The vulnerability that affects a given set of ancestries." 
}, "ancestries": { "type": "array", "items": { - "$ref": "#/definitions/clairpbIndexedAncestryName" - } + "$ref": "#/definitions/PagedVulnerableAncestriesIndexedAncestryName" + }, + "description": "The ancestries affected by a vulnerability." } } }, - "clairpbPostAncestryRequest": { + "clairPostAncestryRequest": { "type": "object", "properties": { "ancestry_name": { - "type": "string" + "type": "string", + "description": "The name of the ancestry being scanned.\nIf scanning OCI images, this should be the hash of the manifest." }, "format": { - "type": "string" + "type": "string", + "description": "The format of the image being uploaded." }, "layers": { "type": "array", "items": { "$ref": "#/definitions/PostAncestryRequestPostLayer" - } + }, + "description": "The layers to be scanned for this particular ancestry." } } }, - "clairpbPostAncestryResponse": { + "clairPostAncestryResponse": { "type": "object", "properties": { "status": { - "$ref": "#/definitions/clairpbClairStatus" + "$ref": "#/definitions/clairClairStatus", + "description": "The status of Clair at the time of the request." } } }, - "clairpbVulnerability": { + "clairVulnerability": { "type": "object", "properties": { "name": { - "type": "string" + "type": "string", + "description": "The name of the vulnerability." }, "namespace_name": { - "type": "string" + "type": "string", + "description": "The name of the namespace in which the vulnerability was detected." }, "description": { - "type": "string" + "type": "string", + "description": "A description of the vulnerability according to the source for the namespace." }, "link": { - "type": "string" + "type": "string", + "description": "A link to the vulnerability according to the source for the namespace." }, "severity": { - "type": "string" + "type": "string", + "description": "How dangerous the vulnerability is." }, "metadata": { - "type": "string" + "type": "string", + "description": "Namespace agnostic metadata about the vulnerability." }, "fixed_by": { "type": "string", - "description": "fixed_by exists when vulnerability is under feature." + "description": "The feature that fixes this vulnerability.\nThis field only exists when a vulnerability is a part of a Feature." }, "affected_versions": { "type": "array", "items": { - "$ref": "#/definitions/clairpbFeature" + "$ref": "#/definitions/clairFeature" }, - "description": "affected_versions exists when vulnerability is under notification." + "description": "The Features that are affected by the vulnerability.\nThis field only exists when a vulnerability is a part of a Notification." } } - }, - "protobufEmpty": { - "type": "object", - "description": "service Foo {\n rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);\n }\n\nThe JSON representation for `Empty` is empty JSON object `{}`.", - "title": "A generic empty message that you can re-use to avoid defining duplicated\nempty messages in your APIs. A typical example is to use it as the request\nor the response type of an API method. 
For instance:" } } } diff --git a/api/v3/clairpb/convert.go b/api/v3/clairpb/convert.go index a3584587..408b0983 100644 --- a/api/v3/clairpb/convert.go +++ b/api/v3/clairpb/convert.go @@ -48,7 +48,7 @@ func PagedVulnerableAncestriesFromDatabaseModel(dbVuln *database.PagedVulnerable } for index, ancestryName := range dbVuln.Affected { - indexedAncestry := IndexedAncestryName{ + indexedAncestry := PagedVulnerableAncestries_IndexedAncestryName{ Name: ancestryName, Index: int32(index), } @@ -60,9 +60,9 @@ func PagedVulnerableAncestriesFromDatabaseModel(dbVuln *database.PagedVulnerable // NotificationFromDatabaseModel converts database notification, old and new // vulnerabilities' paged vulnerable ancestries to be api notification. -func NotificationFromDatabaseModel(dbNotification database.VulnerabilityNotificationWithVulnerable) (*Notification, error) { +func NotificationFromDatabaseModel(dbNotification database.VulnerabilityNotificationWithVulnerable) (*GetNotificationResponse_Notification, error) { var ( - noti Notification + noti GetNotificationResponse_Notification err error ) @@ -123,8 +123,8 @@ func VulnerabilityWithFixedInFromDatabaseModel(dbVuln database.VulnerabilityWith } // AncestryFromDatabaseModel converts database ancestry to api ancestry. -func AncestryFromDatabaseModel(dbAncestry database.Ancestry) *Ancestry { - ancestry := &Ancestry{ +func AncestryFromDatabaseModel(dbAncestry database.Ancestry) *GetAncestryResponse_Ancestry { + ancestry := &GetAncestryResponse_Ancestry{ Name: dbAncestry.Name, } for _, layer := range dbAncestry.Layers { diff --git a/api/v3/clairpb/generate-protobuf.sh b/api/v3/clairpb/generate-protobuf.sh new file mode 100755 index 00000000..246edfbd --- /dev/null +++ b/api/v3/clairpb/generate-protobuf.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +# Copyright 2018 clair authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +DOCKER_REPO_ROOT="$GOPATH/src/github.com/coreos/clair" +IMAGE=${IMAGE:-"quay.io/coreos/clair-gen-proto"} + +docker run --rm -it \ + -v "$DOCKER_REPO_ROOT":"$DOCKER_REPO_ROOT" \ + -w "$DOCKER_REPO_ROOT" \ + "$IMAGE" \ + "./api/v3/clairpb/run_in_docker.sh" diff --git a/api/v3/clairpb/prototool.yaml b/api/v3/clairpb/prototool.yaml new file mode 100644 index 00000000..8fe4dd22 --- /dev/null +++ b/api/v3/clairpb/prototool.yaml @@ -0,0 +1,3 @@ +protoc_version: 3.5.1 +protoc_includes: +- ../../../vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis diff --git a/api/v3/clairpb/run_in_docker.sh b/api/v3/clairpb/run_in_docker.sh new file mode 100755 index 00000000..28e703ed --- /dev/null +++ b/api/v3/clairpb/run_in_docker.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# Copyright 2018 clair authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +protoc -I/usr/include -I. \ + -I"${GOPATH}/src" \ + -I"${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis" \ + --go_out=plugins=grpc:. \ + ./api/v3/clairpb/clair.proto + +protoc -I/usr/include -I. \ + -I"${GOPATH}/src" \ + -I"${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis" \ + --grpc-gateway_out=logtostderr=true:. \ + ./api/v3/clairpb/clair.proto + +protoc -I/usr/include -I. \ + -I"${GOPATH}/src" \ + -I"${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis" \ + --swagger_out=logtostderr=true:. \ + ./api/v3/clairpb/clair.proto + +go generate . diff --git a/api/v3/rpc.go b/api/v3/rpc.go index 109bf17a..02244ff3 100644 --- a/api/v3/rpc.go +++ b/api/v3/rpc.go @@ -18,7 +18,6 @@ import ( "fmt" "github.com/golang/protobuf/ptypes" - google_protobuf1 "github.com/golang/protobuf/ptypes/empty" "golang.org/x/net/context" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" @@ -227,7 +226,7 @@ func (s *NotificationServer) GetNotification(ctx context.Context, req *pb.GetNot // MarkNotificationAsRead implements deleting a notification via the Clair gRPC // service. -func (s *NotificationServer) MarkNotificationAsRead(ctx context.Context, req *pb.MarkNotificationAsReadRequest) (*google_protobuf1.Empty, error) { +func (s *NotificationServer) MarkNotificationAsRead(ctx context.Context, req *pb.MarkNotificationAsReadRequest) (*pb.MarkNotificationAsReadResponse, error) { if req.GetName() == "" { return nil, status.Error(codes.InvalidArgument, "notification name should not be empty") } @@ -249,5 +248,5 @@ func (s *NotificationServer) MarkNotificationAsRead(ctx context.Context, req *pb return nil, status.Error(codes.Internal, err.Error()) } - return &google_protobuf1.Empty{}, nil + return &pb.MarkNotificationAsReadResponse{}, nil } diff --git a/cmd/clair/main.go b/cmd/clair/main.go index e802cbbd..dbb360a8 100644 --- a/cmd/clair/main.go +++ b/cmd/clair/main.go @@ -180,7 +180,7 @@ func main() { flag.Parse() // Check for dependencies. - for _, bin := range []string{"git", "bzr", "rpm", "xz"} { + for _, bin := range []string{"git", "rpm", "xz"} { _, err := exec.LookPath(bin) if err != nil { log.WithError(err).WithField("dependency", bin).Fatal("failed to find dependency") diff --git a/code-of-conduct.md b/code-of-conduct.md new file mode 100644 index 00000000..a234f360 --- /dev/null +++ b/code-of-conduct.md @@ -0,0 +1,61 @@ +## CoreOS Community Code of Conduct + +### Contributor Code of Conduct + +As contributors and maintainers of this project, and in the interest of +fostering an open and welcoming community, we pledge to respect all people who +contribute through reporting issues, posting feature requests, updating +documentation, submitting pull requests or patches, and other activities. 
+ +We are committed to making participation in this project a harassment-free +experience for everyone, regardless of level of experience, gender, gender +identity and expression, sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing others' private information, such as physical or electronic addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently applying these +principles to every aspect of managing this project. Project maintainers who do +not follow or enforce the Code of Conduct may be permanently removed from the +project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting a project maintainer, Brandon Philips +, and/or Rithu John . + +This Code of Conduct is adapted from the Contributor Covenant +(http://contributor-covenant.org), version 1.2.0, available at +http://contributor-covenant.org/version/1/2/0/ + +### CoreOS Events Code of Conduct + +CoreOS events are working conferences intended for professional networking and +collaboration in the CoreOS community. Attendees are expected to behave +according to professional standards and in accordance with their employer’s +policies on appropriate workplace behavior. + +While at CoreOS events or related social networking opportunities, attendees +should not engage in discriminatory or offensive speech or actions including +but not limited to gender, sexuality, race, age, disability, or religion. +Speakers should be especially aware of these concerns. + +CoreOS does not condone any statements by speakers contrary to these standards. +CoreOS reserves the right to deny entrance and/or eject from an event (without +refund) any individual found to be engaging in discriminatory or offensive +speech or actions. + +Please bring any concerns to the immediate attention of designated on-site +staff, Brandon Philips , and/or Rithu John . 
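
As a usage sketch for the regenerated v3 API above (not part of this change set): the JSON gateway exposes the POST /ancestry and GET /ancestry/{ancestry_name} routes defined in clair.proto and clair.swagger.json. The snippet below assumes a Clair instance serving the gateway on localhost:6060; the image name, layer URL, layer hash, and token are placeholder values, not values taken from this diff.

// usage_example.go — illustrative only; not part of this patch.
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Assumed address of the Clair v3 JSON gateway; adjust for your deployment.
	base := "http://localhost:6060"

	// POST /ancestry: the body mirrors clairPostAncestryRequest from the swagger
	// definitions above. All values below are placeholders.
	payload := []byte(`{
	  "ancestry_name": "example-app",
	  "format": "Docker",
	  "layers": [
	    {
	      "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
	      "path": "https://registry.example.com/layer.tar",
	      "headers": {"Authorization": "Bearer placeholder-token"}
	    }
	  ]
	}`)
	resp, err := http.Post(base+"/ancestry", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// GET /ancestry/{ancestry_name}: with_vulnerabilities and with_features are the
	// optional query flags defined in GetAncestryRequest above.
	resp, err = http.Get(base + "/ancestry/example-app?with_vulnerabilities=true&with_features=true")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(body))
}
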
diff --git a/contrib/helm/clair/Chart.yaml b/contrib/helm/clair/Chart.yaml index ba3bdd78..4e76d1c1 100644 --- a/contrib/helm/clair/Chart.yaml +++ b/contrib/helm/clair/Chart.yaml @@ -8,4 +8,4 @@ sources: - https://github.com/coreos/clair maintainers: - name: Jimmy Zelinskie - - email: jimmy.zelinskie@coreos.com + email: jimmy.zelinskie@coreos.com diff --git a/contrib/helm/clair/templates/configmap.yaml b/contrib/helm/clair/templates/configmap.yaml index e30cab3e..7c592bd4 100644 --- a/contrib/helm/clair/templates/configmap.yaml +++ b/contrib/helm/clair/templates/configmap.yaml @@ -14,7 +14,7 @@ data: # PostgreSQL Connection string # https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING # This should be done using secrets or Vault, but for now this will also work - {{- if .Values.config.postgresURI -}} + {{- if .Values.config.postgresURI }} source: "{{ .Values.config.postgresURI }}" {{ else }} source: "postgres://{{ .Values.postgresql.postgresUser }}:{{ .Values.postgresql.postgresPassword }}@{{ template "postgresql.fullname" . }}:5432/{{ .Values.postgresql.postgresDatabase }}?sslmode=disable" diff --git a/contrib/helm/clair/values.yaml b/contrib/helm/clair/values.yaml index a30b8dcb..dc8e6cef 100644 --- a/contrib/helm/clair/values.yaml +++ b/contrib/helm/clair/values.yaml @@ -30,11 +30,11 @@ ingress: # - chart-example.local resources: limits: - cpu: 100m - memory: 1Gi + cpu: 200m + memory: 1500Mi requests: cpu: 100m - memory: 128Mi + memory: 500Mi config: # postgresURI: "postgres://user:password@host:5432/postgres?sslmode=disable" paginationKey: "XxoPtCUzrUv4JV5dS+yQ+MdW7yLEJnRMwigVY/bpgtQ=" @@ -59,6 +59,9 @@ config: # Configuration values for the postgresql dependency. # ref: https://github.com/kubernetes/charts/blob/master/stable/postgresql/README.md postgresql: +# The dependant Postgres chart can be disabled, to connect to +# an existing database by defining config.postgresURI + enabled: true cpu: 1000m memory: 1Gi # These values are hardcoded until Helm supports secrets. @@ -68,4 +71,4 @@ postgresql: postgresDatabase: clair persistence: - size: 10Gi \ No newline at end of file + size: 10Gi diff --git a/database/models.go b/database/models.go index fe36fbfc..43d41da9 100644 --- a/database/models.go +++ b/database/models.go @@ -71,8 +71,8 @@ type Namespace struct { // determined. // // e.g. Name: OpenSSL, Version: 1.0, VersionFormat: dpkg. -// dpkg implies the installer package manager but the namespace (might be -// debian:7, debian:8, ...) could not be determined. +// dpkg is the version format of the installer package manager, which in this +// case could be dpkg or apk. 
type Feature struct { Name string Version string diff --git a/database/namespace_mapping.go b/database/namespace_mapping.go index d7ff67c3..8691ac99 100644 --- a/database/namespace_mapping.go +++ b/database/namespace_mapping.go @@ -45,4 +45,5 @@ var UbuntuReleasesMapping = map[string]string{ "yakkety": "16.10", "zesty": "17.04", "artful": "17.10", + "bionic": "18.04", } diff --git a/ext/featurens/redhatrelease/redhatrelease.go b/ext/featurens/redhatrelease/redhatrelease.go index 2b15c1e9..0dabc3fa 100644 --- a/ext/featurens/redhatrelease/redhatrelease.go +++ b/ext/featurens/redhatrelease/redhatrelease.go @@ -30,7 +30,7 @@ import ( ) var ( - oracleReleaseRegexp = regexp.MustCompile(`(?P[^\s]*) (Linux Server release) (?P[\d]+)`) + oracleReleaseRegexp = regexp.MustCompile(`(?POracle) (Linux Server release) (?P[\d]+)`) centosReleaseRegexp = regexp.MustCompile(`(?P[^\s]*) (Linux release|release) (?P[\d]+)`) redhatReleaseRegexp = regexp.MustCompile(`(?PRed Hat Enterprise Linux) (Client release|Server release|Workstation release) (?P[\d]+)`) ) diff --git a/ext/featurens/redhatrelease/redhatrelease_test.go b/ext/featurens/redhatrelease/redhatrelease_test.go index 5ee32f18..4a2da907 100644 --- a/ext/featurens/redhatrelease/redhatrelease_test.go +++ b/ext/featurens/redhatrelease/redhatrelease_test.go @@ -42,6 +42,12 @@ func TestDetector(t *testing.T) { "etc/centos-release": []byte(`CentOS release 6.6 (Final)`), }, }, + { + ExpectedNamespace: &database.Namespace{Name: "centos:7"}, + Files: tarutil.FilesMap{ + "etc/redhat-release": []byte(`Red Hat Enterprise Linux Server release 7.2 (Maipo)`), + }, + }, { ExpectedNamespace: &database.Namespace{Name: "centos:7"}, Files: tarutil.FilesMap{ diff --git a/ext/vulnsrc/alpine/alpine.go b/ext/vulnsrc/alpine/alpine.go index 5b6f46e1..49775f77 100644 --- a/ext/vulnsrc/alpine/alpine.go +++ b/ext/vulnsrc/alpine/alpine.go @@ -58,19 +58,20 @@ func (u *updater) Update(db database.Datastore) (resp vulnsrc.UpdateResponse, er return } - // Ask the database for the latest commit we successfully applied. - var dbCommit string + // Open a database transaction. tx, err := db.Begin() if err != nil { return } defer tx.Rollback() - dbCommit, ok, err := tx.FindKeyValue(updaterFlag) + // Ask the database for the latest commit we successfully applied. 
+ var dbCommit string + var ok bool + dbCommit, ok, err = tx.FindKeyValue(updaterFlag) if err != nil { return } - if !ok { dbCommit = "" } @@ -193,7 +194,7 @@ func (u *updater) pullRepository() (commit string, err error) { cmd.Dir = u.repositoryLocalPath if out, err := cmd.CombinedOutput(); err != nil { u.Clean() - log.WithError(err).WithField("output", string(out)).Error("could not pull alpine-secdb repository") + log.WithError(err).WithField("output", string(out)).Error("could not clone alpine-secdb repository") return "", commonerr.ErrCouldNotDownload } } else { diff --git a/ext/vulnsrc/ubuntu/ubuntu.go b/ext/vulnsrc/ubuntu/ubuntu.go index 6af0c14c..662c40ba 100644 --- a/ext/vulnsrc/ubuntu/ubuntu.go +++ b/ext/vulnsrc/ubuntu/ubuntu.go @@ -18,14 +18,13 @@ package ubuntu import ( "bufio" - "bytes" + "errors" "fmt" "io" "io/ioutil" "os" "os/exec" "regexp" - "strconv" "strings" log "github.com/sirupsen/logrus" @@ -38,10 +37,9 @@ import ( ) const ( - trackerURI = "https://launchpad.net/ubuntu-cve-tracker" - trackerRepository = "https://launchpad.net/ubuntu-cve-tracker" - updaterFlag = "ubuntuUpdater" - cveURL = "http://people.ubuntu.com/~ubuntu-security/cve/%s" + trackerURI = "https://git.launchpad.net/ubuntu-cve-tracker" + updaterFlag = "ubuntuUpdater" + cveURL = "http://people.ubuntu.com/~ubuntu-security/cve/%s" ) var ( @@ -74,6 +72,8 @@ var ( affectsCaptureRegexp = regexp.MustCompile(`(?P.*)_(?P.*): (?P[^\s]*)( \(+(?P[^()]*)\)+)?`) affectsCaptureRegexpNames = affectsCaptureRegexp.SubexpNames() + + errUnknownRelease = errors.New("found packages with CVEs for a verison of Ubuntu that Clair doesn't know about") ) type updater struct { @@ -84,211 +84,179 @@ func init() { vulnsrc.RegisterUpdater("ubuntu", &updater{}) } -func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateResponse, err error) { +func (u *updater) Update(db database.Datastore) (resp vulnsrc.UpdateResponse, err error) { log.WithField("package", "Ubuntu").Info("Start fetching vulnerabilities") - // Pull the bzr repository. - if err = u.pullRepository(); err != nil { - return resp, err - } - - // Get revision number. - revisionNumber, err := getRevisionNumber(u.repositoryLocalPath) + // Pull the master branch. + var commit string + commit, err = u.pullRepository() if err != nil { return resp, err } - tx, err := datastore.Begin() + // Open a database transaction. + tx, err := db.Begin() if err != nil { return resp, err } + defer tx.Rollback() - // Get the latest revision number we successfully applied in the database. - dbRevisionNumber, ok, err := tx.FindKeyValue("ubuntuUpdater") + // Ask the database for the latest commit we successfully applied. + var dbCommit string + var ok bool + dbCommit, ok, err = tx.FindKeyValue(updaterFlag) if err != nil { - return resp, err + return } - - if err := tx.Rollback(); err != nil { - return resp, err + if !ok { + dbCommit = "" } - if !ok { - dbRevisionNumber = "" + // Set the updaterFlag to equal the commit processed. + resp.FlagName = updaterFlag + resp.FlagValue = commit + + // Short-circuit if there have been no updates. + if commit == dbCommit { + log.WithField("package", "ubuntu").Debug("no update") + return } // Get the list of vulnerabilities that we have to update. 
- modifiedCVE, err := collectModifiedVulnerabilities(revisionNumber, dbRevisionNumber, u.repositoryLocalPath) + var modifiedCVE map[string]struct{} + modifiedCVE, err = collectModifiedVulnerabilities(commit, dbCommit, u.repositoryLocalPath) if err != nil { - return resp, err + return } - notes := make(map[string]struct{}) - for cvePath := range modifiedCVE { - // Open the CVE file. - file, err := os.Open(u.repositoryLocalPath + "/" + cvePath) - if err != nil { - // This can happen when a file is modified and then moved in another - // commit. - continue - } - - // Parse the vulnerability. - v, unknownReleases, err := parseUbuntuCVE(file) - if err != nil { - return resp, err - } - - // Add the vulnerability to the response. - resp.Vulnerabilities = append(resp.Vulnerabilities, v) - - // Store any unknown releases as notes. - for k := range unknownReleases { - note := fmt.Sprintf("Ubuntu %s is not mapped to any version number (eg. trusty->14.04). Please update me.", k) - notes[note] = struct{}{} - - // If we encountered unknown Ubuntu release, we don't want the revision - // number to be considered as managed. - dbRevisionNumberInt, _ := strconv.Atoi(dbRevisionNumber) - revisionNumber = dbRevisionNumberInt - } - - // Close the file manually. - // - // We do that instead of using defer because defer works on a function-level scope. - // We would open many files and close them all at once at the end of the function, - // which could lead to exceed fs.file-max. - file.Close() + // Get the list of vulnerabilities. + resp.Vulnerabilities, resp.Notes, err = collectVulnerabilitiesAndNotes(u.repositoryLocalPath, modifiedCVE) + if err != nil { + return } - // Add flag and notes. - resp.FlagName = updaterFlag - resp.FlagValue = strconv.Itoa(revisionNumber) - for note := range notes { - resp.Notes = append(resp.Notes, note) + // The only notes we take are if we encountered unknown Ubuntu release. + // We don't want the commit to be considered as managed in that case. + if len(resp.Notes) != 0 { + resp.FlagValue = dbCommit } return } func (u *updater) Clean() { - os.RemoveAll(u.repositoryLocalPath) + if u.repositoryLocalPath != "" { + os.RemoveAll(u.repositoryLocalPath) + } } -func (u *updater) pullRepository() (err error) { +func (u *updater) pullRepository() (commit string, err error) { // Determine whether we should branch or pull. if _, pathExists := os.Stat(u.repositoryLocalPath); u.repositoryLocalPath == "" || os.IsNotExist(pathExists) { // Create a temporary folder to store the repository. if u.repositoryLocalPath, err = ioutil.TempDir(os.TempDir(), "ubuntu-cve-tracker"); err != nil { - return vulnsrc.ErrFilesystem + return "", vulnsrc.ErrFilesystem } - - // Branch repository. - cmd := exec.Command("bzr", "branch", "--use-existing-dir", trackerRepository, ".") + cmd := exec.Command("git", "clone", trackerURI, ".") cmd.Dir = u.repositoryLocalPath if out, err := cmd.CombinedOutput(); err != nil { - log.WithError(err).WithField("output", string(out)).Error("could not branch Ubuntu repository") - return commonerr.ErrCouldNotDownload + u.Clean() + log.WithError(err).WithField("output", string(out)).Error("could not clone ubuntu-cve-tracker repository") + return "", commonerr.ErrCouldNotDownload + } + } else { + // The repository already exists and it needs to be refreshed via a pull. + cmd := exec.Command("git", "pull") + cmd.Dir = u.repositoryLocalPath + if _, err := cmd.CombinedOutput(); err != nil { + return "", vulnsrc.ErrGitFailure } - - return nil } - // Pull repository. 
-	cmd := exec.Command("bzr", "pull", "--overwrite")
+	cmd := exec.Command("git", "rev-parse", "HEAD")
 	cmd.Dir = u.repositoryLocalPath
-	if out, err := cmd.CombinedOutput(); err != nil {
-		os.RemoveAll(u.repositoryLocalPath)
-		log.WithError(err).WithField("output", string(out)).Error("could not pull Ubuntu repository")
-		return commonerr.ErrCouldNotDownload
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", vulnsrc.ErrGitFailure
 	}

-	return nil
+	commit = strings.TrimSpace(string(out))
+	return
 }

-func getRevisionNumber(pathToRepo string) (int, error) {
-	cmd := exec.Command("bzr", "revno")
-	cmd.Dir = pathToRepo
-	out, err := cmd.CombinedOutput()
-	if err != nil {
-		log.WithError(err).WithField("output", string(out)).Error("could not get Ubuntu repository's revision number")
-		return 0, commonerr.ErrCouldNotDownload
+func collectModifiedVulnerabilities(commit, dbCommit, repositoryLocalPath string) (map[string]struct{}, error) {
+	modifiedCVE := make(map[string]struct{})
+	for _, dirName := range []string{"active", "retired"} {
+		if err := processDirectory(repositoryLocalPath, dirName, modifiedCVE); err != nil {
+			return nil, err
+		}
 	}
+	return modifiedCVE, nil
+}

-	revno, err := strconv.Atoi(strings.TrimSpace(string(out)))
+func processDirectory(repositoryLocalPath, dirName string, modifiedCVE map[string]struct{}) error {
+	// Open the directory.
+	d, err := os.Open(repositoryLocalPath + "/" + dirName)
 	if err != nil {
-		log.WithError(err).WithField("output", string(out)).Error("could not parse Ubuntu repository's revision number")
-		return 0, commonerr.ErrCouldNotDownload
+		log.WithError(err).Error("could not open Ubuntu vulnerabilities repository's folder")
+		return vulnsrc.ErrFilesystem
 	}
+	defer d.Close()

-	return revno, nil
-}
+	// Get the names of all the files in the directory.
+	names, err := d.Readdirnames(-1)
+	if err != nil {
+		log.WithError(err).Error("could not read Ubuntu vulnerabilities repository's folder")
+		return vulnsrc.ErrFilesystem
+	}

-func collectModifiedVulnerabilities(revision int, dbRevision, repositoryLocalPath string) (map[string]struct{}, error) {
-	modifiedCVE := make(map[string]struct{})
+	// Add the vulnerabilities to the list.
+	for _, name := range names {
+		if strings.HasPrefix(name, "CVE-") {
+			modifiedCVE[dirName+"/"+name] = struct{}{}
+		}
+	}

-	// Handle a brand new database.
-	if dbRevision == "" {
-		for _, folder := range []string{"active", "retired"} {
-			d, err := os.Open(repositoryLocalPath + "/" + folder)
-			if err != nil {
-				log.WithError(err).Error("could not open Ubuntu vulnerabilities repository's folder")
-				return nil, vulnsrc.ErrFilesystem
-			}
+	return nil
+}

-			// Get the FileInfo of all the files in the directory.
-			names, err := d.Readdirnames(-1)
-			if err != nil {
-				log.WithError(err).Error("could not read Ubuntu vulnerabilities repository's folder")
-				return nil, vulnsrc.ErrFilesystem
-			}
+func collectVulnerabilitiesAndNotes(repositoryLocalPath string, modifiedCVE map[string]struct{}) ([]database.VulnerabilityWithAffected, []string, error) {
+	vulns := make([]database.VulnerabilityWithAffected, 0)
+	noteSet := make(map[string]struct{})

-			// Add the vulnerabilities to the list.
-			for _, name := range names {
-				if strings.HasPrefix(name, "CVE-") {
-					modifiedCVE[folder+"/"+name] = struct{}{}
-				}
-			}
+	for cvePath := range modifiedCVE {
+		// Open the CVE file.
+		file, err := os.Open(repositoryLocalPath + "/" + cvePath)
+		if err != nil {
+			// This can happen when a file is modified then moved in another commit.
+			continue
+		}

-			// Close the file manually.
-			//
-			// We do that instead of using defer because defer works on a function-level scope.
-			// We would open many files and close them all at once at the end of the function,
-			// which could lead to exceed fs.file-max.
-			d.Close()
+		// Parse the vulnerability.
+		v, unknownReleases, err := parseUbuntuCVE(file)
+		if err != nil {
+			file.Close()
+			return nil, nil, err
 		}

-		return modifiedCVE, nil
-	}
+		// Add the vulnerability to the response.
+		vulns = append(vulns, v)

-	// Handle an up to date database.
-	dbRevisionInt, _ := strconv.Atoi(dbRevision)
-	if revision == dbRevisionInt {
-		log.WithField("package", "Ubuntu").Debug("no update")
-		return modifiedCVE, nil
-	}
+		// Store any unknown releases as notes.
+		for k := range unknownReleases {
+			noteSet[errUnknownRelease.Error()+": "+k] = struct{}{}
+		}

-	// Handle a database that needs upgrading.
-	cmd := exec.Command("bzr", "log", "--verbose", "-r"+strconv.Itoa(dbRevisionInt+1)+"..", "-n0")
-	cmd.Dir = repositoryLocalPath
-	out, err := cmd.CombinedOutput()
-	if err != nil {
-		log.WithError(err).WithField("output", string(out)).Error("could not get Ubuntu vulnerabilities repository logs")
-		return nil, commonerr.ErrCouldNotDownload
+		file.Close()
 	}

-	scanner := bufio.NewScanner(bytes.NewReader(out))
-	for scanner.Scan() {
-		text := strings.TrimSpace(scanner.Text())
-		if strings.Contains(text, "CVE-") && (strings.HasPrefix(text, "active/") || strings.HasPrefix(text, "retired/")) {
-			if strings.Contains(text, " => ") {
-				text = text[strings.Index(text, " => ")+4:]
-			}
-			modifiedCVE[text] = struct{}{}
-		}
+	// Convert the note set into a slice.
+	var notes []string
+	for note := range noteSet {
+		notes = append(notes, note)
 	}

-	return modifiedCVE, nil
+	return vulns, notes, nil
 }

 func parseUbuntuCVE(fileContent io.Reader) (vulnerability database.VulnerabilityWithAffected, unknownReleases map[string]struct{}, err error) {