From 77df37ca6753ba06734329f9d04b44e14f51fe25 Mon Sep 17 00:00:00 2001 From: Tobias Furuholm Date: Wed, 17 Jan 2018 13:54:37 +0100 Subject: [PATCH] Update vendor --- glide.lock | 14 +- glide.yaml | 3 + .../grafeas/grafeas/.circleci/config.yml | 48 + vendor/github.com/grafeas/grafeas/.gitignore | 1 + .../grafeas/grafeas/.idea/go.imports.xml | 9 + vendor/github.com/grafeas/grafeas/AUTHORS | 7 + .../grafeas/grafeas/CONTRIBUTING.md | 30 + vendor/github.com/grafeas/grafeas/Dockerfile | 10 + vendor/github.com/grafeas/grafeas/LICENSE | 201 + vendor/github.com/grafeas/grafeas/Makefile | 46 + vendor/github.com/grafeas/grafeas/README.md | 216 + .../case-studies/binary-authorization.md | 240 + .../grafeas/grafeas/code-of-conduct.md | 29 + .../grafeas/grafeas/config.yaml.sample | 22 + .../grafeas/grafeas/docs/running_grafeas.md | 159 + .../server/go-server/api/server/README.md | 21 + .../server/go-server/api/server/app.yaml | 1 + .../go-server/api/server/config/config.go | 62 + .../server/go-server/api/server/main/main.go | 36 + .../server/go-server/api/server/name/name.go | 218 + .../go-server/api/server/name/name_test.go | 255 + .../go-server/api/server/server/server.go | 207 + .../go-server/api/server/storage/memstore.go | 307 + .../api/server/storage/memstore_test.go | 568 ++ .../api/server/testing/testobjects.go | 182 + .../go-server/api/server/v1alpha1/impl.go | 373 ++ .../api/server/v1alpha1/impl_test.go | 659 ++ .../grafeas/grafeas/server-go/storage.go | 86 + .../grafeas/grafeas/v1alpha1/grafeas.json | 2007 ++++++ .../grafeas/v1alpha1/proto/grafeas.pb.go | 5440 +++++++++++++++++ .../grafeas/v1alpha1/proto/grafeas.pb.gw.go | 1261 ++++ .../grafeas/v1alpha1/proto/grafeas.proto | 1454 +++++ .../v1alpha1/proto/grafeas.swagger.json | 1665 +++++ .../vendor/github.com/davecgh/go-spew/LICENSE | 15 - .../github.com/davecgh/go-spew/spew/bypass.go | 152 - .../davecgh/go-spew/spew/bypasssafe.go | 38 - .../github.com/davecgh/go-spew/spew/common.go | 341 -- 
.../github.com/davecgh/go-spew/spew/config.go | 297 - .../github.com/davecgh/go-spew/spew/doc.go | 202 - .../github.com/davecgh/go-spew/spew/dump.go | 509 -- .../github.com/davecgh/go-spew/spew/format.go | 419 -- .../github.com/davecgh/go-spew/spew/spew.go | 148 - .../github.com/pmezard/go-difflib/LICENSE | 27 - .../pmezard/go-difflib/difflib/difflib.go | 758 --- .../github.com/stretchr/objx/.gitignore | 22 - .../github.com/stretchr/objx/LICENSE.md | 23 - .../vendor/github.com/stretchr/objx/README.md | 3 - .../github.com/stretchr/objx/accessors.go | 179 - .../stretchr/objx/codegen/array-access.txt | 14 - .../stretchr/objx/codegen/index.html | 86 - .../stretchr/objx/codegen/template.txt | 286 - .../stretchr/objx/codegen/types_list.txt | 20 - .../github.com/stretchr/objx/constants.go | 13 - .../github.com/stretchr/objx/conversions.go | 117 - .../vendor/github.com/stretchr/objx/doc.go | 72 - .../vendor/github.com/stretchr/objx/map.go | 222 - .../github.com/stretchr/objx/mutations.go | 81 - .../github.com/stretchr/objx/security.go | 14 - .../vendor/github.com/stretchr/objx/tests.go | 17 - .../stretchr/objx/type_specific_codegen.go | 2881 --------- .../vendor/github.com/stretchr/objx/value.go | 13 - 61 files changed, 15835 insertions(+), 6971 deletions(-) create mode 100644 vendor/github.com/grafeas/grafeas/.circleci/config.yml create mode 100644 vendor/github.com/grafeas/grafeas/.gitignore create mode 100644 vendor/github.com/grafeas/grafeas/.idea/go.imports.xml create mode 100644 vendor/github.com/grafeas/grafeas/AUTHORS create mode 100644 vendor/github.com/grafeas/grafeas/CONTRIBUTING.md create mode 100644 vendor/github.com/grafeas/grafeas/Dockerfile create mode 100644 vendor/github.com/grafeas/grafeas/LICENSE create mode 100644 vendor/github.com/grafeas/grafeas/Makefile create mode 100644 vendor/github.com/grafeas/grafeas/README.md create mode 100644 vendor/github.com/grafeas/grafeas/case-studies/binary-authorization.md create mode 100644 
vendor/github.com/grafeas/grafeas/code-of-conduct.md create mode 100644 vendor/github.com/grafeas/grafeas/config.yaml.sample create mode 100644 vendor/github.com/grafeas/grafeas/docs/running_grafeas.md create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/app.yaml create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name_test.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/testing/testobjects.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go create mode 100644 vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go create mode 100644 vendor/github.com/grafeas/grafeas/server-go/storage.go create mode 100644 vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json create mode 100644 vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go create mode 100644 vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go create mode 100644 vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto create mode 100644 vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json delete mode 100644 
vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypass.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/common.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/config.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/doc.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/dump.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/format.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/spew.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/difflib/difflib.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/.gitignore delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/README.md delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/accessors.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/array-access.txt delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/index.html delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/template.txt delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/types_list.txt delete mode 100644 
vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/constants.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/conversions.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/doc.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/map.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/mutations.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/security.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/tests.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/type_specific_codegen.go delete mode 100644 vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/value.go diff --git a/glide.lock b/glide.lock index 1c864a32..e1ff905e 100644 --- a/glide.lock +++ b/glide.lock @@ -1,5 +1,5 @@ -hash: b5b9ebebad30becd361736196a015af23b1d9a616a375c7fc13823121fd17226 -updated: 2017-06-05T16:11:29.019891941-04:00 +hash: d9cd9bf3ab1048a80f5ad90e05a7ea4c7614c30c561199b71147609de600a524 +updated: 2018-01-17T13:29:36.176987+01:00 imports: - name: github.com/beorn7/perks version: 4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9 @@ -23,9 +23,19 @@ imports: - jsonpb - proto - protoc-gen-go/descriptor + - ptypes - ptypes/any + - ptypes/duration - ptypes/empty - ptypes/struct + - ptypes/timestamp +- name: github.com/grafeas/grafeas + version: 73210e9cadcba64b5b211a0ec64a9f4c2d4841b5 + repo: https://github.com/Grafeas/Grafeas.git + vcs: git + subpackages: + - samples/server/go-server/api/server/name + - v1alpha1/proto - name: github.com/grpc-ecosystem/go-grpc-prometheus version: 2500245aa6110c562d17020fb31a2c133d737799 - name: github.com/grpc-ecosystem/grpc-gateway diff --git a/glide.yaml b/glide.yaml index d67189b3..a0aea5f0 100644 --- a/glide.yaml +++ b/glide.yaml @@ -30,3 +30,6 @@ import: version: ^1.2.15 - 
package: gopkg.in/yaml.v2 - package: github.com/cockroachdb/cmux +- package: github.com/grafeas/grafeas + vcs: git + repo: https://github.com/Grafeas/Grafeas.git diff --git a/vendor/github.com/grafeas/grafeas/.circleci/config.yml b/vendor/github.com/grafeas/grafeas/.circleci/config.yml new file mode 100644 index 00000000..351a9687 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/.circleci/config.yml @@ -0,0 +1,48 @@ +# Golang CircleCI 2.0 configuration file +# +# Check https://circleci.com/docs/2.0/language-go/ for more details +version: 2 +jobs: + build: + docker: + # specify the version + - image: circleci/golang:1.8 + # Specify service dependencies here if necessary + # CircleCI maintains a library of pre-built images + # documented at https://circleci.com/docs/2.0/circleci-images/ + # - image: circleci/postgres:9.4 + + #### TEMPLATE_NOTE: go expects specific checkout path representing url + #### expecting it in the form of + #### /go/src/github.com/circleci/go-tool + #### /go/src/bitbucket.org/circleci/go-tool + working_directory: /go/src/github.com/grafeas/grafeas/ + steps: + - checkout + - run: + name: Install protoc + command: > + sudo apt-get update + + sudo wget + https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip + + unzip protoc-3.3.0-linux-x86_64.zip -d . 
+ + rm protoc-3.3.0-linux-x86_64.zip + + sudo apt-get remove -y wget unzip + + sudo apt-get autoremove -y + + - run: + name: Make protoc executables available globally + command: | + echo 'export PATH=./bin:$PATH' >> $BASH_ENV + source /home/circleci/.bashrc + + - run: + name: Install go dependencies + command: go get -u -v github.com/golang/protobuf/protoc-gen-go github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway + # specify any bash command here prefixed with `run: ` + - run: make build; make test diff --git a/vendor/github.com/grafeas/grafeas/.gitignore b/vendor/github.com/grafeas/grafeas/.gitignore new file mode 100644 index 00000000..f5c37d65 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/.gitignore @@ -0,0 +1 @@ +.install.protoc-gen-go diff --git a/vendor/github.com/grafeas/grafeas/.idea/go.imports.xml b/vendor/github.com/grafeas/grafeas/.idea/go.imports.xml new file mode 100644 index 00000000..b653761e --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/.idea/go.imports.xml @@ -0,0 +1,9 @@ + + + + + \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/AUTHORS b/vendor/github.com/grafeas/grafeas/AUTHORS new file mode 100644 index 00000000..36154f63 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/AUTHORS @@ -0,0 +1,7 @@ +# This is the list of Grafeas authors for copyright purposes. +# +# This does not necessarily list everyone who has contributed code, since in +# some cases, their employer may be the copyright holder. To see the full list +# of contributors, see the revision history in source control. +Google Inc. +JFrog Ltd diff --git a/vendor/github.com/grafeas/grafeas/CONTRIBUTING.md b/vendor/github.com/grafeas/grafeas/CONTRIBUTING.md new file mode 100644 index 00000000..21750370 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/CONTRIBUTING.md @@ -0,0 +1,30 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. 
There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution, +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Proposals and PRs + +If you would like to make a large change, please start with a proposal issue that includes: +* What you would like to achieve +* Why you'd like to make this change +* A design overview + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. diff --git a/vendor/github.com/grafeas/grafeas/Dockerfile b/vendor/github.com/grafeas/grafeas/Dockerfile new file mode 100644 index 00000000..f64bfe1c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/Dockerfile @@ -0,0 +1,10 @@ +FROM golang:1.9 +COPY . /go/src/github.com/grafeas/grafeas/ +WORKDIR /go/src/github.com/grafeas/grafeas/samples/server/go-server/api/server/main +RUN CGO_ENABLED=0 go build -o grafeas-server . 
+ +FROM alpine:latest +WORKDIR / +COPY --from=0 /go/src/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/grafeas-server /grafeas-server +EXPOSE 8080 +CMD ["/grafeas-server"] diff --git a/vendor/github.com/grafeas/grafeas/LICENSE b/vendor/github.com/grafeas/grafeas/LICENSE new file mode 100644 index 00000000..c5aedc52 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2017 The Grafeas Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/grafeas/grafeas/Makefile b/vendor/github.com/grafeas/grafeas/Makefile new file mode 100644 index 00000000..09051f4d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/Makefile @@ -0,0 +1,46 @@ +.PHONY: build fmt test vet clean + +SRC = $(shell find . -type f -name '*.go' -not -path "./vendor/*") +CLEAN := *~ + +default: build + +install.tools: .install.protoc-gen-go .install.grpc-gateway + +CLEAN += .install.protoc-gen-go .install.grpc-gateway +.install.protoc-gen-go: + go get -u -v github.com/golang/protobuf/protoc-gen-go && touch $@ + +.install.grpc-gateway: + go get -u -v github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger && touch $@ + +build: vet fmt grafeas_go + go build -v ./... + +# http://golang.org/cmd/go/#hdr-Run_gofmt_on_package_sources +fmt: + @gofmt -l -w $(SRC) + +test: + @go test -v ./... + +vet: + @go tool vet ${SRC} + +v1alpha1/proto/grafeas.pb.go: .install.protoc-gen-go .install.grpc-gateway v1alpha1/proto/grafeas.proto + protoc \ + -I ./ \ + -I vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + -I vendor/github.com/googleapis/googleapis \ + --go_out=plugins=grpc:. \ + --grpc-gateway_out=logtostderr=true:. \ + --swagger_out=logtostderr=true:. 
\ + v1alpha1/proto/grafeas.proto + + +.PHONY: grafeas_go +grafeas_go: v1alpha1/proto/grafeas.pb.go + +clean: + go clean ./... + rm -f $(CLEAN) diff --git a/vendor/github.com/grafeas/grafeas/README.md b/vendor/github.com/grafeas/grafeas/README.md new file mode 100644 index 00000000..42cd2bca --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/README.md @@ -0,0 +1,216 @@ +# Grafeas: A Component Metadata API +Grafeas defines metadata API spec for computing components (e.g., VM images, container images, jar files, scripts) that can assist with aggregations over your metadata. Grafeas uses two API concepts, a **note** and an **occurrence**. This division allows 3rd party metadata providers to create and manage metadata on behalf of many customers. Additionally, the division also allows implementation of access control settings that allow fine grain access control. + +## Running grafeas + +To run your own Grafeas instance just follow the [instructions](docs/running_greafeas.md). + +## Definition of terms +**Notes**: A note is an item or condition that can be found via an analysis or something that is used multiple times in a process. For example, a CVE could be the result of a vulnerability analysis of a Linux package. In a build process, we would store information about our builder in a note. + +A note name should take the format `/projects//notes/` where the project_id would typically be different from the project where the occurrence is created and the note_id would be unique per note-project, and informative if possible. + +Access to notes should be read-only for users who have access to occurrences referencing them, and editable only by the note owner. + +**Occurrences**: An occurrence can be thought of as an instantiation of a note and describes how the note was found in a specific cloud resource or project (e.g., location, specific remediation steps, etc.), or what the results of a specific note were (e.g., the container images that resulted from a build). 
For example, an occurrence might report that the heartbleed OpenSSL bug (a possible Note) was found in a specific package of a container image, and include information about how to remedy the heartbleed bug based on the customer’s package. + +An occurrence name should take the format `/projects//occurrences/` where the project_id would typically be different from the project where the note is created and the occurrence_id would be unique per occurrence-project, and would often be random. + +Write access to occurrences should only be granted to users who have access to link a note to the occurrence. Any users can have read access to occurrences. + +## Kind Specific Schemas +In order to properly aggregate over metadata stored in Grafeas, each kind of information stored has a strict schema. These schemas allow normalization of data from multiple providers, giving users the ability to see meaningful insights in their components over time. Defined below are the currently supported kinds, and a brief summary of what the notes and occurrences for each of them will contain. +Specifying a kind in our notes and occurrences makes Grafeas extensible. As new metadata types need support, new kinds can be added, each with their own schema. + +>TODO:Document the process for adding a new kind to the spec and generating the model, documents, and client libraries to include that kind. 
#38 + + +|Kind |Note Summary |Occurrence Summary | +|---------------------|-------------------------------------------------------------------------|-------------------------------------------------| +|PACKAGE_VULNERABILITY|CVE or vulnerability description and details including severity, versions|Affected packages/versions in a specific resource| +|BUILD_DETAILS |Builder version and signature |Details of this specific build including inputs and outputs| +|IMAGE_BASIS |Base Image for a container |An image that uses the base image, and layers included on top of base image| +|PACKAGE_MANAGER |Package Descriptions |Filesystem locations of where the package is installed in a specific resource| +|DEPLOYMENT_HISTORY |A resource that can be deployed |Details of each deployment of the resource| +|ATTESTATION |A logical attestation "role" or "authority", used as an anchor for attestations|An attestation by an authority for a specific property and resource| + + + +## Examples +A vulnerability scanning provider would create a note under their project with the following json for CVE-2017-14159 +``` +{ + "name": "projects/security-scanner/notes/CVE-2017-14159", + "shortDescription": "CVE-2017-14159", + "longDescription": "NIST vectors: AV:L/AC:M/Au:N/C:N/I:N", + "relatedUrl": [ + { + "url": "https://security-tracker.debian.org/tracker/CVE-2017-14159", + "label": "More Info" + }, + { + "url": "http://people.ubuntu.com/~ubuntu-security/cve/CVE-2017-14159", + "label": "More Info" + } + ], + "kind": "PACKAGE_VULNERABILITY", + "createTime": "2017-09-05T21:44:52.071982Z", + "updateTime": "2017-09-29T16:16:01.140652Z", + "vulnerabilityType": { + "cvssScore": 1.9, + "severity": "LOW", + "details": [ + { + "cpeUri": "cpe:/o:debian:debian_linux:7", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:7", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + 
"description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." + }, + { + "cpeUri": "cpe:/o:debian:debian_linux:unstable", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:unstable", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + "description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." + }, + { + "cpeUri": "cpe:/o:debian:debian_linux:9", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:9", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + "description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." 
+ }, + { + "cpeUri": "cpe:/o:debian:debian_linux:8", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:8", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + "description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." + }, + { + "cpeUri": "cpe:/o:canonical:ubuntu_linux:14.04", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:canonical:ubuntu_linux:14.04", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + "description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." + }, + { + "cpeUri": "cpe:/o:canonical:ubuntu_linux:16.04", + "severityName": "LOW", + "fixedLocation": { + "cpeUri": "cpe:/o:canonical:ubuntu_linux:16.04", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "minAffectedVersion": { + "kind": "MINIMUM" + }, + "package": "openldap", + "description": "slapd in OpenLDAP 2.4.45 and earlier creates a PID file after dropping privileges to a non-root account, which might allow local users to kill arbitrary processes by leveraging access to this non-root account for PID file modification before a root script executes a \"kill `cat /pathname`\" command, as demonstrated by openldap-initscript." 
+ } + ] + } +} +``` + +On scanning and coming across this vulnerability, a security scanning provider would create the following in their customer’s project: + +``` +{ + "name": "projects/scanning-customer/occurrences/randomId1234", + "resourceUrl": "https://gcr.io/scanning-customer/dockerimage@sha256:hash", + "noteName": "projects/security-scanner/notes/CVE-2017-14159", + "kind": "PACKAGE_VULNERABILITY", + "createTime": "2017-09-29T02:58:23.376798Z", + "updateTime": "2017-09-29T07:35:22.141762Z", + "vulnerabilityDetails": { + "severity": "LOW", + "cvssScore": 1.9, + "packageIssue": [ + { + "affectedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:8", + "package": "openldap", + "version": { + "name": "2.4.40+dfsg", + "revision": "1+deb8u2" + } + }, + "fixedLocation": { + "cpeUri": "cpe:/o:debian:debian_linux:8", + "package": "openldap", + "version": { + "kind": "MAXIMUM" + } + }, + "severityName": "LOW" + } + ] + } +} + +``` + +## Resource Urls +Component resource Urls need to be unique per resource as well as immutable. This will mean that the metadata associated with a resourceUrl will always be associated with exactly one component, and what is pointed at should never change. Content addressable resource urls are preferred. In the case with resources that cannot be immutable, a timestamp should be appended. 
+ +The following table provides examples one could use as resource URLs for several component types: + +Component Type|Identifier |Example| +--------------|--------------------------------------------|-------| +|Debian |deb://dist(optional):arch:name:version |deb://lucid:i386:acl:2.2.49-2| +|Docker |https://Namespace/name@sha256:<digest> |https://gcr.io/scanning-customer/dockerimage@sha256:244fd47e07d1004f0aed9c156aa09083c82bf8944eceb67c946ff7430510a77b| +|Generic file |file://sha256:<checksum>:name |file://sha256:244fd47e07d1004f0aed9c156aa09083c82bf8944eceb67c946ff7430510a77b:foo.jar| +|Maven |gav://group:artifact:version |`gav://ant:ant:1.6.5`| +|NPM |npm://package:version |npm://mocha:2.4.5| +|NuGet |nuget://module:version |nuget://log4net:9.0.1| +|Python |pip://package:version |pip://raven:5.13.0| +|RPM |rpm://dist(optional):arch:name:version |rpm://el6:i386:ImageMagick:6.7.2.7-4| diff --git a/vendor/github.com/grafeas/grafeas/case-studies/binary-authorization.md b/vendor/github.com/grafeas/grafeas/case-studies/binary-authorization.md new file mode 100644 index 00000000..71538cd3 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/case-studies/binary-authorization.md @@ -0,0 +1,240 @@ +# Kritis: Deployment Authorization for Kubernetes Applications + +## Abstract + +Binary Authorization aims to provide full software supply chain security for +cloud based applications. In an initial release we enable customers to secure +their supply chain for Kubernetes applications using an attestation based +enforcement technology. + +## Introduction + +Organizations increasingly employ short software life cycle (for example +continuous delivery) and highly decoupled systems (for example microservice +architecture). In such environments it becomes difficult to ensure that all +software is released and deployed according to best practices and standards.
+This is important because running, say, a wrong version of a binary, through +accident or malice, may result in downtime, loss of user data, financial loss, +or worse damage. Binary Authorization wants to address this major concern of +today’s organizations: central control and enforcement of software life cycle +process. + +Binary Authorization allows stakeholders to ensure that deployed software +artifacts have been prepared according to organization’s standards. It does so +through attestation and enforcement: a deployment is prevented unless the +artifact is conformant to central policy; and to express evidence of +conformance, teams use trusted attestations. A Binary Authorization policy then +states attestation requirements necessary for artifact deployment. Policy thus +codifies an important part of organization’s life cycle policy. + +### Scope + +Kritis is our initiative to provide an open implementation of Binary +Authorization. This paper discusses the general ideas behind Binary +Authorization, and as such is a first part of the Kritis initiative. It further +touches upon some specifics of our existing implementation of Binary +Authorization, which in its first release is focused on Google Container Engine +(GKE). + +### Software Life Cycle Within an Organization + +Each organization uses a release process tailored to specific needs and +constraints. Binary Authorization does not prescribe any process, it instead +helps codify and enforce the process that makes sense to the organization. To +see how Binary Authorization fits into a release process, consider the following +common pattern. Once a release is cut, artifacts go through the following +stages, successful completion of a stage being the prerequisite for progression +to the next one: + +* Build and unit test. +* Deploy into development environment, where users aren’t affected. + * End to end testing might occur here. +* Deploy into QA environment, where only internal users are affected. 
+* Deploy into canary environment, where only a fraction of external users are + affected. +* Deploy into production. + +When an artifact successfully completes a stage, an attestation on that artifact +is created which asserts success. The policy requires previous stage’s +attestations in order to allow deployment into next stage’s environment. In this +way, Binary Authorization policy increases assurance that release process is +followed. The policy may also specify that only recently attested artifacts be +allowed deployment, and so ensure freshness. + +#### Third Party Dependencies + +Organizations rarely develop software from blank slate. Most of the time there +is some reliance on third party “canned” software, e.g. sidecar container images +from public repositories. Ideally, third party dependencies should be subject to +the same scrutiny as internally developed software, however this is rarely +practical. Compromises are usually made, for example by vetting third party +container images as practically possible, and mandating the use of only tested +and vetted versions of the software. Binary Authorization supports this use +case. + +### Example + +An online merchant runs their services on Kubernetes. They have two clusters: +`production` to run production approved services, and `experimental` to run +experimental versions of services, but only 1% of the traffic is directed to the +experimental cluster. Production versions of the software must pass a stringent +test, while experimental versions, serving only a fraction of the traffic, are +subject to less strict criteria. Both experimental and production software must +pass a basic suite of tests. This organization wants to reduce the risk of +accidentally deploying experimental software to the production cluster. 
+Following policy realizes their requirements: + +``` +{ + “cluster_admission_requirements”: { + “cluster_name”: “prod”, + “require_attestations”: [ “tested”, “production-approved” ] + } + “cluster_admission_requirements”: { + “cluster_name”: “experimental”, + “require_attestations”: [ “tested” ] + } +} +``` + +In this organization, the release process has to be designed to respect this +policy. Production qualification process must create the `production-approved` +attestation on artifacts which have indeed passed the qualification. Continuous +testing system must create the `tested` attestation. + +## Binary Authorization Model in Google Container Engine (GKE) + +Binary Authorization for GKE (BinAuthz) is available as an Alpha release. We +overview its design, discussing some of the more interesting choices. In a +nutshell, a user sets a BinAuthz policy for a GCP project; this policy specifies +attestations required to deploy a container image into the project or into a +specific cluster (or service account in the future). Attestations are managed +through Grafeas as a dedicated Kind `ATTESTATION`. + +### Deployment Environments + +Google Cloud Platform (GCP) uses projects and service accounts as security +boundaries. In addition to that, GKE uses clusters as a security boundary (a +Kubernetes cluster can have its own secrets for example). These are some of the +deployment targets that we plan to support as subjects of BinAuthz policy +requirements: project, service account, cluster. + +### Policy Management + +Key concepts of BinAuthz are Attestation Authority and Policy, realized as REST +resources managed through a REST API. An Attestation Authority is a named entity +which has the power to create attestations. As a REST resource, it encapsulates +the location of its attestations (where to store and retrieve from), as well as +verification criteria (what makes an attestation valid). 
A Policy then names +Attestation Authorities (whose attestations are) required to deploy an artifact +to some target. + +#### Example + +This might be an Attestation Authority which represents an organization’s secure +build system. + +``` +{ + “name”: “projects/secure-builder/attestationAuthorities/built-securely” + “public_keys”: +} +``` + +A policy which requires securely built artifacts to deploy to the `prod` cluster +may then look like this. + +``` +{ + “cluster_admission_requirements”: { + “cluster_name”: “prod”, + “attestation_requirements”: [ “projects/secure-builder/attestationAuthorities/built-securely” ] + } +} +``` + +### Attestations via Component Metadata API / Grafeas + +Attestations are represented as Component Metadata objects. Grafeas is its open +source sister project, and can be used as attestation transport too. An +Attestation Authority names a Note (of Kind `ATTESTATION`) which is used as an +anchor for this authority’s attestations, and optionally specifies public keys +if attestations must be signed. Attestations by this authority are then +represented as Occurrences attached to the authority’s Note. + +### Enforcement Module Entry Point + +Kubernetes orchestrates the execution of containers, predominantly focusing on +Docker as the container runtime. Pod is the lowest level abstraction of a +running container. Users can create Pods directly, or users can create +Controllers (such as ReplicaSet) which then mediate Pod creation. We chose Pod +admission as the interception point. At Pod admission time, information which BinAuthz +needs is available: artifact identifier (container image URL), deploy target +(project, service account, cluster). And Pod creation is the chokepoint through +which flow all code paths to run a Docker container. Intercepting at Pod +creation has some consequences which we must tolerate. Notably, a user may +create a Controller, which then creates and manages Pods asynchronously from +user’s original request. 
Because BinAuthz intercepts Pod creation, but not +Container creation, we don't report a BinAuthz failure at Controller creation +time, but only later, asynchronously from user’s action. Longer-term we plan to +support intercepting Controller creation too for a better user experience +(clearer errors immediately delivered). Even then we will have to keep the Pod +enforcement, to ensure that community contributed controllers don’t accidentally +bypass enforcement. Image policy webhook is a Kubernetes admission control +webhook which allows delegating Pod admission decisions to a web service. We +implement an enforcement service and configure the webhook to point to the +service. + +### Artifact Identification via Early Tag Resolution + +A Docker container image is identified by the registry where it is stored, +repository within the registry, and either tag or digest. Tags or digests are +usually used for versioning. A digest uniquely and immutably identifies a +container image. A tag, by contrast, may be associated to any digest, and this +association may change over time. We chose to allow only digest based container +images in BinAuthz. That is, when deploying to an environment which is subject +to BinAuthz, a tag based deployment is automatically disallowed as it is +impossible to decide the actual version that will be used once the Pod is +created. Besides giving clearer BinAuthz semantics, we believe that digest based +deployments are better production hygiene and thus were favored strongly by +customers we worked with. As an exception to this rule, our policy language +allows glob-based container image whitelisting, through which even tag based +deployments may be allowed. This is useful for images that don’t go through the +same internal vetting process (e.g., various sidecar containers). Longer-term +this should be solved through exchange of attestations with the image providers. 
+ +## Conclusion and Next Steps + +Binary Authorization enables centralized control over software release cycle. +Stakeholders configure policies to enforce the requirements of the release +process, gaining confidence that software is delivered to customers only if it +meets the organization’s requirements. Attestations - trusted metadata +associated to software artifacts - are used to assert that software meets +specified requirements. + +### Generalizing to Other Orchestration Systems + +We described our first, GKE specific, implementation of BinAuthz, however we +note that the basic principles apply to a variety of orchestration systems. We +plan to support other GCP platforms (such as App Engine and App Engine Flex) in +the future. Furthermore, an open specification of BinAuthz is forthcoming. + +### Richer Policies and Attestations + +BinAuthz attestation is like a seal stamp: its only meaning is “authority `X` +attests artifact `Y`”. A richer language of statements, such as “authority `X` +attests that artifact `Y` was built from source code `Z`” allows for more +expressive policies and more meaningful control. We plan to extend the data +model and policy language to support such richer statements. + +### Toolchain Integration + +In its current form, BinAuthz requires custom integration into organizations’ +workflows. We will reduce the integration cost by working with partners to +support BinAuthz in their products. For example, CI/CD providers are of +particular interest. A CI/CD pipeline may at different stages produce various +attestations for the artifacts which it creates, and BinAuthz policy then +enforce that proper process was followed. Source control system is another +integration point, especially if Binary Authorization is to be based on source +provenance of artifacts. To strengthen such guarantees, a trusted build system +may be needed. 
diff --git a/vendor/github.com/grafeas/grafeas/code-of-conduct.md b/vendor/github.com/grafeas/grafeas/code-of-conduct.md new file mode 100644 index 00000000..77f81f78 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/code-of-conduct.md @@ -0,0 +1,29 @@ +## Grafeas Code of Conduct + +At Google, we recognize and celebrate the creativity and collaboration of open source contributors and the diversity of skills, experiences, cultures, and opinions they bring to the projects and communities they participate in. + +Every one of Google's open source projects and communities are inclusive environments, based on treating all individuals respectfully, regardless of gender identity and expression, sexual orientation, disabilities, neurodiversity, physical appearance, body size, ethnicity, nationality, race, age, religion, or other protected category. + +We value diverse opinions, but we value respectful behavior more. + +Respectful behavior includes: +* Being considerate, kind, constructive, and helpful. +* Not engaging in demeaning, discriminatory, harassing, hateful, sexualized, or physically threatening behavior, speech, and imagery. +* Not engaging in unwanted physical contact. + +Some Google open source projects may adopt their own specific codes of conduct, which may have additional detailed expectations for participants. + +### Resolve Peacefully + +We do not believe that all conflict is bad; healthy debate and disagreement often yield positive results. However, it is never okay to be disrespectful or to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address the behavior directly with those involved. Many issues can be resolved quickly and easily, and this gives people more control over the outcome of their dispute. If you are unable to resolve the matter for any reason, or if the behavior is threatening or harassing, report it. 
We are dedicated to providing an environment where participants feel welcome and safe. + +Reports should be directed to wmd@google.com, the Project Steward for Grafeas. It is the Project Steward’s duty to receive and address reported violations of the code of conduct. The steward will then work with a committee consisting of representatives from the Open Source Programs Office and the Google Open Source Strategy team. + +We will investigate every complaint, but you may not receive a direct response. We will use our discretion in determining when and how to follow up on reported incidents, which may range from not taking action to permanent expulsion from the project and project-sponsored spaces. We will notify the accused of the report and provide them an opportunity to discuss it before any action is taken. In potentially harmful situations, such as ongoing harassment or threats to anyone's safety, we may take action without notice. We will do our utmost to keep the identity of the reporter anonymous. + + +This enforcement policy has been adopted from [IndieWeb Code of Conduct](https://indieweb.org/code-of-conduct). + + diff --git a/vendor/github.com/grafeas/grafeas/config.yaml.sample b/vendor/github.com/grafeas/grafeas/config.yaml.sample new file mode 100644 index 00000000..1fbd7f2c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/config.yaml.sample @@ -0,0 +1,22 @@ +# Copyright 2017 The Grafeas Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +grafeas: + server: + # Endpoint address, e.g. localhost:10000 + address: localhost:10000 + # PKI configuration (optional) + cafile: + keyfile: + certfile: diff --git a/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md b/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md new file mode 100644 index 00000000..f4a05c70 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md @@ -0,0 +1,159 @@ +# Running Grafeas + +## Start Grafeas + +To start the server, go to `samples/server/go-server/api/server/main` and execute + + go run main.go + +This will start the Grafeas gRPC and REST APIs on `localhost:10000`. To start Grafeas with a custom configuration use the `-config` flag (e.g. `-config config.yaml`). The root directory includes a `config.yaml.sample` that can be used as a starting point when creating your own config file. + +### Access REST API with curl + +Grafeas provides both a REST API and a gRPC API. Here is an example of using the REST API to list projects in Grafeas.
+ +`curl http://localhost:10000/v1alpha1/projects` + +### Access gRPC API with a go client + +Below is a small example of a go client that connects to grafeas and outputs any notes in `myproject` + +``` +package main + +import ( + "context" + "log" + + pb "github.com/grafeas/grafeas/v1alpha1/proto" + "google.golang.org/grpc" +) + +func main() { + conn, err := grpc.Dial("localhost:10000", grpc.WithInsecure()) + defer conn.Close() + client := pb.NewGrafeasClient(conn) + // List notes + resp, err := client.ListNotes(context.Background(), + &pb.ListNotesRequest{ + Parent: "projects/myproject", + }) + if err != nil { + log.Fatal(err) + } + + if len(resp.Notes) != 0 { + log.Println(resp.Notes) + } else { + log.Println("Project does not contain any notes") + } +} +``` + +## Use Grafeas with self-signed certificate + +### Generate CA, keys and certs + +_NOTE: The steps described in this section is meant for development environments._ + +``` +# Create CA +openssl genrsa -out ca.key 2048 +# make sure to set Common Name to your domain, e.g. localhost (without port) +openssl req -new -x509 -days 365 -key ca.key -out ca.crt + +# Create the Client Key and CSR +openssl genrsa -out client.key 2048 +openssl req -new -key client.key -out client.csr + +# Create self-signed client cert +openssl x509 -req -days 365 -in client.csr -CA ca.crt -CAkey ca.key -set_serial 01 -out client.crt + +# Convert Client Key to PKCS +openssl pkcs12 -export -clcerts -in client.crt -inkey client.key -out client.p12 + +# Convert Client Key to (combined) PEM +openssl pkcs12 -in client.p12 -out client.pem -clcerts +``` + +This is basically following https://gist.github.com/mtigas/952344 with some tweaks + +### Update config + +Add the following to your config file + + cafile: ca.crt + keyfile: ca.key + certfile: ca.crt + +### Access REST API with curl + +When using curl with a self signed certificate you need to add `-k/--insecure` and specify the client certificate. 
+ +`curl -k --cert path/to/client.pem https://localhost:10000/v1alpha1/projects` + +### Access gRPC with a go client + +When using a go client to access Grafeas with a self signed certificate you need to specify the client certificate, client key and the CA certificate. + +``` +package main + +import ( + "crypto/tls" + "crypto/x509" + "io/ioutil" + "log" + + pb "github.com/grafeas/grafeas/v1alpha1/proto" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" +) + +var ( + certFile = "/path/to/client.crt" + keyFile = "/path/to/client.key" + caFile = "/path/to/ca.crt" +) + +func main() { + // Load client cert + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + log.Fatal(err) + } + + // Load CA cert + caCert, err := ioutil.ReadFile(caFile) + if err != nil { + log.Fatal(err) + } + caCertPool := x509.NewCertPool() + caCertPool.AppendCertsFromPEM(caCert) + + // Setup HTTPS client + tlsConfig := &tls.Config{ + Certificates: []tls.Certificate{cert}, + RootCAs: caCertPool, + } + tlsConfig.BuildNameToCertificate() + creds := credentials.NewTLS(tlsConfig) + conn, err := grpc.Dial("localhost:10000", grpc.WithTransportCredentials(creds)) + client := pb.NewGrafeasClient(conn) + + // List notes + resp, err := client.ListNotes(context.Background(), + &pb.ListNotesRequest{ + Parent: "projects/myproject", + }) + if err != nil { + log.Fatal(err) + } + + if len(resp.Notes) != 0 { + log.Println(resp.Notes) + } else { + log.Println("Project does not contain any notes") + } +} +``` \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md new file mode 100644 index 00000000..3470cf68 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md @@ -0,0 +1,21 @@ +# Grafeas API Reference Implementation + +This is a reference implementation of the [Grafeas API 
Spec](https://github.com/Grafeas/Grafeas/blob/master/README) + +## Overview + +This reference implementation comes with the following caveats: +* Storage: map backed in memory server storage +* No ACLs are used in this implementation +* No authorization is in place #28 +* Filtering in list methods is not currently supported #29 +* Operation names are not currently validated when tied to notes/occurrences #31 + + +### Running the server +To run the server, follow these simple steps: + +``` +go run main/main.go +``` + diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/app.yaml b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/app.yaml new file mode 100644 index 00000000..912fc81e --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/app.yaml @@ -0,0 +1 @@ +application: \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go new file mode 100644 index 00000000..8013ad5a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go @@ -0,0 +1,62 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package config + +import ( + "io/ioutil" + + "github.com/grafeas/grafeas/samples/server/go-server/api/server/server" + "gopkg.in/yaml.v2" +) + +// File is the grafeas config file. +type file struct { + Grafeas *config `yaml:"grafeas"` +} + +// Config is the global configuration for an instance of Grafeas. +type config struct { + Server *server.Config `yaml:"server"` +} + +// DefaultConfig is a configuration that can be used as a fallback value. +func defaultConfig() *config { + return &config{ + &server.Config{ + Address: "localhost:10000", + CertFile: "", + KeyFile: "", + CAFile: "", + }, + } +} + +// Creates a config from a YAML-file. If fileName is an empty +// string a default config will be returned. +func LoadConfig(fileName string) (*config, error) { + if fileName == "" { + return defaultConfig(), nil + } + data, err := ioutil.ReadFile(fileName) + if err != nil { + return nil, err + } + var configFile file + err = yaml.Unmarshal(data, &configFile) + if err != nil { + return nil, err + } + return configFile.Grafeas, nil +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go new file mode 100644 index 00000000..923f1054 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go @@ -0,0 +1,36 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"flag"
+	"log"
+
+	"github.com/grafeas/grafeas/samples/server/go-server/api/server/config"
+	"github.com/grafeas/grafeas/samples/server/go-server/api/server/server"
+)
+
+var (
+	configFile = flag.String("config", "", "Path to a config file")
+)
+
+func main() {
+	flag.Parse()
+	config, err := config.LoadConfig(*configFile)
+	if err != nil {
+		log.Fatalf("Failed to load config file: %v", err)
+	}
+	server.Run(config.Server)
+}
diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name.go
new file mode 100644
index 00000000..e2ed3865
--- /dev/null
+++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name.go
@@ -0,0 +1,218 @@
+// Copyright 2017 The Grafeas Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package name provides methods for manipulating resource names.
+package name
+
+import (
+	"fmt"
+	"strings"
+
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+)
+
+// ResourceKind is the type that will be used for all public resource kinds.
+type ResourceKind string + +const ( + // Position of projectID in name string + projectKeywordIndex = 1 + // Position of the resourceID in the string + resourceKeywordIndex = 3 + projectsKeyword = "projects" + occurrencesKeyword = "occurrences" + notesKeyword = "notes" + operationsKeyword = "operations" + + // Note is the ResourceKind associated with notes. + Note = ResourceKind(notesKeyword) + // Occurrence is the ResourceKind associated with occurrences. + Occurrence = ResourceKind(occurrencesKeyword) + // Operation is the ResourceKind associated with operations. + Operation = ResourceKind(operationsKeyword) + // Unknown is the ResourceKind when the kind cannot be determined. + Unknown = ResourceKind("") + + // NoCharLimit is used to signal that no resource id length validation is needed + NoCharLimit = -1 +) + +var ( + projectNameFormat = FormatProject("{project_id}") + occurrenceNameFormat = FormatOccurrence("{project_id}", "{occurrence_id}") + operationNameFormat = FormatOperation("{provider_project_id}", "{operation_id}") + noteNameFormat = FormatNote("{provider_project_id}", "{note_id}") +) + +func invalidArg(pattern, got string) error { + return status.Error(codes.InvalidArgument, fmt.Sprintf("expected name to be of form %q, input was %v", pattern, got)) +} + +// FormatProject synthesizes a stringly typed name of the form: +// projects/{project_id} +// See also: ParseProject +func FormatProject(projectID string) string { + return fmt.Sprintf("%v/%v", projectsKeyword, projectID) +} + +func OccurrenceName(pID, oID string) string { + return fmt.Sprintf("projects/%v/occurrences/%v", pID, oID) +} + +func OperationName(pID, oID string) string { + return fmt.Sprintf("projects/%v/operations/%v", pID, oID) +} + +func NoteName(pID, nID string) string { + return fmt.Sprintf("projects/%v/notes/%v", pID, nID) +} + +// FormatNoteProjectKW synthesizes a stringly typed name of the form: +// projects/{project_id}/notes/{note_id} +// See also: ParseNote +func 
FormatNote(projectID, noteID string) string { + return strings.Join([]string{projectsKeyword, projectID, string(Note), noteID}, "/") +} + +// FormatOccurrence synthesizes a stringly typed name of the form: +// projects/{project_id}/occurrences/{occurrence_id} +// See also: ParseOccurrence +func FormatOccurrence(projectID, occurrenceID string) string { + return strings.Join([]string{projectsKeyword, projectID, string(Occurrence), occurrenceID}, "/") +} + +// FormatOperation synthesizes a stringly typed name of the form: +// providers/{provider_id}/project/{project_id}/operations/{operation_id} +// See also: ParseOperation +func FormatOperation(projectID, operationID string) string { + return strings.Join([]string{projectsKeyword, projectID, operationsKeyword, operationID}, "/") +} + +// ParseResourceKindAndResource takes a stringly typed name of the form: +// projects/{project_id}/occurrences/{occurrence_id} +// projects/{project_name}/notes/{note_id} +// or: +// validates form and returns either an error or the ResourceKind +// (either occurrence or note) and project/resource-ids +func ParseResourceKindAndResource(name string) (ResourceKind, string, string, error) { + err := invalidArg(fmt.Sprintf("%q or %q", occurrenceNameFormat, noteNameFormat), name) + params := strings.Split(name, "/") + if len(params) != 4 { + return Unknown, "", "", err + } + switch params[projectKeywordIndex-1] { + case projectsKeyword: + switch params[resourceKeywordIndex-1] { + case string(Occurrence): + return Occurrence, params[projectKeywordIndex], params[resourceKeywordIndex], nil + case string(Note): + return Note, params[projectKeywordIndex], params[resourceKeywordIndex], nil + case string(Operation): + return Operation, params[projectKeywordIndex], params[resourceKeywordIndex], nil + } + + return Unknown, "", "", invalidArg(fmt.Sprintf("%q or %q", occurrenceNameFormat, noteNameFormat), name) + } + return Unknown, "", "", err +} + +// ParseResourceKindAndProjectFromPath retrieves a 
projectID and resource kind from a Grafeas URL path +// This method should be used with CreateRequests. +func ParseResourceKindAndProject(parent string) (ResourceKind, string, error) { + err := invalidArg(fmt.Sprintf("%q or %q", occurrenceNameFormat, noteNameFormat), parent) + params := strings.Split(parent, "/") + if len(params) != 3 { + return Unknown, "", err + } + + switch params[projectKeywordIndex-1] { + case projectsKeyword: + switch params[resourceKeywordIndex-1] { + case string(Occurrence): + return Occurrence, params[projectKeywordIndex], nil + case string(Note): + return Note, params[projectKeywordIndex], nil + case string(Operation): + return Operation, params[projectKeywordIndex], nil + } + + return Unknown, "", invalidArg(fmt.Sprintf("%q, %q, or %q", occurrenceNameFormat, + noteNameFormat, operationNameFormat), parent) + } + return Unknown, "", err +} + +// ParseOccurrence takes a stringly typed name of the form: +// projects/{project_id}/occurrences/{occurrence_id} +// validates its form and returns either an error or the project-/occurrence-ids. +func ParseOccurrence(name string) (string, string, error) { + return parseProjectAndEntityID(name, projectsKeyword, occurrencesKeyword, NoCharLimit) +} + +// ParseNote takes a stringly typed name of the forms: +// providers/{provider_name}/notes/{note_id} +// providers/{provider_name}/notes/{note_id} +// validates its form and returns either an error or the provider-/note-ids. 
+func ParseNote(name string) (string, string, error) { + return parseProjectAndEntityID(name, projectsKeyword, notesKeyword, 100) +} + +// ParseOperation takes a stringly typed name of the form: +// projects/{project_id}/operations/{operation_id} +// validates its form and returns either an error or the project-/operation-ids +func ParseOperation(name string) (string, string, error) { + return parseProjectAndEntityID(name, projectsKeyword, operationsKeyword, 100) +} + +// parseProjectAndEntityID takes resource and project keywords, a max resource id length and a stringly typed name of the form: +// projects/{project_id}//{entity_id} +// validates its form and returns either an error or the project and resource ids. Only validates maxResourceIDLength if it is greater than 0 +func parseProjectAndEntityID(name, projectKeyword, resourceKeyword string, maxResourceIDLength int) (string, string, error) { + format := fmt.Sprintf("%s/{project_id}/%s/{entity_id}", projectKeyword, resourceKeyword) + params := strings.Split(name, "/") + if len(params) != 4 { + return "", "", invalidArg(format, name) + } + if params[projectKeywordIndex-1] != projectKeyword { + return "", "", invalidArg(format, name) + } + if params[resourceKeywordIndex-1] != resourceKeyword { + return "", "", invalidArg(format, name) + } + if params[projectKeywordIndex] == "" || params[resourceKeywordIndex] == "" { + return "", "", invalidArg(format, name) + } + if maxResourceIDLength > 0 && len(params[resourceKeywordIndex]) > maxResourceIDLength { + return "", "", status.Error(codes.InvalidArgument, fmt.Sprintf("resource id must be <= %v characters. Input was %v", maxResourceIDLength, name)) + } + return params[projectKeywordIndex], params[resourceKeywordIndex], nil +} + +// ParseProject takes a stringly typed name of the form: +// projects/{project_id} +// validates its form and returns either an error or the project-id. 
+func ParseProject(name string) (string, error) { + params := strings.Split(name, "/") + if len(params) != 2 { + return "", invalidArg(projectNameFormat, name) + } + if params[projectKeywordIndex-1] != projectsKeyword { + return "", invalidArg(projectNameFormat, name) + } + if params[projectKeywordIndex] == "" { + return "", invalidArg(projectNameFormat, name) + } + return params[projectKeywordIndex], nil +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name_test.go new file mode 100644 index 00000000..eebcd95d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/name/name_test.go @@ -0,0 +1,255 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package name + +import ( + "strings" + "testing" +) + +func TestNameRoundtrips(t *testing.T) { + tests := []struct { + part1 string + part2 string + }{ + {"a", "b"}, + {"foo", "foo"}, + {"blah-foo", "baz-inga"}, + } + + // Test two-part names + for _, test := range tests { + + on := FormatOccurrence(test.part1, test.part2) + if p1, p2, err := ParseOccurrence(on); err != nil { + t.Errorf("ParseOccurrence %v; want (%v, %v), got error %v", + on, test.part1, test.part2, err) + } else if p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseOccurrence %v; want (%v, %v), got (%v, %v)", + on, test.part1, test.part2, p1, p2) + } + if rt, p1, p2, err := ParseResourceKindAndResource(on); err != nil { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got error %v", + on, Occurrence, test.part1, test.part2, err) + } else if rt != Occurrence || p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got (%v, %v, %v)", + on, Occurrence, test.part1, test.part2, rt, p1, p2) + } + + if rt, p1, p2, err := ParseResourceKindAndResource(on); err != nil { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got error %v", + on, Occurrence, test.part1, test.part2, err) + } else if rt != Occurrence || p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got (%v, %v, %v)", + on, Occurrence, test.part1, test.part2, rt, p1, p2) + } + + nn := FormatNote(test.part1, test.part2) + if p1, p2, err := ParseNote(nn); err != nil { + t.Errorf("ParseNote %v; want (%v, %v), got error %v", + nn, test.part1, test.part2, err) + } else if p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseNote %v; want (%v, %v), got (%v, %v)", + nn, test.part1, test.part2, p1, p2) + } + if rt, p1, p2, err := ParseResourceKindAndResource(nn); err != nil { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got error %v", + on, Note, test.part1, test.part2, err) + } else if rt != 
Note || p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseResourceKindAndResource %v; want (%v, %v, %v), got (%v, %v, %v)", + on, Note, test.part1, test.part2, rt, p1, p2) + } + + opn := FormatOperation(test.part1, test.part2) + if p1, p2, err := ParseOperation(opn); err != nil { + t.Errorf("ParseOperation %v; got error %v, want (%v, %v)", + opn, err, test.part1, test.part2) + } else if p1 != test.part1 || p2 != test.part2 { + t.Errorf("ParseOperation %v; got (%v, %v), want (%v, %v)", + opn, p1, p2, test.part1, test.part2) + } + } + + // Test one-part names + for _, test := range tests { + fn := FormatProject(test.part1) + if p1, err := ParseProject(fn); err != nil { + t.Errorf("ParseProject %v; want %v, got error %v", + fn, test.part1, err) + } else if p1 != test.part1 { + t.Errorf("ParseProject %v; want %v, got %v", + fn, test.part1, p1) + } + + } +} + +func TestParseNoteValidation(t *testing.T) { + badNoteNames := []string{ + // Bad keyword + "providers/foo/findings/bar", + // Too few parts + "providers/foo/notes", + // Too many parts + "providers/foo/notes/bar/baz", + // Empty part + "providers//notes/bar", + "providers/foo/notes/", + // Too long + "providers/foo/occurrences/" + strings.Repeat("a", 101), + } + + for _, test := range badNoteNames { + if p1, p2, err := ParseNote(test); err == nil { + t.Errorf("ParseNote %v; wanted error, got (%v, %v)", + test, p1, p2) + } + } +} + +func TestParseOccurrenceValidation(t *testing.T) { + badOccurrenceNames := []string{ + // Bad keyword + "providers/foo/occurrences/bar", + // Bad keyword + "projects/foo/results/bar", + // Too few parts + "projects/foo/occurrences", + // Too many parts + "projects/foo/occurrences/bar/baz", + // Empty part + "projects//occurrences/bar", + "projects/foo/occurrences/", + } + + for _, test := range badOccurrenceNames { + if p1, p2, err := ParseOccurrence(test); err == nil { + t.Errorf("ParseOccurrence %v; wanted error, got (%v, %v)", + test, p1, p2) + } + } +} + +func 
TestParseResourceKindAndResource(t *testing.T) { + badResourceNames := []string{ + "providers/foo/findings/bar", + "providers/foo/occurrences/bar", + "foo/foo/bar/bar", + "projects/foo/results/bar", + "projects/foo/results", + "projects/foo/notes", + "projects/foo", + "providers/foo/results", + "providers/foo/notes", + "providers/foo", + "projects/foo/findings", + "projects/foo", + "projects/foo/occurrences", + } + for _, test := range badResourceNames { + if t1, p, r, err := ParseResourceKindAndResource(test); err == nil { + t.Errorf("ParseResourceTypeAndResource %v; wanted error, got (%v, %v, %v)", + test, t1, p, r) + } + } +} + +func TestParseOperations(t *testing.T) { + badResourceNames := []string{ + "providers/foo/operations/bar", + "providers/foo/operations/bar", + "foo/foo/bar/bar", + "projects/foo/providers/bar", + "providers/foo/projects/bar", + "projects/foo/providers/bar/operations/baz", + "operations/foo", + "providers/-/projects/-/operations/abc", + "providers//projects//operations/", + "providers/foo/projects/bar/operations/" + strings.Repeat("a", 101), + } + for _, test := range badResourceNames { + if t1, r, err := ParseOperation(test); err == nil { + t.Errorf("ParseOperation %v; got (%v, %v), wanted error", + test, t1, r) + } + } +} + +func TestOccurrenceErrorMessage(t *testing.T) { + want := "projects/{project_id}/occurrences/{entity_id}" + if _, _, err := ParseOccurrence("providers/foo/notes/bar"); !strings.Contains(err.Error(), "projects/{project_id}/occurrences/{entity_id}") { + t.Fatalf("bad error msg, got %q want it to contain %q", err, want) + } +} + +func TestParseResourceKindAndProjectPath(t *testing.T) { + badResourcePaths := []string{ + "providers/foo/operations/bar", + "providers/foo/operations/bar", + "foo/foo/bar/bar", + "projects/foo/providers/bar", + "providers/foo/projects/bar", + "projects/foo/providers/bar/operations/baz", + "projects/foo/operations/bar", + "projects/foo/occurrences/bar", + "projects/foo/notes/bar", + 
"operations/foo", + "providers/-/projects/-/operations/abc", + "providers//projects//operations/", + "providers/foo/projects/bar/operations/" + strings.Repeat("a", 101), + } + for _, test := range badResourcePaths { + if t1, r, err := ParseResourceKindAndProject(test); err == nil { + t.Errorf("ParseResourceKindAndProject %v; got (%v, %v), wanted error", + test, t1, r) + } + } + + goodResourcePaths := []string{ + "projects/foo/occurrences", + "projects/foo/operations", + "projects/foo/notes", + } + for _, test := range goodResourcePaths { + if t1, r, err := ParseResourceKindAndProject(test); err != nil { + t.Errorf("ParseResourceKindAndProject %v; got (%v, %v, %v), wanted success", + test, t1, r, err) + } else if r != "foo" { + t.Errorf("ParseResourceKindAndProject %v; got %v, wanted foo", test, t1) + } + } +} + +func TestParseProjectValidation(t *testing.T) { + badProjectNames := []string{ + // Bad keyword + "providers/foo/", + // Trailing slash + "projects/foo/", + // Last part non-empty + "projects/foo/baz", + // Too many parts + "projects/foo/baz/asdf", + // Empty part + "projects//", + } + + for _, test := range badProjectNames { + if p1, err := ParseProject(test); err == nil { + t.Errorf("ParseProject %v; wanted error, got %v", + test, p1) + } + } +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go new file mode 100644 index 00000000..9ed4b23b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go @@ -0,0 +1,207 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + "crypto/tls" + "crypto/x509" + "io/ioutil" + "log" + "net" + "net/http" + "strings" + + "github.com/cockroachdb/cmux" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/storage" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" +) + +type Config struct { + Address string `yaml:"address"` // Endpoint address, e.g. 
localhost:10000 + CertFile string `yaml:"certfile"` // A PEM eoncoded certificate file + KeyFile string `yaml:"keyfile"` // A PEM encoded private key file + CAFile string `yaml:"cafile"` // A PEM eoncoded CA's certificate file +} + +// Run initializes grpc and grpc gateway api services on the same address +func Run(config *Config) { + l, err := net.Listen("tcp", config.Address) + if err != nil { + log.Fatalln("could not listen to address", config.Address) + } + log.Println("starting grpc server") + + var ( + apiHandler http.Handler + apiListener net.Listener + srv *http.Server + ctx = context.Background() + httpMux = http.NewServeMux() + tcpMux = cmux.New(l) + ) + + tlsConfig, err := tlsClientConfig(config.CertFile) + if err != nil { + log.Fatal("Failed to create tls config", err) + } + + if tlsConfig != nil { + cert, err := tls.LoadX509KeyPair(config.CertFile, config.KeyFile) + if err != nil { + log.Fatalln("Failed to load certificate files", err) + } + tlsConfig.Certificates = []tls.Certificate{cert} + tlsConfig.NextProtos = []string{"h2"} + + apiListener = tls.NewListener(tcpMux.Match(cmux.Any()), tlsConfig) + go func() { handleShutdown(tcpMux.Serve()) }() + + grpcServer := newGrpcServer(tlsConfig) + gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), tlsConfig) + + httpMux.Handle("/", gwmux) + apiHandler = grpcHandlerFunc(grpcServer, httpMux) + + log.Println("grpc server is configured with client certificate authentication") + } else { + grpcL := tcpMux.Match(cmux.HTTP2HeaderField("content-type", "application/grpc")) + apiListener = tcpMux.Match(cmux.Any()) + go func() { handleShutdown(tcpMux.Serve()) }() + + grpcServer := newGrpcServer(nil) + go func() { handleShutdown(grpcServer.Serve(grpcL)) }() + + gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), nil) + + httpMux.Handle("/", gwmux) + apiHandler = httpMux + + log.Println("grpc server is configured without client certificate authentication") + } + + srv = &http.Server{ + Handler: 
apiHandler, + TLSConfig: tlsConfig, + } + + // blocking call + handleShutdown(srv.Serve(apiListener)) + log.Println("Grpc API stopped") +} + +// handleShutdown handles the server shut down error. +func handleShutdown(err error) { + if err != nil { + if opErr, ok := err.(*net.OpError); !ok || (ok && opErr.Op != "accept") { + log.Fatal(err) + } + } +} + +func newGrpcServer(tlsConfig *tls.Config) *grpc.Server { + grpcOpts := []grpc.ServerOption{} + + if tlsConfig != nil { + grpcOpts = append(grpcOpts, grpc.Creds(credentials.NewTLS(tlsConfig))) + } + + grpcServer := grpc.NewServer(grpcOpts...) + g := v1alpha1.Grafeas{S: storage.NewMemStore()} + pb.RegisterGrafeasServer(grpcServer, &g) + pb.RegisterGrafeasProjectsServer(grpcServer, &g) + opspb.RegisterOperationsServer(grpcServer, &g) + + return grpcServer +} + +func newGrpcGatewayServer(ctx context.Context, listenerAddr string, tlsConfig *tls.Config) http.Handler { + var ( + gwTLSConfig *tls.Config + gwOpts []grpc.DialOption + ) + + if tlsConfig != nil { + gwTLSConfig = tlsConfig.Clone() + gwTLSConfig.InsecureSkipVerify = true + gwOpts = append(gwOpts, grpc.WithTransportCredentials(credentials.NewTLS(gwTLSConfig))) + } else { + gwOpts = append(gwOpts, grpc.WithInsecure()) + } + + // changes json serializer to include empty fields with default values + jsonOpt := runtime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.JSONPb{EmitDefaults: true}) + gwmux := runtime.NewServeMux(jsonOpt) + + conn, err := grpc.DialContext(ctx, listenerAddr, gwOpts...) 
+ if err != nil { + log.Fatal("could not initialize grpc gateway connection") + } + err = pb.RegisterGrafeasHandler(ctx, gwmux, conn) + if err != nil { + log.Fatal("could not initialize ancestry grpc gateway") + } + + err = pb.RegisterGrafeasProjectsHandler(ctx, gwmux, conn) + if err != nil { + log.Fatal("could not initialize notification grpc gateway") + } + + return http.Handler(gwmux) +} + +// grpcHandlerFunc returns an http.Handler that delegates to grpcServer on incoming gRPC +// connections or otherHandler otherwise. Copied from cockroachdb. +func grpcHandlerFunc(grpcServer *grpc.Server, otherHandler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.ProtoMajor == 2 && strings.Contains(r.Header.Get("Content-Type"), "application/grpc") { + grpcServer.ServeHTTP(w, r) + } else { + otherHandler.ServeHTTP(w, r) + } + }) +} + +// tlsClientConfig initializes a *tls.Config using the given CA. The resulting +// *tls.Config is meant to be used to configure an HTTP server to do client +// certificate authentication. +// +// If no CA is given, a nil *tls.Config is returned; no client certificate will +// be required and verified. In other words, authentication will be disabled. 
+func tlsClientConfig(caPath string) (*tls.Config, error) { + if caPath == "" { + return nil, nil + } + + caCert, err := ioutil.ReadFile(caPath) + if err != nil { + return nil, err + } + + caCertPool := x509.NewCertPool() + caCertPool.AppendCertsFromPEM(caCert) + + tlsConfig := &tls.Config{ + ClientCAs: caCertPool, + ClientAuth: tls.RequireAndVerifyClientCert, + } + + return tlsConfig, nil +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go new file mode 100644 index 00000000..ed6db860 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go @@ -0,0 +1,307 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package storage + +import ( + "fmt" + "strings" + "sync" + + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + "github.com/grafeas/grafeas/server-go" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// memStore is an in-memory storage solution for Grafeas +type memStore struct { + sync.RWMutex + occurrencesByID map[string]*pb.Occurrence + notesByID map[string]*pb.Note + opsByID map[string]*opspb.Operation + projects map[string]bool +} + +// NewMemStore creates a memStore with all maps initialized. +func NewMemStore() server.Storager { + return &memStore{ + occurrencesByID: map[string]*pb.Occurrence{}, + notesByID: map[string]*pb.Note{}, + opsByID: map[string]*opspb.Operation{}, + projects: map[string]bool{}, + } +} + +// CreateProject adds the specified project to the mem store +func (m *memStore) CreateProject(pID string) error { + m.Lock() + defer m.Unlock() + if _, ok := m.projects[pID]; ok { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Project with name %q already exists", pID)) + } + m.projects[pID] = true + return nil +} + +// DeleteProject deletes the project with the given pID from the mem store +func (m *memStore) DeleteProject(pID string) error { + m.Lock() + defer m.Unlock() + if _, ok := m.projects[pID]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Project with name %q does not Exist", pID)) + } + delete(m.projects, pID) + return nil +} + +// GetProject returns the project with the given pID from the mem store +func (m *memStore) GetProject(pID string) (*pb.Project, error) { + m.RLock() + defer m.RUnlock() + if _, ok := m.projects[pID]; !ok { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Project with name %q does not Exist", pID)) + } + return &pb.Project{Name: name.FormatProject(pID)}, nil +} + +// ListProjects returns the project id for all projects from the mem 
store +func (m *memStore) ListProjects(filters string) []*pb.Project { + m.RLock() + defer m.RUnlock() + projects := make([]*pb.Project, len(m.projects)) + i := 0 + for k := range m.projects { + projects[i] = &pb.Project{Name: name.FormatProject(k)} + i++ + } + return projects +} + +// CreateOccurrence adds the specified occurrence to the mem store +func (m *memStore) CreateOccurrence(o *pb.Occurrence) error { + m.Lock() + defer m.Unlock() + if _, ok := m.occurrencesByID[o.Name]; ok { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Occurrence with name %q already exists", o.Name)) + } + m.occurrencesByID[o.Name] = o + return nil +} + +// DeleteOccurrence deletes the occurrence with the given pID and oID from the memStore +func (m *memStore) DeleteOccurrence(pID, oID string) error { + oName := name.OccurrenceName(pID, oID) + m.Lock() + defer m.Unlock() + if _, ok := m.occurrencesByID[oName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Occurrence with oName %q does not Exist", oName)) + } + delete(m.occurrencesByID, oName) + return nil +} + +// UpdateOccurrence updates the existing occurrence with the given projectID and occurrenceID +func (m *memStore) UpdateOccurrence(pID, oID string, o *pb.Occurrence) error { + oName := name.OccurrenceName(pID, oID) + m.Lock() + defer m.Unlock() + if _, ok := m.occurrencesByID[oName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Occurrence with oName %q does not Exist", oName)) + } + m.occurrencesByID[oName] = o + return nil +} + +// GetOccurrence returns the occurrence with pID and oID +func (m *memStore) GetOccurrence(pID, oID string) (*pb.Occurrence, error) { + oName := name.OccurrenceName(pID, oID) + m.RLock() + defer m.RUnlock() + o, ok := m.occurrencesByID[oName] + if !ok { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Occurrence with name %q does not Exist", oName)) + } + return o, nil +} + +// ListOccurrences returns the occurrences for this project ID (pID) +func (m *memStore) 
ListOccurrences(pID, filters string) []*pb.Occurrence { + os := []*pb.Occurrence{} + m.RLock() + defer m.RUnlock() + for _, o := range m.occurrencesByID { + if strings.HasPrefix(o.Name, fmt.Sprintf("projects/%v", pID)) { + os = append(os, o) + } + } + return os +} + +// CreateNote adds the specified note to the mem store +func (m *memStore) CreateNote(n *pb.Note) error { + m.Lock() + defer m.Unlock() + if _, ok := m.notesByID[n.Name]; ok { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Note with name %q already exists", n.Name)) + } + m.notesByID[n.Name] = n + return nil +} + +// DeleteNote deletes the note with the given pID and nID from the memStore +func (m *memStore) DeleteNote(pID, nID string) error { + nName := name.NoteName(pID, nID) + m.Lock() + defer m.Unlock() + if _, ok := m.notesByID[nName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Note with name %q does not Exist", nName)) + } + delete(m.notesByID, nName) + return nil +} + +// UpdateNote updates the existing note with the given pID and nID +func (m *memStore) UpdateNote(pID, nID string, n *pb.Note) error { + nName := name.NoteName(pID, nID) + m.Lock() + defer m.Unlock() + if _, ok := m.notesByID[nName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Note with name %q does not Exist", nName)) + } + m.notesByID[nName] = n + return nil +} + +// GetNote returns the note with pID and nID +func (m *memStore) GetNote(pID, nID string) (*pb.Note, error) { + nName := name.NoteName(pID, nID) + m.RLock() + defer m.RUnlock() + n, ok := m.notesByID[nName] + if !ok { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Note with name %q does not Exist", nName)) + } + return n, nil +} + +// GetNoteByOccurrence returns the note attached to occurrence with pID and oID +func (m *memStore) GetNoteByOccurrence(pID, oID string) (*pb.Note, error) { + oName := name.OccurrenceName(pID, oID) + m.RLock() + defer m.RUnlock() + o, ok := m.occurrencesByID[oName] + if !ok { + return nil, 
status.Error(codes.NotFound, fmt.Sprintf("Occurrence with name %q does not Exist", oName)) + } + n, ok := m.notesByID[o.NoteName] + if !ok { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Note with name %q does not Exist", o.NoteName)) + } + return n, nil +} + +// ListNotes returns the notes for for this project (pID) +func (m *memStore) ListNotes(pID, filters string) []*pb.Note { + ns := []*pb.Note{} + m.RLock() + defer m.RUnlock() + for _, n := range m.notesByID { + if strings.HasPrefix(n.Name, fmt.Sprintf("projects/%v", pID)) { + ns = append(ns, n) + } + } + return ns +} + +// ListNoteOccurrences returns the occcurrences on the particular note (nID) for this project (pID) +func (m *memStore) ListNoteOccurrences(pID, nID, filters string) ([]*pb.Occurrence, error) { + // TODO: use filters + m.RLock() + defer m.RUnlock() + // Verify that note exists + if _, err := m.GetNote(pID, nID); err != nil { + return nil, err + } + nName := name.FormatNote(pID, nID) + os := []*pb.Occurrence{} + for _, o := range m.occurrencesByID { + if o.NoteName == nName { + os = append(os, o) + } + } + return os, nil +} + +// GetOperation returns the operation with pID and oID +func (m *memStore) GetOperation(pID, opID string) (*opspb.Operation, error) { + oName := name.OperationName(pID, opID) + m.RLock() + defer m.RUnlock() + o, ok := m.opsByID[oName] + if !ok { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q does not Exist", oName)) + } + return o, nil +} + +// CreateOperation adds the specified operation to the mem store +func (m *memStore) CreateOperation(o *opspb.Operation) error { + m.Lock() + defer m.Unlock() + if _, ok := m.opsByID[o.Name]; ok { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Operation with name %q already exists", o.Name)) + } + m.opsByID[o.Name] = o + return nil +} + +// DeleteOperation deletes the operation with the given pID and oID from the memStore +func (m *memStore) DeleteOperation(pID, opID string) error { + 
opName := name.OperationName(pID, opID) + m.Lock() + defer m.Unlock() + if _, ok := m.opsByID[opName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q does not Exist", opName)) + } + delete(m.occurrencesByID, opName) + return nil +} + +// UpdateOperation updates the existing operation with the given pID and nID +func (m *memStore) UpdateOperation(pID, opID string, op *opspb.Operation) error { + opName := name.OperationName(pID, opID) + m.Lock() + defer m.Unlock() + if _, ok := m.opsByID[opName]; !ok { + return status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q does not Exist", opName)) + } + m.opsByID[opName] = op + return nil +} + +// ListOperations returns the operations for this project (pID) +func (m *memStore) ListOperations(pID, filters string) []*opspb.Operation { + ops := []*opspb.Operation{} + m.RLock() + defer m.RUnlock() + for _, op := range m.opsByID { + if strings.HasPrefix(op.Name, fmt.Sprintf("projects/%v", pID)) { + ops = append(ops, op) + } + } + return ops +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go new file mode 100644 index 00000000..8f84262f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go @@ -0,0 +1,568 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import ( + "fmt" + "sort" + + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/testing" + + "reflect" + "strings" + "testing" + + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func TestCreateProject(t *testing.T) { + s := NewMemStore() + p := "myproject" + if err := s.CreateProject(p); err != nil { + t.Errorf("CreateProject got %v want success", err) + } + // Try to insert the same project twice, expect failure. + if err := s.CreateProject(p); err == nil { + t.Errorf("CreateProject got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateProject got code %v want %v", s.Code(), codes.AlreadyExists) + } +} + +func TestCreateNote(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Errorf("CreateNote got %v want success", err) + } + // Try to insert the same note twice, expect failure. 
+ if err := s.CreateNote(n); err == nil { + t.Errorf("CreateNote got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateNote got code %v want %v", s.Code(), codes.AlreadyExists) + } +} + +func TestCreateOccurrence(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + // Try to insert the same occurrence twice, expect failure. + if err := s.CreateOccurrence(o); err == nil { + t.Errorf("CreateOccurrence got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateOccurrence got code %v want %v", s.Code(), codes.AlreadyExists) + } + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and occurrenceID %v", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } +} + +func TestCreateOperation(t *testing.T) { + s := NewMemStore() + opPID := "vulnerability-scanner-a" + op := testutil.Operation(opPID) + if err := s.CreateOperation(op); err != nil { + t.Errorf("CreateOperation got %v want success", err) + } + // Try to insert the same note twice, expect failure. 
+ if err := s.CreateOperation(op); err == nil { + t.Errorf("CreateOperation got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateOperation got code %v want %v", s.Code(), codes.AlreadyExists) + } +} +func TestDeleteProject(t *testing.T) { + s := NewMemStore() + pID := "myproject" + // Delete before the note exists + if err := s.DeleteProject(pID); err == nil { + t.Error("Deleting nonexistant note got success, want error") + } + if err := s.CreateProject(pID); err != nil { + t.Fatalf("CreateProject got %v want success", err) + } + + if err := s.DeleteProject(pID); err != nil { + t.Errorf("DeleteProject got %v, want success ", err) + } +} + +func TestDeleteOccurrence(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + // Delete before the occurrence exists + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if err := s.DeleteOccurrence(pID, oID); err == nil { + t.Error("Deleting nonexistant occurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + if err := s.DeleteOccurrence(pID, oID); err != nil { + t.Errorf("DeleteOccurrence got %v, want success ", err) + } +} + +func TestUpdateOccurrence(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and occurrenceID %v", err) + } + if err := s.UpdateOccurrence(pID, oID, o); 
err == nil { + t.Fatal("UpdateOccurrence got success want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } + + o2 := o + o2.GetVulnerabilityDetails().CvssScore = 1.0 + if err := s.UpdateOccurrence(pID, oID, o2); err != nil { + t.Fatalf("UpdateOccurrence got %v want success", err) + } + + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o2) { + t.Errorf("GetOccurrence got %v, want %v", got, o2) + } +} + +func TestDeleteNote(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + // Delete before the note exists + pID, oID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note %v", err) + } + if err := s.DeleteNote(pID, oID); err == nil { + t.Error("Deleting nonexistant note got success, want error") + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + + if err := s.DeleteNote(pID, oID); err != nil { + t.Errorf("DeleteNote got %v, want success ", err) + } +} + +func TestUpdateNote(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing projectID and noteID %v", err) + } + if err := s.UpdateNote(pID, nID, n); err == nil { + t.Fatal("UpdateNote got success want error") + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNote got %v, want %v", got, n) + } 
+ + n2 := n + n2.GetVulnerabilityType().CvssScore = 1.0 + if err := s.UpdateNote(pID, nID, n2); err != nil { + t.Fatalf("UpdateNote got %v want success", err) + } + + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n2) { + t.Errorf("GetNote got %v, want %v", got, n2) + } +} + +func TestGetProject(t *testing.T) { + s := NewMemStore() + pID := "myproject" + // Try to get project before it has been created, expect failure. + if _, err := s.GetProject(pID); err == nil { + t.Errorf("GetProject got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("GetProject got code %v want %v", s.Code(), codes.NotFound) + } + s.CreateProject(pID) + if p, err := s.GetProject(pID); err != nil { + t.Fatalf("GetProject got %v want success", err) + } else if p.Name != name.FormatProject(pID) { + t.Fatalf("Got %s want %s", p.Name, pID) + } +} + +func TestGetOccurrence(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if _, err := s.GetOccurrence(pID, oID); err == nil { + t.Fatal("GetOccurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v, want Success", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } +} + +func TestGetNote(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + 
t.Fatalf("Error parsing note %v", err) + } + if _, err := s.GetNote(pID, nID); err == nil { + t.Fatal("GetNote got success, want error") + } + if err := s.CreateNote(n); err != nil { + t.Errorf("CreateNote got %v, want Success", err) + } + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNote got %v, want %v", got, n) + } +} + +func TestGetNoteByOccurrence(t *testing.T) { + s := NewMemStore() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if _, err := s.GetNoteByOccurrence(pID, oID); err == nil { + t.Fatal("GetNoteByOccurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v, want Success", err) + } + if got, err := s.GetNoteByOccurrence(pID, oID); err != nil { + t.Fatalf("GetNoteByOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNoteByOccurrence got %v, want %v", got, n) + } +} + +func TestGetOperation(t *testing.T) { + s := NewMemStore() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing operation %v", err) + } + if _, err := s.GetOperation(pID, oID); err == nil { + t.Fatal("GetOperation got success, want error") + } + if err := s.CreateOperation(o); err != nil { + t.Errorf("CreateOperation got %v, want Success", err) + } + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOperation got %v, want %v", got, o) + } +} + +func 
TestDeleteOperation(t *testing.T) { + s := NewMemStore() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + // Delete before the operation exists + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing note %v", err) + } + if err := s.DeleteOperation(pID, oID); err == nil { + t.Error("Deleting nonexistant operation got success, want error") + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + + if err := s.DeleteOperation(pID, oID); err != nil { + t.Errorf("DeleteOperation got %v, want success ", err) + } +} + +func TestUpdateOperation(t *testing.T) { + s := NewMemStore() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and operationID %v", err) + } + if err := s.UpdateOperation(pID, oID, o); err == nil { + t.Fatal("UpdateOperation got success want error") + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOperation got %v, want %v", got, o) + } + + o2 := o + o2.Done = true + if err := s.UpdateOperation(pID, oID, o2); err != nil { + t.Fatalf("UpdateOperation got %v want success", err) + } + + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o2) { + t.Errorf("GetOperation got %v, want %v", got, o2) + } +} + +func TestListProjects(t *testing.T) { + s := NewMemStore() + wantProjectNames := []string{} + for i := 0; i < 20; i++ { + pID := fmt.Sprint("Project", i) + if err := s.CreateProject(pID); err != nil { + t.Fatalf("CreateProject got %v want success", err) + } + wantProjectNames = append(wantProjectNames, 
name.FormatProject(pID)) + } + filter := "filters_are_yet_to_be_implemented" + gotProjects := s.ListProjects(filter) + if len(gotProjects) != 20 { + t.Errorf("ListProjects got %v operations, want 20", len(gotProjects)) + } + gotProjectNames := make([]string, len(gotProjects)) + for i, project := range gotProjects { + gotProjectNames[i] = project.Name + } + // Sort to handle that wantProjectNames are not guaranteed to be listed in insertion order + sort.Strings(wantProjectNames) + sort.Strings(gotProjectNames) + if !reflect.DeepEqual(gotProjectNames, wantProjectNames) { + t.Errorf("ListProjects got %v want %v", gotProjectNames, wantProjectNames) + } +} + +func TestListOperations(t *testing.T) { + s := NewMemStore() + ops := []opspb.Operation{} + findProject := "findThese" + dontFind := "dontFind" + for i := 0; i < 20; i++ { + o := testutil.Operation("") + if i < 5 { + o.Name = name.FormatOperation(findProject, string(i)) + } else { + o.Name = name.FormatOperation(dontFind, string(i)) + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + ops = append(ops, *o) + } + gotOs := s.ListOperations(findProject, "") + + if len(gotOs) != 5 { + t.Errorf("ListOperations got %v operations, want 5", len(gotOs)) + } + for _, o := range gotOs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(o.Name, want) { + t.Errorf("ListOperations got %v want prefix %v", o.Name, want) + } + } +} + +func TestListNotes(t *testing.T) { + s := NewMemStore() + ns := []*pb.Note{} + findProject := "findThese" + dontFind := "dontFind" + for i := 0; i < 20; i++ { + n := testutil.Note("") + if i < 5 { + n.Name = name.FormatNote(findProject, string(i)) + } else { + n.Name = name.FormatNote(dontFind, string(i)) + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + ns = append(ns, n) + } + gotNs := s.ListNotes(findProject, "") + if len(gotNs) != 5 { + t.Errorf("ListNotes got %v 
operations, want 5", len(gotNs)) + } + for _, n := range gotNs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(n.Name, want) { + t.Errorf("ListNotes got %v want %v", n.Name, want) + } + } +} + +func TestListOccurrences(t *testing.T) { + s := NewMemStore() + os := []*pb.Occurrence{} + findProject := "findThese" + dontFind := "dontFind" + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + for i := 0; i < 20; i++ { + oPID := "_" + o := testutil.Occurrence(oPID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, string(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, string(i)) + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + os = append(os, o) + } + gotOs := s.ListOccurrences(findProject, "") + if len(gotOs) != 5 { + t.Errorf("ListOccurrences got %v Occurrences, want 5", len(gotOs)) + } + for _, o := range gotOs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(o.Name, want) { + t.Errorf("ListOccurrences got %v want %v", o.Name, want) + } + } +} + +func TestListNoteOccurrences(t *testing.T) { + s := NewMemStore() + os := []*pb.Occurrence{} + findProject := "findThese" + dontFind := "dontFind" + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + for i := 0; i < 20; i++ { + oPID := "_" + o := testutil.Occurrence(oPID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, string(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, string(i)) + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + os = append(os, o) + } + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note name %v", err) + } + gotOs, err := 
s.ListNoteOccurrences(pID, nID, "") + if err != nil { + t.Fatalf("ListNoteOccurrences got %v want success", err) + } + if len(gotOs) != 20 { + t.Errorf("ListNoteOccurrences got %v Occurrences, want 20", len(gotOs)) + } + for _, o := range gotOs { + if o.NoteName != n.Name { + t.Errorf("ListNoteOccurrences got %v want %v", o.Name, o.NoteName) + } + } +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/testing/testobjects.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/testing/testobjects.go new file mode 100644 index 00000000..5c1f9fa8 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/testing/testobjects.go @@ -0,0 +1,182 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package testutil + +import ( + "fmt" + "log" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/any" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" +) + +func Occurrence(pID, noteName string) *pb.Occurrence { + return &pb.Occurrence{ + Name: fmt.Sprintf("projects/%s/occurrences/134", pID), + ResourceUrl: "gcr.io/foo/bar", + NoteName: noteName, + Kind: pb.Note_PACKAGE_VULNERABILITY, + Details: &pb.Occurrence_VulnerabilityDetails{ + VulnerabilityDetails: &pb.VulnerabilityType_VulnerabilityDetails{ + Severity: pb.VulnerabilityType_HIGH, + CvssScore: 7.5, + PackageIssue: []*pb.VulnerabilityType_PackageIssue{ + &pb.VulnerabilityType_PackageIssue{ + SeverityName: "HIGH", + AffectedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:debian:debian_linux:8", + Package: "icu", + Version: &pb.VulnerabilityType_Version{ + Name: "52.1", + Revision: "8+deb8u3", + }, + }, + FixedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:debian:debian_linux:8", + Package: "icu", + Version: &pb.VulnerabilityType_Version{ + Name: "52.1", + Revision: "8+deb8u4", + }, + }, + }, + }, + }, + }, + } +} + +func Note(pID string) *pb.Note { + return &pb.Note{ + Name: fmt.Sprintf("projects/%s/notes/CVE-1999-0710", pID), + ShortDescription: "CVE-2014-9911", + LongDescription: "NIST vectors: AV:N/AC:L/Au:N/C:P/I:P", + Kind: pb.Note_PACKAGE_VULNERABILITY, + NoteType: &pb.Note_VulnerabilityType{ + &pb.VulnerabilityType{ + CvssScore: 7.5, + Severity: pb.VulnerabilityType_HIGH, + Details: []*pb.VulnerabilityType_Detail{ + &pb.VulnerabilityType_Detail{ + CpeUri: "cpe:/o:debian:debian_linux:7", + Package: "icu", + Description: "Stack-based buffer overflow in the ures_getByKeyWithFallback function in " + + "common/uresbund.cpp in International Components for Unicode (ICU) before 54.1 for C/C++ allows " + + "remote attackers to cause a 
denial of service or possibly have unspecified other impact via a crafted uloc_getDisplayName call.", + MinAffectedVersion: &pb.VulnerabilityType_Version{ + Kind: pb.VulnerabilityType_Version_MINIMUM, + }, + SeverityName: "HIGH", + + FixedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:debian:debian_linux:7", + Package: "icu", + Version: &pb.VulnerabilityType_Version{ + Name: "4.8.1.1", + Revision: "12+deb7u6", + }, + }, + }, + &pb.VulnerabilityType_Detail{ + CpeUri: "cpe:/o:debian:debian_linux:8", + Package: "icu", + Description: "Stack-based buffer overflow in the ures_getByKeyWithFallback function in " + + "common/uresbund.cpp in International Components for Unicode (ICU) before 54.1 for C/C++ allows " + + "remote attackers to cause a denial of service or possibly have unspecified other impact via a crafted uloc_getDisplayName call.", + MinAffectedVersion: &pb.VulnerabilityType_Version{ + Kind: pb.VulnerabilityType_Version_MINIMUM, + }, + SeverityName: "HIGH", + + FixedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:debian:debian_linux:8", + Package: "icu", + Version: &pb.VulnerabilityType_Version{ + Name: "52.1", + Revision: "8+deb8u4", + }, + }, + }, + &pb.VulnerabilityType_Detail{ + CpeUri: "cpe:/o:debian:debian_linux:9", + Package: "icu", + Description: "Stack-based buffer overflow in the ures_getByKeyWithFallback function in " + + "common/uresbund.cpp in International Components for Unicode (ICU) before 54.1 for C/C++ allows " + + "remote attackers to cause a denial of service or possibly have unspecified other impact via a crafted uloc_getDisplayName call.", + MinAffectedVersion: &pb.VulnerabilityType_Version{ + Kind: pb.VulnerabilityType_Version_MINIMUM, + }, + SeverityName: "HIGH", + + FixedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:debian:debian_linux:9", + Package: "icu", + Version: &pb.VulnerabilityType_Version{ + Name: "55.1", + Revision: "3", + }, + }, + }, + 
&pb.VulnerabilityType_Detail{ + CpeUri: "cpe:/o:canonical:ubuntu_linux:14.04", + Package: "andriod", + Description: "Stack-based buffer overflow in the ures_getByKeyWithFallback function in " + + "common/uresbund.cpp in International Components for Unicode (ICU) before 54.1 for C/C++ allows " + + "remote attackers to cause a denial of service or possibly have unspecified other impact via a crafted uloc_getDisplayName call.", + MinAffectedVersion: &pb.VulnerabilityType_Version{ + Kind: pb.VulnerabilityType_Version_MINIMUM, + }, + SeverityName: "MEDIUM", + + FixedLocation: &pb.VulnerabilityType_VulnerabilityLocation{ + CpeUri: "cpe:/o:canonical:ubuntu_linux:14.04", + Package: "andriod", + Version: &pb.VulnerabilityType_Version{ + Kind: pb.VulnerabilityType_Version_MAXIMUM, + }, + }, + }, + }, + }, + }, + RelatedUrl: []*pb.Note_RelatedUrl{ + &pb.Note_RelatedUrl{ + Url: "https://security-tracker.debian.org/tracker/CVE-2014-9911", + Label: "More Info", + }, + &pb.Note_RelatedUrl{ + Url: "http://people.ubuntu.com/~ubuntu-security/cve/CVE-2014-9911", + Label: "More Info", + }, + }, + } +} + +func Operation(pID string) *opspb.Operation { + md := &pb.OperationMetadata{CreateTime: ptypes.TimestampNow()} + bytes, err := proto.Marshal(md) + if err != nil { + log.Printf("Error parsing bytes: %v", err) + return nil + } + return &opspb.Operation{ + Name: fmt.Sprintf("projects/%s/operations/foo", pID), + Metadata: &any.Any{Value: bytes}, + Done: false, + } +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go new file mode 100644 index 00000000..618b427d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go @@ -0,0 +1,373 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package v1alpha1 is an implementation of the v1alpha1 version of Grafeas. +package v1alpha1 + +import ( + "fmt" + "log" + + "github.com/golang/protobuf/ptypes/empty" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + server "github.com/grafeas/grafeas/server-go" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + "golang.org/x/net/context" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// Grafeas is an implementation of the Grafeas API, which should be called by handler methods for verification of logic +// and storage. +type Grafeas struct { + S server.Storager +} + +// CreateProject validates that a project is valid and then creates a project in the backing datastore. +func (g *Grafeas) CreateProject(ctx context.Context, req *pb.CreateProjectRequest) (*empty.Empty, error) { + pID, err := name.ParseProject(req.Name) + if err != nil { + log.Printf("Error parsing project name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + } + return &empty.Empty{}, g.S.CreateProject(pID) +} + +// CreateNote validates that a note is valid and then creates a note in the backing datastore. 
+func (g *Grafeas) CreateNote(ctx context.Context, req *pb.CreateNoteRequest) (*pb.Note, error) { + n := req.Note + if req == nil { + log.Print("Note must not be empty.") + return nil, status.Error(codes.InvalidArgument, "Note must not be empty") + } + if n.Name == "" { + log.Printf("Note name must not be empty: %v", n.Name) + return nil, status.Error(codes.InvalidArgument, "Note name must not be empty") + } + pID, _, err := name.ParseNote(n.Name) + if err != nil { + log.Printf("Invalid note name: %v", n.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid note name") + } + if _, err = g.S.GetProject(pID); err != nil { + log.Printf("Unable to get project %v, err: %v", pID, err) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Project %v not found", pID)) + } + + // TODO: Validate that operation exists if it is specified when get methods are implmented + return n, g.S.CreateNote(n) +} + +// CreateOccurrence validates that a note is valid and then creates an occurrence in the backing datastore. 
+func (g *Grafeas) CreateOccurrence(ctx context.Context, req *pb.CreateOccurrenceRequest) (*pb.Occurrence, error) { + o := req.Occurrence + if req == nil { + log.Print("Occurrence must not be empty.") + return nil, status.Error(codes.InvalidArgument, "Occurrence must not be empty") + } + if o.Name == "" { + log.Printf("Invalid occurrence name: %v", o.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid occurrence name") + } + if o.NoteName == "" { + log.Print("No note is associated with this occurrence") + } + pID, _, err := name.ParseOccurrence(o.Name) + if _, err = g.S.GetProject(pID); err != nil { + log.Printf("Unable to get project %v, err: %v", pID, err) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Project %v not found", pID)) + } + pID, nID, err := name.ParseNote(o.NoteName) + if err != nil { + log.Printf("Invalid note name: %v", o.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid note name") + } + if n, err := g.S.GetNote(pID, nID); n == nil || err != nil { + log.Printf("Unable to getnote %v, err: %v", n, err) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Note %v not found", o.NoteName)) + } + // TODO: Validate that operation exists if it is specified + return o, g.S.CreateOccurrence(o) +} + +// CreateOperation validates that a note is valid and then creates an operation note in the backing datastore. 
+func (g *Grafeas) CreateOperation(ctx context.Context, req *pb.CreateOperationRequest) (*opspb.Operation, error) { + o := req.Operation + if o.Name == "" { + log.Printf("Invalid operation name: %v", o.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid operation name") + } + pID, _, err := name.ParseOperation(o.Name) + if _, err = g.S.GetProject(pID); err != nil { + log.Printf("Unable to get project %v, err: %v", pID, err) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Project %v not found", pID)) + } + return o, g.S.CreateOperation(o) +} + +// DeleteProject deletes a project from the datastore. +func (g *Grafeas) DeleteProject(ctx context.Context, req *pb.DeleteProjectRequest) (*empty.Empty, error) { + pID, err := name.ParseProject(req.Name) + if err != nil { + log.Printf("Error parsing project name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + } + return &empty.Empty{}, g.S.DeleteProject(pID) +} + +// DeleteOccurrence deletes an occurrence from the datastore. +func (g *Grafeas) DeleteOccurrence(ctx context.Context, req *pb.DeleteOccurrenceRequest) (*empty.Empty, error) { + pID, oID, err := name.ParseOccurrence(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid occurrence name") + } + return &empty.Empty{}, g.S.DeleteOccurrence(pID, oID) +} + +// DeleteNote deletes a note from the datastore. +func (g *Grafeas) DeleteNote(ctx context.Context, req *pb.DeleteNoteRequest) (*empty.Empty, error) { + pID, nID, err := name.ParseNote(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid note name") + } + // TODO: Check for occurrences tied to this note, and return an error if there are any before deletion. + return &empty.Empty{}, g.S.DeleteNote(pID, nID) +} + +// DeleteOperation deletes an operation from the datastore. 
+func (g *Grafeas) DeleteOperation(ctx context.Context, req *opspb.DeleteOperationRequest) (*empty.Empty, error) { + pID, oID, err := name.ParseOperation(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Operation name") + } + // TODO: Check for occurrences and notes tied to this operation, and return an error if there are any before deletion. + return &empty.Empty{}, g.S.DeleteOperation(pID, oID) +} + +// GetProject gets a project from the datastore. +func (g *Grafeas) GetProject(ctx context.Context, req *pb.GetProjectRequest) (*pb.Project, error) { + pID, err := name.ParseProject(req.Name) + if err != nil { + log.Printf("Error parsing project name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + } + return g.S.GetProject(pID) +} + +// GetNote gets a note from the datastore. +func (g *Grafeas) GetNote(ctx context.Context, req *pb.GetNoteRequest) (*pb.Note, error) { + pID, nID, err := name.ParseNote(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Note name") + } + return g.S.GetNote(pID, nID) +} + +// GetOccurrence gets a occurrence from the datastore. +func (g *Grafeas) GetOccurrence(ctx context.Context, req *pb.GetOccurrenceRequest) (*pb.Occurrence, error) { + pID, oID, err := name.ParseOccurrence(req.Name) + if err != nil { + log.Printf("Could note parse name %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Could note parse name") + } + return g.S.GetOccurrence(pID, oID) +} + +// GetOperation gets a occurrence from the datastore. 
+func (g *Grafeas) GetOperation(ctx context.Context, req *opspb.GetOperationRequest) (*opspb.Operation, error) { + pID, oID, err := name.ParseOperation(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Operation name") + } + return g.S.GetOperation(pID, oID) +} + +// GetOccurrenceNote gets a the note for the provided occurrence from the datastore. +func (g *Grafeas) GetOccurrenceNote(ctx context.Context, req *pb.GetOccurrenceNoteRequest) (*pb.Note, error) { + pID, oID, err := name.ParseOccurrence(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid occurrence name") + } + o, gErr := g.S.GetOccurrence(pID, oID) + if gErr != nil { + return nil, gErr + } + npID, nID, err := name.ParseNote(o.NoteName) + if err != nil { + log.Printf("Invalid note name: %v", o.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Invalid note name: %v", o.NoteName)) + } + return g.S.GetNote(npID, nID) +} + +func (g *Grafeas) UpdateNote(ctx context.Context, req *pb.UpdateNoteRequest) (*pb.Note, error) { + pID, nID, err := name.ParseNote(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Note name") + } + // get existing note + existing, gErr := g.S.GetNote(pID, nID) + if gErr != nil { + return nil, err + } + // verify that name didnt change + if req.Note.Name != existing.Name { + log.Printf("Cannot change note name: %v", req.Note.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot change note name: %v", req.Note.Name)) + } + + // update note + if gErr = g.S.UpdateNote(pID, nID, req.Note); err != nil { + log.Printf("Cannot update note : %v", gErr) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot change note name: %v", req.Note.Name)) + } + return g.S.GetNote(pID, nID) +} + +func (g 
*Grafeas) UpdateOccurrence(ctx context.Context, req *pb.UpdateOccurrenceRequest) (*pb.Occurrence, error) { + pID, oID, err := name.ParseOccurrence(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid occurrence name") + } + // get existing Occurrence + existing, gErr := g.S.GetOccurrence(pID, oID) + if gErr != nil { + return nil, gErr + } + + // verify that name didnt change + if req.Name != existing.Name { + log.Printf("Cannot change occurrence name: %v", req.Occurrence.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot change occurrence name: %v", req.Occurrence.Name)) + } + // verify that if note name changed, it still exists + if req.Occurrence.NoteName != existing.NoteName { + npID, nID, err := name.ParseNote(req.Occurrence.NoteName) + if err != nil { + return nil, err + } + if newN, err := g.S.GetNote(npID, nID); newN == nil || err != nil { + return nil, err + } + } + + // update Occurrence + if gErr = g.S.UpdateOccurrence(pID, oID, req.Occurrence); gErr != nil { + log.Printf("Cannot update occurrence : %v", req.Occurrence.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot update Occurrences: %v", err)) + } + return g.S.GetOccurrence(pID, oID) +} + +func (g *Grafeas) UpdateOperation(ctx context.Context, req *pb.UpdateOperationRequest) (*opspb.Operation, error) { + pID, oID, err := name.ParseOperation(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Operation name") + } + // get existing operation + existing, gErr := g.S.GetOperation(pID, oID) + if gErr != nil { + return nil, gErr + } + + // verify that operation isn't marked done + if req.Operation.Done != existing.Done && existing.Done { + log.Printf("Trying to update a done operation") + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot update operation in status done: %v", 
req.Name)) + } + + // verify that name didnt change + if req.Operation.Name != existing.Name { + log.Printf("Cannot change operation name: %v", req.Operation.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot change operation name: %v", req.Name)) + } + + // update operation + if gErr = g.S.UpdateOperation(pID, oID, req.Operation); gErr != nil { + log.Printf("Cannot update operation : %v", req.Operation.Name) + return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("Cannot update Opreation: %v", req.Operation.Name)) + } + return g.S.GetOperation(pID, oID) +} + +// ListProjects returns the project id for all projects in the backing datastore. +func (g *Grafeas) ListProjects(ctx context.Context, req *pb.ListProjectsRequest) (*pb.ListProjectsResponse, error) { + // TODO: support filters + ns := g.S.ListProjects(req.Filter) + return &pb.ListProjectsResponse{Projects: ns}, nil +} + +func (g *Grafeas) ListOperations(ctx context.Context, req *opspb.ListOperationsRequest) (*opspb.ListOperationsResponse, error) { + pID, err := name.ParseProject(req.Name) + if err != nil { + log.Printf("Error parsing name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + } + // TODO: support filters + ops := g.S.ListOperations(pID, req.Filter) + return &opspb.ListOperationsResponse{Operations: ops}, nil +} + +func (g *Grafeas) ListNotes(ctx context.Context, req *pb.ListNotesRequest) (*pb.ListNotesResponse, error) { + pID, err := name.ParseProject(req.Parent) + if err != nil { + log.Printf("Error parsing name: %v", req.Parent) + return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + } + + // TODO: support filters + ns := g.S.ListNotes(pID, req.Filter) + return &pb.ListNotesResponse{Notes: ns}, nil + +} + +func (g *Grafeas) ListOccurrences(ctx context.Context, req *pb.ListOccurrencesRequest) (*pb.ListOccurrencesResponse, error) { + pID, err := name.ParseProject(req.Parent) + if err != nil { + 
log.Printf("Error parsing name: %v", req.Parent) + return nil, err + } + + // TODO: support filters - prioritizing resource url + os := g.S.ListOccurrences(pID, req.Filter) + return &pb.ListOccurrencesResponse{Occurrences: os}, nil +} + +func (g *Grafeas) ListNoteOccurrences(ctx context.Context, req *pb.ListNoteOccurrencesRequest) (*pb.ListNoteOccurrencesResponse, error) { + pID, nID, err := name.ParseNote(req.Name) + if err != nil { + log.Printf("Invalid note name: %v", req.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid note name") + } + // TODO: support filters - prioritizing resource url + os, gErr := g.S.ListNoteOccurrences(pID, nID, req.Filter) + if gErr != nil { + return nil, gErr + } + return &pb.ListNoteOccurrencesResponse{Occurrences: os}, nil +} + +func (g *Grafeas) CancelOperation(context.Context, *opspb.CancelOperationRequest) (*empty.Empty, error) { + return nil, status.Error(codes.Unimplemented, "Currently Unimplemented") +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go new file mode 100644 index 00000000..bd6d858c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go @@ -0,0 +1,659 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package v1alpha1 + +import ( + "fmt" + "reflect" + "testing" + + "golang.org/x/net/context" + + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/storage" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/testing" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func createProject(t *testing.T, pID string, ctx context.Context, g Grafeas) { + req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + if _, err := g.CreateProject(ctx, &req); err != nil { + t.Errorf("CreateProject(empty operation): got %v, want success", err) + } +} + +func TestCreateProject(t *testing.T) { + ctx := context.Background() + pID := "myproject" + g := Grafeas{storage.NewMemStore()} + req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + _, err := g.CreateProject(ctx, &req) + if err != nil { + t.Errorf("CreateProject(empty operation): got %v, want success", err) + } + _, err = g.CreateProject(ctx, &req) + if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateProject(empty operation): got %v, want AlreadyExists", err) + } +} + +func TestCreateOperation(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + op := &opspb.Operation{} + req := pb.CreateOperationRequest{Parent: "projects/opp", Operation: op} + if _, err := g.CreateOperation(ctx, &req); err == nil { + t.Error("CreateOperation(empty operation): got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.InvalidArgument { + t.Errorf("CreateOperation(empty operation): got %v, want InvalidArgument", err) + } + pID := "vulnerability-scanner-a" + op = testutil.Operation(pID) + parent := name.FormatProject(pID) + req = pb.CreateOperationRequest{Parent: parent, Operation: op} + if _, err := 
g.CreateOperation(ctx, &req); err == nil { + t.Error("CreateOperation: got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("CreateOperation: got %v, want NotFound)", err) + } + createProject(t, pID, ctx, g) + if _, err := g.CreateOperation(ctx, &req); error(err) != nil { + t.Errorf("CreateOperation(%v) got %#v, want success", op, err) + } +} + +func TestCreateOccurrence(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + parent := name.FormatProject(pID) + createProject(t, pID, ctx, g) + req := &pb.CreateNoteRequest{Parent: parent, Note: n} + if _, err := g.CreateNote(ctx, req); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", req, err) + } + oReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: &pb.Occurrence{}} + if _, err := g.CreateOccurrence(ctx, oReq); err == nil { + t.Error("CreateOccurrence(empty occ): got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.InvalidArgument { + t.Errorf("CreateOccurrence(empty occ): got %v, want InvalidArgument)", err) + } + pID = "occurrence-project" + o := testutil.Occurrence(pID, n.Name) + parent = name.FormatProject(pID) + oReq = &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + // Try to insert an occurrence without first creating its project, expect failure + if _, err := g.CreateOccurrence(ctx, oReq); err == nil { + t.Error("CreateOccurrence: got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("CreateOccurrence: got %v, want NotFound)", err) + } + createProject(t, pID, ctx, g) + if _, err := g.CreateOccurrence(ctx, oReq); err != nil { + t.Errorf("CreateOccurrence(%v) got %v, want success", oReq, err) + } + // Try to insert an occurrence for a note that does not exist. 
+ o.Name = "projects/testproject/occurrences/nonote" + o.NoteName = "projects/scan-provider/notes/notthere" + oReq = &pb.CreateOccurrenceRequest{Parent: "projects/testproject", Occurrence: o} + if _, err := g.CreateOccurrence(ctx, oReq); err == nil { + t.Errorf("CreateOccurrence got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("CreateOccurrence got code %v want %v", err, codes.NotFound) + } +} + +func TestCreateNote(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + n := &pb.Note{} + req := &pb.CreateNoteRequest{Parent: "projects/foo", Note: n} + // Try to insert an empty note, expect failure + if _, err := g.CreateNote(ctx, req); err == nil { + t.Error("CreateNote(empty note): got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.InvalidArgument { + t.Errorf("CreateNote(empty note): got %v, want %v", err, codes.InvalidArgument) + } + // Try to insert an onccurrence without first creating its project, expect failure + pID := "vulnerability-scanner-a" + n = testutil.Note(pID) + parent := name.FormatProject(pID) + req = &pb.CreateNoteRequest{Parent: parent, Note: n} + if _, err := g.CreateNote(ctx, req); err == nil { + t.Error("CreateNote: got success, want error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("CreateNote: got %v, want NotFound)", err) + } + createProject(t, pID, ctx, g) + if _, err := g.CreateNote(ctx, req); err != nil { + t.Errorf("CreateNote(%v) got %v, want success", n, err) + } +} + +func TestDeleteProject(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "myproject" + req := pb.DeleteProjectRequest{Name: name.FormatProject(pID)} + if _, err := g.DeleteProject(ctx, &req); err == nil { + t.Error("DeleteProject: got success, want error") + } + createProject(t, pID, ctx, g) + if _, err := g.DeleteProject(ctx, &req); err != nil { + 
t.Errorf("CreateProject(empty operation): got %v, want success", err) + } +} + +func TestDeleteNote(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + createProject(t, pID, ctx, g) + req := &pb.DeleteNoteRequest{Name: n.Name} + if _, err := g.DeleteNote(ctx, req); err == nil { + t.Error("DeleteNote that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Errorf("CreateNote(%v) got %v, want success", n, err) + } + if _, err := g.DeleteNote(ctx, req); err != nil { + t.Errorf("DeleteNote got %v, want success", err) + } +} + +func TestDeleteOccurrence(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + createProject(t, pID, ctx, g) + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + // CreateNote so we can create an occurrence + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + pID = "occurrence-project" + o := testutil.Occurrence(pID, n.Name) + createProject(t, pID, ctx, g) + + parent = name.FormatProject(pID) + oReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, oReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", n, err) + } + dReq := &pb.DeleteOccurrenceRequest{Name: o.Name} + if _, err := g.DeleteOccurrence(ctx, dReq); err != nil { + t.Errorf("DeleteOccurrence got %v, want success", err) + } +} + +func TestDeleteOperation(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + o := testutil.Operation(pID) + createProject(t, pID, ctx, g) + req := &opspb.DeleteOperationRequest{Name: o.Name} + if 
_, err := g.DeleteOperation(ctx, req); err == nil { + t.Error("DeleteOperation that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + cReq := &pb.CreateOperationRequest{Parent: parent, Operation: o} + if _, err := g.CreateOperation(ctx, cReq); err != nil { + t.Fatalf("CreateOperation(%v) got %v, want success", o, err) + } + if _, err := g.DeleteOperation(ctx, req); err != nil { + t.Errorf("DeleteOperation got %v, want success", err) + } +} + +func TestGetProjects(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "myproject" + req := pb.GetProjectRequest{Name: name.FormatProject(pID)} + if _, err := g.GetProject(ctx, &req); err == nil { + t.Error("GetProject that doesn't exist got success, want err") + } + createProject(t, pID, ctx, g) + if _, err := g.GetProject(ctx, &req); err != nil { + t.Errorf("GetProject: got %v, want success", err) + } +} + +func TestGetNote(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + createProject(t, pID, ctx, g) + req := &pb.GetNoteRequest{Name: n.Name} + if _, err := g.GetNote(ctx, req); err == nil { + t.Error("GetNote that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + if got, err := g.GetNote(ctx, req); err != nil { + t.Fatalf("GetNote(%v) got %v, want success", n, err) + } else if n.Name != got.Name || !reflect.DeepEqual(n.GetVulnerabilityType(), got.GetVulnerabilityType()) { + t.Errorf("GetNote got %v, want %v", *got, n) + } +} + +func TestGetOccurrence(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + createProject(t, pID, ctx, g) + opID := "occurrence-project" + o 
:= testutil.Occurrence(opID, n.Name) + createProject(t, opID, ctx, g) + req := &pb.GetOccurrenceRequest{Name: o.Name} + if _, err := g.GetOccurrence(ctx, req); err == nil { + t.Error("GetOccurrence that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + oParent := name.FormatProject(opID) + ocReq := &pb.CreateOccurrenceRequest{Parent: oParent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, ocReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", n, err) + } + if got, err := g.GetOccurrence(ctx, req); err != nil { + t.Fatalf("GetOccurrence(%v) got %v, want success", o, err) + } else if o.Name != got.Name || !reflect.DeepEqual(o.GetVulnerabilityDetails(), got.GetVulnerabilityDetails()) { + t.Errorf("GetOccurrence got %v, want %v", *got, o) + } +} + +func TestGetOperation(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + o := testutil.Operation(pID) + createProject(t, pID, ctx, g) + req := &opspb.GetOperationRequest{Name: o.Name} + if _, err := g.GetOperation(ctx, req); err == nil { + t.Error("GetOperation that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + cReq := &pb.CreateOperationRequest{Parent: parent, Operation: o} + if _, err := g.CreateOperation(ctx, cReq); err != nil { + t.Fatalf("CreateOperation(%v) got %v, want success", o, err) + } + if got, err := g.GetOperation(ctx, req); err != nil { + t.Fatalf("GetOperation(%v) got %v, want success", o, err) + } else if o.Name != got.Name || !reflect.DeepEqual(got, o) { + t.Errorf("GetOperation got %v, want %v", *got, o) + } +} + +func TestGetOccurrenceNote(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := 
testutil.Note(pID) + createProject(t, pID, ctx, g) + opID := "occurrence-project" + o := testutil.Occurrence(opID, n.Name) + createProject(t, opID, ctx, g) + + req := &pb.GetOccurrenceNoteRequest{Name: o.Name} + if _, err := g.GetOccurrenceNote(ctx, req); err == nil { + t.Error("GetOccurrenceNote that doesn't exist got success, want err") + } + pID, _, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing occurrence name %v", err) + } + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + parent = name.FormatProject(opID) + coReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, coReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", n, err) + } + if got, err := g.GetOccurrenceNote(ctx, req); err != nil { + t.Fatalf("GetOccurrenceNote(%v) got %v, want success", n, err) + } else if n.Name != got.Name || !reflect.DeepEqual(n.GetVulnerabilityType(), got.GetVulnerabilityType()) { + t.Errorf("GetOccurrenceNote got %v, want %v", *got, n) + } +} + +func TestUpdateNote(t *testing.T) { + ctx := context.Background() + // Update Note that doesn't exist + updateDesc := "this is a new description" + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + n := testutil.Note(pID) + createProject(t, pID, ctx, g) + update := testutil.Note(pID) + update.LongDescription = updateDesc + req := &pb.UpdateNoteRequest{Name: n.Name, Note: n} + if _, err := g.UpdateNote(ctx, req); err != nil { + t.Error("UpdateNote that doesn't exist got success, want err") + } + + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: n} + // Actually create note + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + + // Update Note name and fail + 
update.Name = "New name" + req = &pb.UpdateNoteRequest{Name: n.Name, Note: update} + if _, err := g.UpdateNote(ctx, req); err == nil { + t.Error("UpdateNote that with name change got success, want err") + } + + // Update Note and verify that update worked. + update = testutil.Note(pID) + update.LongDescription = updateDesc + req = &pb.UpdateNoteRequest{Name: n.Name, Note: update} + if got, err := g.UpdateNote(ctx, req); err != nil { + t.Errorf("UpdateNote got %v, want success", err) + } else if updateDesc != update.LongDescription { + t.Errorf("UpdateNote got %v, want %v", + got.LongDescription, updateDesc) + } + if got, err := g.GetNote(ctx, &pb.GetNoteRequest{Name: n.Name}); err != nil { + t.Fatalf("GetNote(%v) got %v, want success", n, err) + } else if updateDesc != got.LongDescription { + t.Errorf("GetNote got %v, want %v", got.LongDescription, updateDesc) + } +} + +func TestUpdateOccurrence(t *testing.T) { + ctx := context.Background() + // Update occurrence that doesn't exist + g := Grafeas{storage.NewMemStore()} + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + createProject(t, npID, ctx, g) + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + pID := "occurrence-project" + o := testutil.Occurrence(pID, n.Name) + createProject(t, pID, ctx, g) + + req := &pb.UpdateOccurrenceRequest{Name: o.Name, Occurrence: o} + if _, err := g.UpdateOccurrence(ctx, req); err == nil { + t.Error("UpdateOccurrence that doesn't exist got success, want err") + } + parent := name.FormatProject(pID) + ocReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, ocReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", n, err) + } + // update occurrence name + update := testutil.Occurrence(pID, n.Name) + update.Name = "New name" + req = 
&pb.UpdateOccurrenceRequest{Name: update.Name, Occurrence: update} + if _, err := g.UpdateOccurrence(ctx, req); err == nil { + t.Error("UpdateOccurrence with name change got success, want err") + } + + // update note name to a note that doesn't exist + update = testutil.Occurrence(pID, "projects/p/notes/bar") + req = &pb.UpdateOccurrenceRequest{Name: o.Name, Occurrence: update} + if _, err := g.UpdateOccurrence(ctx, req); err == nil { + t.Error("UpdateOccurrence that with note name that doesn't exist" + + " got success, want err") + } + + // update note name to a note that does exist + n = testutil.Note(npID) + newName := fmt.Sprintf("%v-new", n.Name) + n.Name = newName + + cReq = &pb.CreateNoteRequest{Parent: nParent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + update = testutil.Occurrence(pID, n.Name) + req = &pb.UpdateOccurrenceRequest{Name: o.Name, Occurrence: update} + if got, err := g.UpdateOccurrence(ctx, req); err != nil { + t.Errorf("UpdateOccurrence got %v, want success", err) + } else if n.Name != got.NoteName { + t.Errorf("UpdateOccurrence got %v, want %v", + got.NoteName, n.Name) + } + gReq := &pb.GetOccurrenceRequest{Name: o.Name} + if got, err := g.GetOccurrence(ctx, gReq); err != nil { + t.Fatalf("GetOccurrence(%v) got %v, want success", n, err) + } else if n.Name != got.NoteName { + t.Errorf("GetOccurrence got %v, want %v", + got.NoteName, n.Name) + } +} + +func TestListOccurrences(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + createProject(t, npID, ctx, g) + + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + os := []*pb.Occurrence{} + findProject := "findThese" + createProject(t, findProject, ctx, g) + 
dontFind := "dontFind" + createProject(t, dontFind, ctx, g) + for i := 0; i < 20; i++ { + pID := "_" + o := testutil.Occurrence(pID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, string(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, string(i)) + } + parent := name.FormatProject(pID) + ocReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, ocReq); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + os = append(os, o) + } + + lReq := &pb.ListOccurrencesRequest{Parent: name.FormatProject(findProject)} + resp, lErr := g.ListOccurrences(ctx, lReq) + if lErr != nil { + t.Fatalf("ListOccurrences got %v want success", lErr) + } + if len(resp.Occurrences) != 5 { + t.Errorf("resp.Occurrences got %d, want 5", len(resp.Occurrences)) + } +} + +func TestListProjects(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + var projects []string + for i := 0; i < 20; i++ { + pID := fmt.Sprintf("proj%v", i) + req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + if _, err := g.CreateProject(ctx, &req); err != nil { + t.Errorf("CreateProject: got %v, want success", err) + } + if _, err := g.CreateProject(ctx, &req); err == nil { + t.Errorf("CreateProject: got %v, want InvalidArgument", err) + } + projects = append(projects, name.FormatProject(pID)) + } + req := pb.ListProjectsRequest{} + _, err := g.ListProjects(ctx, &req) + if err != nil { + t.Errorf("ListProjects: got %v, want success", err) + } +} + +func TestListOperations(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + findProject := "findThese" + createProject(t, findProject, ctx, g) + dontFind := "dontFind" + createProject(t, dontFind, ctx, g) + for i := 0; i < 20; i++ { + pID := "vulnerability-scanner-a" + o := testutil.Operation(pID) + if i < 5 { + o.Name = name.FormatOperation(findProject, string(i)) + } else { + o.Name = 
name.FormatOperation(dontFind, string(i)) + } + parent := name.FormatProject(pID) + cReq := &pb.CreateOperationRequest{Parent: parent, Operation: o} + if _, err := g.CreateOperation(ctx, cReq); err != nil { + t.Fatalf("CreateOperation(%v) got %v, want success", o, err) + } + } + + lReq := &opspb.ListOperationsRequest{Name: name.FormatProject(findProject)} + resp, err := g.ListOperations(ctx, lReq) + if err != nil { + t.Fatalf("ListOperations got %v want success", err) + } + if len(resp.Operations) != 5 { + t.Errorf("resp.Operations got %d, want 5", len(resp.Operations)) + } +} + +func TestListNotes(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + findProject := "findThese" + createProject(t, findProject, ctx, g) + dontFind := "dontFind" + createProject(t, dontFind, ctx, g) + for i := 0; i < 20; i++ { + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + if i < 5 { + n.Name = name.FormatNote(findProject, string(i)) + } else { + n.Name = name.FormatNote(dontFind, string(i)) + } + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + } + + req := &pb.ListNotesRequest{Parent: name.FormatProject(findProject)} + resp, err := g.ListNotes(ctx, req) + if err != nil { + t.Fatalf("ListNotes got %v want success", err) + } + if len(resp.Notes) != 5 { + t.Errorf("resp.Notes got %d, want 5", len(resp.Notes)) + } +} + +func TestListNoteOccurrences(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + createProject(t, npID, ctx, g) + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + findProject := "findThese" + createProject(t, 
findProject, ctx, g) + dontFind := "dontFind" + createProject(t, dontFind, ctx, g) + for i := 0; i < 20; i++ { + pID := "_" + o := testutil.Occurrence(pID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, string(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, string(i)) + } + parent := name.FormatProject(pID) + ocReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, ocReq); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + } + // Create an occurrence tied to another note, to make sure we don't find it. + otherN := testutil.Note("") + otherN.Name = "projects/np/notes/not-to-find" + npID, _, err := name.ParseNote(otherN.Name) + if err != nil { + t.Fatalf("Error parsing note name %v", err) + } + nParent = name.FormatProject(npID) + createProject(t, npID, ctx, g) + cReq = &pb.CreateNoteRequest{Parent: nParent, Note: otherN} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + pID := "occurrence-project" + o := testutil.Occurrence(pID, otherN.Name) + createProject(t, pID, ctx, g) + parent := name.FormatProject(pID) + ocReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, ocReq); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + pID, _, err = name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note name %v", err) + } + lReq := &pb.ListNoteOccurrencesRequest{Name: n.Name} + resp, lErr := g.ListNoteOccurrences(ctx, lReq) + if lErr != nil { + t.Fatalf("ListNoteOccurrences got %v want success", err) + } + if len(resp.Occurrences) != 20 { + t.Errorf("resp.Occurrences got %d, want 20", len(resp.Occurrences)) + } +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/storage.go b/vendor/github.com/grafeas/grafeas/server-go/storage.go new file mode 100644 index 00000000..67950dd5 --- /dev/null +++ 
b/vendor/github.com/grafeas/grafeas/server-go/storage.go @@ -0,0 +1,86 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" +) + +// Storager is the interface that a Grafeas storage implementation would provide +type Storager interface { + // CreateProject adds the specified project + CreateProject(pID string) error + + // CreateNote adds the specified note + CreateNote(n *pb.Note) error + + // CreateOccurrence adds the specified occurrence + CreateOccurrence(o *pb.Occurrence) error + + // CreateOperation adds the specified operation + CreateOperation(o *opspb.Operation) error + + // DeleteNote deletes the project with the given pID + DeleteProject(pID string) error + + // DeleteNote deletes the note with the given pID and nID + DeleteNote(pID, nID string) error + + // DeleteOccurrence deletes the occurrence with the given pID and oID + DeleteOccurrence(pID, oID string) error + + // DeleteOperation deletes the operation with the given pID and oID + DeleteOperation(pID, opID string) error + + // GetProject returns the project with the given pID + GetProject(pID string) (*pb.Project, error) + + // GetNote returns the note with project (pID) and note ID (nID) + GetNote(pID, nID string) (*pb.Note, error) + + // GetNoteByOccurrence returns the note attached to 
occurrence with pID and oID + GetNoteByOccurrence(pID, oID string) (*pb.Note, error) + + // GetOccurrence returns the occurrence with pID and oID + GetOccurrence(pID, oID string) (*pb.Occurrence, error) + + // GetOperation returns the operation with pID and oID + GetOperation(pID, opID string) (*opspb.Operation, error) + + // ListProjects returns the project id for all projects + ListProjects(filters string) []*pb.Project + + // ListNoteOccurrences returns the occcurrences on the particular note (nID) for this project (pID) + ListNoteOccurrences(pID, nID, filters string) ([]*pb.Occurrence, error) + + // ListNotes returns the notes for for this project (pID) + ListNotes(pID, filters string) []*pb.Note + + // ListOccurrences returns the occurrences for this project ID (pID) + ListOccurrences(pID, filters string) []*pb.Occurrence + + // ListOperations returns the operations for this project (pID) + ListOperations(pID, filters string) []*opspb.Operation + + // UpdateNote updates the existing note with the given pID and nID + UpdateNote(pID, nID string, n *pb.Note) error + + // UpdateOccurrence updates the existing occurrence with the given projectID and occurrenceID + UpdateOccurrence(pID, oID string, o *pb.Occurrence) error + + // UpdateOperation updates the existing operation with the given pID and nID + UpdateOperation(pID, opID string, op *opspb.Operation) error +} diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json b/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json new file mode 100644 index 00000000..0ae7bdf5 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json @@ -0,0 +1,2007 @@ +{ + "swagger": "2.0", + "info": { + "title": "Grafeas API", + "description": "An API to insert and retrieve annotations on cloud artifacts.", + "version": "0.1", + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + } + }, + "schemes": ["http", "https"], + "paths": { + 
"/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}": { + "get": { + "tags": ["grafeas"], + "operationId": "GetOccurrence", + "description": "Returns the requested occurrence", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrencesId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Occurrence" + } + } + } + }, + "delete": { + "tags": ["grafeas"], + "operationId": "DeleteOccurrence", + "description": "Deletes the given occurrence from the system.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrencesId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Empty" + } + } + } + }, + "put": { + "tags": ["grafeas"], + "operationId": "UpdateOccurrence", + "description": "Updates an existing occurrence.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the occurrence.\nShould be of the form \"projects\/{project_id}\/occurrences\/{occurrence_id}\".", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrencesId", + "description": "Part of `name`. 
See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrence", + "description": "The updated occurrence.", + "in": "body", + "schema": { + "$ref": "#/definitions/Occurrence" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Occurrence" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/occurrences": { + "get": { + "tags": ["grafeas"], + "operationId": "ListOccurrences", + "description": "Lists active occurrences for a given project\/Digest.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. This contains the projectId for example: projects\/{project_id}", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "type": "string" + }, + { + "name": "pageSize", + "description": "Number of occurrences to return in the list.", + "in": "query", + "type": "integer", + "format": "int32" + }, + { + "name": "pageToken", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/ListOccurrencesResponse" + } + } + } + }, + "post": { + "tags": ["grafeas"], + "operationId": "CreateOccurrence", + "description": "Creates a new occurrence.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. 
This field contains the projectId for example: \"projects\/{project_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrence", + "description": "The occurrence to be inserted", + "in": "body", + "schema": { + "$ref": "#/definitions/Occurrence" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Occurrence" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}/notes": { + "get": { + "tags": ["grafeas"], + "operationId": "GetOccurrenceNote", + "description": "Gets the note that this occurrence is attached to.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "occurrencesId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Note" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/notes/{notesId}": { + "get": { + "tags": ["grafeas"], + "operationId": "GetNote", + "description": "Returns the requested occurrence", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "notesId", + "description": "Part of `name`. 
See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Note" + } + } + } + }, + "delete": { + "tags": ["grafeas"], + "operationId": "DeleteNote", + "description": "Deletes the given note from the system.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "notesId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Empty" + } + } + } + }, + "put": { + "tags": ["grafeas"], + "operationId": "UpdateNote", + "description": "Updates an existing note.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the note.\nShould be of the form \"projects\/{project_id}\/notes\/{note_id}\".", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "notesId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "note", + "description": "The updated note.", + "in": "body", + "schema": { + "$ref": "#/definitions/Note" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Note" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/notes": { + "get": { + "tags": ["grafeas"], + "operationId": "ListNotes", + "description": "Lists all notes for a given project.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. 
This field contains the projectId for example:\n\"project\/{project_id}", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "type": "string" + }, + { + "name": "pageSize", + "description": "Number of notes to return in the list.", + "in": "query", + "type": "integer", + "format": "int32" + }, + { + "name": "pageToken", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/ListNotesResponse" + } + } + } + }, + "post": { + "tags": ["grafeas"], + "operationId": "CreateNote", + "description": "Creates a new note.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. This field contains the projectId for example:\n\"project\/{project_id}", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "noteId", + "description": "The ID to use for this note.", + "in": "query", + "type": "string" + }, + { + "name": "note", + "description": "The Note to be inserted", + "in": "body", + "schema": { + "$ref": "#/definitions/Note" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Note" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/notes/{notesId}/occurrences": { + "get": { + "tags": ["grafeas"], + "operationId": "ListNoteOccurrences", + "description": "Lists the names of Occurrences linked to a particular Note.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name field will contain the note name for example:\n \"project\/{project_id}\/notes\/{note_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "notesId", + "description": "Part of `name`. 
See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "type": "string" + }, + { + "name": "pageSize", + "description": "Number of notes to return in the list.", + "in": "query", + "type": "integer", + "format": "int32" + }, + { + "name": "pageToken", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/ListNoteOccurrencesResponse" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/operations": { + "post": { + "tags": ["grafeas"], + "operationId": "CreateOperation", + "description": "Creates a new operation", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. The projectId that this operation should be created under.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "description": "The ID to use for this operation. If empty a random string will be used.", + "in": "query", + "type": "string" + }, + { + "name": "body", + "description": "The request body.", + "in": "body", + "schema": { + "$ref": "#/definitions/CreateOperationRequest" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Operation" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/operations": { + "get": { + "tags": [ + "grafeas" + ], + "operationId": "ListOperations", + "description": "Lists all operations for a given project.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `parent`. 
This field contains the projectId for example:\n\"project\/{project_id}", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "type": "string" + }, + { + "name": "pageSize", + "description": "Number of operations to return in the list.", + "in": "query", + "type": "integer", + "format": "int32" + }, + { + "name": "pageToken", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/ListOperationsResponse" + } + } + } + } + }, + "/v1alpha1/projects/{projectsId}/operations/{operationsId}": { + "get": { + "tags": ["grafeas"], + "operationId": "GetOperation", + "description": "Returns the requested occurrence", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the operation in the form\n\"projects\/{project_id}\/operations\/{operation_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "operationsId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Operation" + } + } + } + }, + "put": { + "tags": ["grafeas"], + "operationId": "UpdateOperation", + "description": "Updates an existing operation returns an error if operation\n does not exist. The only valid operations are to update mark the done bit\nchange the result.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the Operation.\nShould be of the form \"projects\/{project_id}\/operations\/{operation_id}\".", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "operationsId", + "description": "Part of `name`. 
See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "description": "The request body.", + "in": "body", + "schema": { + "$ref": "#/definitions/UpdateOperationRequest" + } + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Operation" + } + } + } + } + }, + "delete": { + "tags": ["grafeas"], + "operationId": "DeleteOperation", + "description": "Deletes the given operation from the system.", + "parameters": [ + { + "name": "projectsId", + "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/operations\/{operation_id}\"", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "operationsId", + "description": "Part of `name`. See documentation of `projectsId`.", + "in": "path", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "description": "Successful operation", + "schema": { + "$ref": "#/definitions/Empty" + } + } + } + } + }, + "definitions": { + "Occurrence": { + "id": "Occurrence", + "description": "Occurrence includes information about analysis occurrences for an image.\n``", + "type": "object", + "properties": { + "name": { + "description": "The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"\n@OutputOnly", + "type": "string" + }, + "resourceUrl": { + "description": "The unique url of the image or container for which the occurrence applies.\nExample: https:\/\/gcr.io\/project\/image@sha256:foo\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "noteName": { + "description": "An analysis note associated with this image, in the form\n\"projects\/{project_id}\/notes\/{note_id}\"\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "kind": { + "description": "This explicitly denotes which of the occurrence details is specified.\nThis 
field can be used as a filter in list requests.\n@OutputOnly", + "type": "string", + "enum": [ + "CUSTOM", + "PACKAGE_VULNERABILITY", + "BUILD_DETAILS", + "IMAGE_BASIS", + "PACKAGE_MANAGER", + "DEPLOYABLE", + "DISCOVERY" + ] + }, + "customDetails": { + "description": "Details of the custom note.", + "$ref": "#/definitions/CustomDetails" + }, + "vulnerabilityDetails": { + "description": "Details of a security vulnerability note.", + "$ref": "#/definitions/VulnerabilityDetails" + }, + "buildDetails": { + "description": "Build details for a verifiable build.", + "$ref": "#/definitions/BuildDetails" + }, + "derivedImage": { + "description": "Describes how this resource derives from the basis\nin the associated note.", + "$ref": "#/definitions/Derived" + }, + "installation": { + "description": "Describes the installation of a package on the linked resource.", + "$ref": "#/definitions/Installation" + }, + "deployment": { + "description": "Describes the deployment of an artifact on a runtime.", + "$ref": "#/definitions/Deployment" + }, + "discovered": { + "description": "Describes the initial scan status for this resource.", + "$ref": "#/definitions/Discovered" + }, + "attestation": { + "description": "Describes an attestation of an artifact.", + "$ref": "#/definitions/Attestation" + }, + "remediation": { + "description": "A description of actions that can be taken to remedy the note", + "type": "string" + }, + "createTime": { + "description": "The time this occurrence was created.\n@OutputOnly", + "type": "string", + "format": "google-datetime" + }, + "updateTime": { + "description": "The time this occurrence was last updated.\n@OutputOnly", + "type": "string", + "format": "google-datetime" + }, + "operationName": { + "description": "The name of the operation that created this note.", + "type": "string" + } + } + }, + "CustomDetails": { + "id": "CustomDetails", + "description": "Details of the custom note type", + "type": "object", + "properties": { + "description": { + 
"description": "A description of location of a custom note.", + "type": "string" + } + } + }, + "VulnerabilityDetails": { + "id": "VulnerabilityDetails", + "description": "Used by Occurrence to point to where the vulnerability exists and how\nto fix it.", + "type": "object", + "properties": { + "affectedLocation": { + "description": "The location of the vulnerability.", + "$ref": "#/definitions/VulnerabilityLocation" + }, + "fixedLocation": { + "description": "The location of the available fix for vulnerability.", + "$ref": "#/definitions/VulnerabilityLocation" + }, + "type": { + "description": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)", + "type": "string" + }, + "severity": { + "description": "The note provider assigned Severity of the vulnerability.\n@OutputOnly", + "type": "string", + "enum": [ + "UNKNOWN", + "MINIMAL", + "LOW", + "MEDIUM", + "HIGH", + "CRITICAL" + ] + }, + "cvssScore": { + "description": "The CVSS score of this vulnerability. CVSS score is on a scale of 0-10\nwhere 0 indicates low severity and 10 indicates high severity.\n@OutputOnly", + "type": "number", + "format": "float" + }, + "packageIssue": { + "description": "The set of affected locations and their fixes (if available) within\nthe associated resource.", + "type": "array", + "items": { + "$ref": "#/definitions/PackageIssue" + } + } + } + }, + "VulnerabilityLocation": { + "id": "VulnerabilityLocation", + "description": "The location of the vulnerability", + "type": "object", + "properties": { + "cpeUri": { + "description": "The cpe_uri in [cpe format] (https:\/\/cpe.mitre.org\/specification\/)\nformat. 
Examples include distro or storage location for vulnerable jar.\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "package": { + "description": "The package being described.", + "type": "string" + }, + "version": { + "description": "The version of the package being described.\nThis field can be used as a filter in list requests.", + "$ref": "#/definitions/Version" + } + } + }, + "Version": { + "id": "Version", + "description": "Version contains structured information about the version of the package.\nFor a discussion of this in Debian\/Ubuntu:\nhttp:\/\/serverfault.com\/questions\/604541\/debian-packages-version-convention\nFor a discussion of this in Redhat\/Fedora\/Centos:\nhttp:\/\/blog.jasonantman.com\/2014\/07\/how-yum-and-rpm-compare-versions\/", + "type": "object", + "properties": { + "epoch": { + "description": "Used to correct mistakes in the version numbering scheme.", + "type": "integer", + "format": "int32" + }, + "name": { + "description": "The main part of the version name.", + "type": "string" + }, + "revision": { + "description": "The iteration of the package build from the above version.", + "type": "string" + }, + "kind": { + "description": "Distinguish between sentinel MIN\/MAX versions and normal versions.\nIf kind is not NORMAL, then the other fields are ignored.", + "type": "string", + "enum": [ + "NORMAL", + "MINIMUM", + "MAXIMUM" + ] + } + } + }, + "PackageIssue": { + "id": "PackageIssue", + "description": "This message wraps a location affected by a vulnerability and its\nassociated fix (if one is available).", + "type": "object", + "properties": { + "affectedLocation": { + "description": "The location of the vulnerability.", + "$ref": "#/definitions/VulnerabilityLocation" + }, + "fixedLocation": { + "description": "The location of the available fix for vulnerability.", + "$ref": "#/definitions/VulnerabilityLocation" + }, + "severityName": { + "description": "The severity (eg: distro assigned severity) for 
this vulnerability.", + "type": "string" + } + } + }, + "BuildDetails": { + "id": "BuildDetails", + "description": "Message encapsulating build provenance details", + "type": "object", + "properties": { + "provenance": { + "description": "The actual provenance", + "$ref": "#/definitions/BuildProvenance" + }, + "provenanceBytes": { + "description": "Serialized json representation of the provenance, used in generating the\nBuildSignature in the corresponding Result. After verifying the signature,\nprovenance_bytes can be unmarshalled and compared to the provenance to\nconfirm that it is unchanged. A base64-encoded string representation of the\nprovenance bytes is used for the signature in order to interoperate with\nopenssl which expects this format for signature verification.\n\nThe serialized form is captured both to avoid ambiguity in how the\nprovenance is marshalled to json as well to prevent incompatibilities with\nfuture changes.", + "type": "string" + } + } + }, + "BuildProvenance": { + "id": "BuildProvenance", + "description": "Provenance of a build. 
Contains all information needed to verify the full\ndetails about the build from source to completion.", + "type": "object", + "properties": { + "id": { + "description": "Unique identifier of the build.", + "type": "string" + }, + "projectId": { + "description": "ID of the project.", + "type": "string" + }, + "projectNum": { + "description": "Numerical ID of the project.", + "type": "string", + "format": "int64" + }, + "commands": { + "description": "Commands requested by the build.", + "type": "array", + "items": { + "$ref": "#/definitions/Command" + } + }, + "builtArtifacts": { + "description": "Output of the build.", + "type": "array", + "items": { + "$ref": "#/definitions/Artifact" + } + }, + "createTime": { + "description": "Time at which the build was created.", + "type": "string", + "format": "google-datetime" + }, + "startTime": { + "description": "Time at which execution of the build was started.", + "type": "string", + "format": "google-datetime" + }, + "finishTime": { + "description": "Time at which execution of the build was finished.", + "type": "string", + "format": "google-datetime" + }, + "userId": { + "description": "GAIA ID of end user who initiated this build; at the time that the\nBuildProvenance is uploaded to Analysis, this will be resolved to the\nprimary e-mail address of the user and stored in the Creator field.", + "type": "string", + "format": "int64" + }, + "creator": { + "description": "E-mail address of the user who initiated this build. 
Note that this was the\nuser's e-mail address at the time the build was initiated; this address may\nnot represent the same end-user for all time.", + "type": "string" + }, + "logsBucket": { + "description": "Google Cloud Storage bucket where logs were written.", + "type": "string" + }, + "sourceProvenance": { + "description": "Details of the Source input to the build.", + "$ref": "#/definitions/Source" + }, + "triggerId": { + "description": "Trigger identifier if the build was triggered automatically; empty if not.", + "type": "string" + }, + "buildOptions": { + "description": "Special options applied to this build. This is a catch-all field where\nbuild providers can enter any desired additional details.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "builderVersion": { + "description": "Version string of the builder at the time this build was executed.", + "type": "string" + } + } + }, + "Command": { + "id": "Command", + "description": "Command describes a step performed as part of the build pipeline.", + "type": "object", + "properties": { + "name": { + "description": "Name of the command, as presented on the command line, or if the command is\npackaged as a Docker container, as presented to `docker pull`.", + "type": "string" + }, + "env": { + "description": "Environment variables set before running this Command.", + "type": "array", + "items": { + "type": "string" + } + }, + "args": { + "description": "Command-line arguments used when executing this Command.", + "type": "array", + "items": { + "type": "string" + } + }, + "dir": { + "description": "Working directory (relative to project source root) used when running\nthis Command.", + "type": "string" + }, + "id": { + "description": "Optional unique identifier for this Command, used in wait_for to reference\nthis Command as a dependency.", + "type": "string" + }, + "waitFor": { + "description": "The ID(s) of the Command(s) that this Command depends on.", + "type": "array", + 
"items": { + "type": "string" + } + } + } + }, + "Artifact": { + "id": "Artifact", + "description": "Artifact describes a build product.", + "type": "object", + "properties": { + "checksum": { + "description": "Hash or checksum value of a binary, or Docker Registry 2.0 digest of a\ncontainer.", + "type": "string" + }, + "id": { + "description": "Artifact ID, if any; for container images, this will be a URL by digest\nlike gcr.io\/projectID\/imagename@sha256:123456", + "type": "string" + }, + "names": { + "description": "Related artifact names. This may be the path to a binary or jar file, or in\nthe case of a container build, the name used to push the container image to\nGoogle Container Registry, as presented to `docker push`. Note that a\nsingle Artifact ID can have multiple names, for example if two tags are\napplied to one image.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Source": { + "id": "Source", + "description": "Source describes the location of the source used for the build.", + "type": "object", + "properties": { + "storageSource": { + "description": "If provided, get the source from this location in in Google Cloud\nStorage.", + "$ref": "#/definitions/StorageSource" + }, + "repoSource": { + "description": "If provided, get source from this location in a Cloud Repo.", + "$ref": "#/definitions/RepoSource" + }, + "artifactStorageSource": { + "description": "If provided, the input binary artifacts for the build came from this\nlocation.", + "$ref": "#/definitions/StorageSource" + }, + "sourceContext": { + "description": "If provided, the source code used for the build came from this location.", + "$ref": "#/definitions/ExtendedSourceContext" + }, + "additionalSourceContexts": { + "description": "If provided, some of the source code used for the build may be found in\nthese locations, in the case where the source repository had multiple\nremotes or submodules. 
This list will not include the context specified in\nthe source_context field.", + "type": "array", + "items": { + "$ref": "#/definitions/ExtendedSourceContext" + } + }, + "fileHashes": { + "description": "Hash(es) of the build source, which can be used to verify that the original\nsource integrity was maintained in the build.\n\nThe keys to this map are file paths used as build source and the values\ncontain the hash values for those files.\n\nIf the build source came in a single package such as a gzipped tarfile\n(.tar.gz), the FileHash will be for the single path to that file.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/FileHashes" + } + } + } + }, + "StorageSource": { + "id": "StorageSource", + "description": "StorageSource describes the location of the source in an archive file in\nGoogle Cloud Storage.", + "type": "object", + "properties": { + "bucket": { + "description": "Google Cloud Storage bucket containing source (see [Bucket Name\nRequirements]\n(https:\/\/cloud.google.com\/storage\/docs\/bucket-naming#requirements)).", + "type": "string" + }, + "object": { + "description": "Google Cloud Storage object containing source.", + "type": "string" + }, + "generation": { + "description": "Google Cloud Storage generation for the object.", + "type": "string", + "format": "int64" + } + } + }, + "RepoSource": { + "id": "RepoSource", + "description": "RepoSource describes the location of the source in a Google Cloud Source\nRepository.", + "type": "object", + "properties": { + "projectId": { + "description": "ID of the project that owns the repo.", + "type": "string" + }, + "repoName": { + "description": "Name of the repo.", + "type": "string" + }, + "branchName": { + "description": "Name of the branch to build.", + "type": "string" + }, + "tagName": { + "description": "Name of the tag to build.", + "type": "string" + }, + "commitSha": { + "description": "Explicit commit SHA to build.", + "type": "string" + } + } + }, + 
"ExtendedSourceContext": { + "id": "ExtendedSourceContext", + "description": "An ExtendedSourceContext is a SourceContext combined with additional\ndetails describing the context.", + "type": "object", + "properties": { + "context": { + "description": "Any source context.", + "$ref": "#/definitions/SourceContext" + }, + "labels": { + "description": "Labels with user defined metadata.", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SourceContext": { + "id": "SourceContext", + "description": "A SourceContext is a reference to a tree of files. A SourceContext together\nwith a path point to a unique revision of a single file or directory.", + "type": "object", + "properties": { + "cloudRepo": { + "description": "A SourceContext referring to a revision in a cloud repo.", + "$ref": "#/definitions/CloudRepoSourceContext" + }, + "cloudWorkspace": { + "description": "A SourceContext referring to a snapshot in a cloud workspace.", + "$ref": "#/definitions/CloudWorkspaceSourceContext" + }, + "gerrit": { + "description": "A SourceContext referring to a Gerrit project.", + "$ref": "#/definitions/GerritSourceContext" + }, + "git": { + "description": "A SourceContext referring to any third party Git repo (e.g. 
GitHub).", + "$ref": "#/definitions/GitSourceContext" + } + } + }, + "CloudRepoSourceContext": { + "id": "CloudRepoSourceContext", + "description": "A CloudRepoSourceContext denotes a particular revision in a cloud\nrepo (a repo hosted by the Google Cloud Platform).", + "type": "object", + "properties": { + "repoId": { + "description": "The ID of the repo.", + "$ref": "#/definitions/RepoId" + }, + "revisionId": { + "description": "A revision ID.", + "type": "string" + }, + "aliasName": { + "description": "The name of an alias (branch, tag, etc.).", + "type": "string" + }, + "aliasContext": { + "description": "An alias, which may be a branch or tag.", + "$ref": "#/definitions/AliasContext" + } + } + }, + "RepoId": { + "id": "RepoId", + "description": "A unique identifier for a cloud repo.", + "type": "object", + "properties": { + "projectRepoId": { + "description": "A combination of a project ID and a repo name.", + "$ref": "#/definitions/ProjectRepoId" + }, + "uid": { + "description": "A server-assigned, globally unique identifier.", + "type": "string" + } + } + }, + "ProjectRepoId": { + "id": "ProjectRepoId", + "description": "Selects a repo using a Google Cloud Platform project ID\n(e.g. winged-cargo-31) and a repo name within that project.", + "type": "object", + "properties": { + "projectId": { + "description": "The ID of the project.", + "type": "string" + }, + "repoName": { + "description": "The name of the repo. 
Leave empty for the default repo.", + "type": "string" + } + } + }, + "AliasContext": { + "id": "AliasContext", + "description": "An alias to a repo revision.", + "type": "object", + "properties": { + "kind": { + "description": "The alias kind.", + "type": "string", + "enum": [ + "ANY", + "FIXED", + "MOVABLE", + "OTHER" + ] + }, + "name": { + "description": "The alias name.", + "type": "string" + } + } + }, + "CloudWorkspaceSourceContext": { + "id": "CloudWorkspaceSourceContext", + "description": "A CloudWorkspaceSourceContext denotes a workspace at a particular snapshot.", + "type": "object", + "properties": { + "workspaceId": { + "description": "The ID of the workspace.", + "$ref": "#/definitions/CloudWorkspaceId" + }, + "snapshotId": { + "description": "The ID of the snapshot.\nAn empty snapshot_id refers to the most recent snapshot.", + "type": "string" + } + } + }, + "CloudWorkspaceId": { + "id": "CloudWorkspaceId", + "description": "A CloudWorkspaceId is a unique identifier for a cloud workspace.\nA cloud workspace is a place associated with a repo where modified files\ncan be stored before they are committed.", + "type": "object", + "properties": { + "repoId": { + "description": "The ID of the repo containing the workspace.", + "$ref": "#/definitions/RepoId" + }, + "name": { + "description": "The unique name of the workspace within the repo. This is the name\nchosen by the client in the Source API's CreateWorkspace method.", + "type": "string" + } + } + }, + "GerritSourceContext": { + "id": "GerritSourceContext", + "description": "A SourceContext referring to a Gerrit project.", + "type": "object", + "properties": { + "hostUri": { + "description": "The URI of a running Gerrit instance.", + "type": "string" + }, + "gerritProject": { + "description": "The full project name within the host. 
Projects may be nested, so\n\"project\/subproject\" is a valid project name.\nThe \"repo name\" is hostURI\/project.", + "type": "string" + }, + "revisionId": { + "description": "A revision (commit) ID.", + "type": "string" + }, + "aliasName": { + "description": "The name of an alias (branch, tag, etc.).", + "type": "string" + }, + "aliasContext": { + "description": "An alias, which may be a branch or tag.", + "$ref": "#/definitions/AliasContext" + } + } + }, + "GitSourceContext": { + "id": "GitSourceContext", + "description": "A GitSourceContext denotes a particular revision in a third party Git\nrepository (e.g. GitHub).", + "type": "object", + "properties": { + "url": { + "description": "Git repository URL.", + "type": "string" + }, + "revisionId": { + "description": "Git commit hash.\nrequired.", + "type": "string" + } + } + }, + "FileHashes": { + "id": "FileHashes", + "description": "Container message for hashes of byte content of files, used in Source\nmessages to verify integrity of source input to the build.", + "type": "object", + "properties": { + "fileHash": { + "description": "Collection of file hashes.", + "type": "array", + "items": { + "$ref": "#/definitions/Hash" + } + } + } + }, + "Hash": { + "id": "Hash", + "description": "Container message for hash values.", + "type": "object", + "properties": { + "type": { + "description": "The type of hash that was performed.", + "type": "string", + "enum": [ + "NONE", + "SHA256", + "MD5" + ] + }, + "value": { + "description": "The hash value.", + "type": "string", + "format": "byte" + } + } + }, + "Derived": { + "id": "Derived", + "description": "Derived describes the derived image portion (Occurrence) of the\nDockerImage relationship. 
This image would be produced from a Dockerfile\nwith FROM .", + "type": "object", + "properties": { + "fingerprint": { + "description": "The fingerprint of the derived image", + "$ref": "#/definitions/Fingerprint" + }, + "distance": { + "description": "The number of layers by which this image differs from\nthe associated image basis.\n@OutputOnly", + "type": "integer", + "format": "uint32" + }, + "layerInfo": { + "description": "This contains layer-specific metadata, if populated it\nhas length \u201Cdistance\u201D and is ordered with [distance] being the\nlayer immediately following the base image and [1]\nbeing the final layer.", + "type": "array", + "items": { + "$ref": "#/definitions/Layer" + } + }, + "baseResourceUrl": { + "description": "This contains the base image url for the derived image Occurrence\n@OutputOnly", + "type": "string" + } + } + }, + "Fingerprint": { + "id": "Fingerprint", + "description": "A set of properties that uniquely identify a given Docker image.", + "type": "object", + "properties": { + "v1Name": { + "description": "The layer-id of the final layer in the Docker image\u2019s v1\nrepresentation.\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "v2Blob": { + "description": "The ordered list of v2 blobs that represent a given image.", + "type": "array", + "items": { + "type": "string" + } + }, + "v2Name": { + "description": "The name of the image\u2019s v2 blobs computed via:\n [bottom] := v2_blobbottom := sha256(v2_blob[N] + \u201C \u201D + v2_name[N+1])\nOnly the name of the final blob is kept.\nThis field can be used as a filter in list requests.\n@OutputOnly", + "type": "string" + } + } + }, + "Layer": { + "id": "Layer", + "description": "Layer holds metadata specific to a layer of a Docker image.", + "type": "object", + "properties": { + "directive": { + "description": "The recovered Dockerfile directive used to construct this layer.", + "type": "string", + "enum": [ + "UNKNOWN_DIRECTIVE", + 
"MAINTAINER", + "RUN", + "CMD", + "LABEL", + "EXPOSE", + "ENV", + "ADD", + "COPY", + "ENTRYPOINT", + "VOLUME", + "USER", + "WORKDIR", + "ARG", + "ONBUILD", + "STOPSIGNAL", + "HEALTHCHECK", + "SHELL" + ] + }, + "arguments": { + "description": "The recovered arguments to the Dockerfile directive.", + "type": "string" + } + } + }, + "Installation": { + "id": "Installation", + "description": "This represents how a particular software package may be installed on\na system.", + "type": "object", + "properties": { + "name": { + "description": "The name of the installed package.\n@OutputOnly", + "type": "string" + }, + "location": { + "description": "All of the places within the filesystem versions of this package\nhave been found.", + "type": "array", + "items": { + "$ref": "#/definitions/Location" + } + } + } + }, + "Location": { + "id": "Location", + "description": "An occurrence of a particular package installation found within a\nsystem's filesystem.\ne.g. glibc was found in \/var\/lib\/dpkg\/status", + "type": "object", + "properties": { + "cpeUri": { + "description": "The cpe_uri in [cpe format](https:\/\/cpe.mitre.org\/specification\/)\ndenoting the package manager version distributing a package.", + "type": "string" + }, + "version": { + "description": "The version installed at this location.", + "$ref": "#/definitions/Version" + }, + "path": { + "description": "The path from which we gathered that this package\/version is installed.", + "type": "string" + } + } + }, + "Deployment": { + "id": "Deployment", + "description": "The period during which some deployable was active in a runtime.", + "type": "object", + "properties": { + "userEmail": { + "description": "Identity of the user that triggered this deployment.", + "type": "string" + }, + "deployTime": { + "description": "Beginning of the lifetime of this deployment.", + "type": "string", + "format": "google-datetime" + }, + "undeployTime": { + "description": "End of the lifetime of this deployment.", + "type": 
"string", + "format": "google-datetime" + }, + "config": { + "description": "Configuration used to create this deployment.", + "type": "object", + "additionalProperties": { + "type": "string", + "description": "Properties of the object. Contains field @type with type URL." + } + }, + "address": { + "description": "Address of the runtime element hosting this deployment.", + "type": "string" + } + } + }, + "Discovered": { + "id": "Discovered", + "description": "Provides information about the scan status of a discovered resource.", + "type": "object", + "properties": { + "operation": { + "description": "An operation that indicates the status of the current scan.\n@OutputOnly", + "$ref": "#/definitions/Operation" + } + } + }, + "Operation": { + "id": "Operation", + "description": "This resource represents a long-running operation that is the result of a\nnetwork API call.", + "type": "object", + "properties": { + "name": { + "description": "The server-assigned name, which is only unique within the same service that\noriginally returns it. If you use the default HTTP mapping, the\n`name` should have the format of `operations\/some\/unique\/name`.", + "type": "string" + }, + "metadata": { + "description": "Service-specific metadata associated with the operation. It typically\ncontains progress information and common metadata such as create time.\nSome services might not provide such metadata. Any method that returns a\nlong-running operation should document the metadata type, if any.", + "type": "object", + "additionalProperties": { + "type": "string", + "description": "Properties of the object. Contains field @type with type URL." 
+ } + }, + "done": { + "description": "If the value is `false`, it means the operation is still in progress.\nIf true, the operation is completed, and either `error` or `response` is\navailable.", + "type": "boolean" + }, + "error": { + "description": "The error result of the operation in case of failure or cancellation.", + "$ref": "#/definitions/Status" + }, + "response": { + "description": "The normal response of the operation in case of success. If the original\nmethod returns no data on success, such as `Delete`, the response is\n`google.protobuf.Empty`. If the original method is standard\n`Get`\/`Create`\/`Update`, the response should be the resource. For other\nmethods, the response should have the type `XxxResponse`, where `Xxx`\nis the original method name. For example, if the original method name\nis `TakeSnapshot()`, the inferred response type is\n`TakeSnapshotResponse`.", + "type": "object", + "additionalProperties": { + "type": "string", + "description": "Properties of the object. Contains field @type with type URL." + } + } + } + }, + "Status": { + "id": "Status", + "description": "The `Status` type defines a logical error model that is suitable for different\nprogramming environments, including REST APIs and RPC APIs. It is used by\n[gRPC](https:\/\/github.com\/grpc). The error model is designed to be:\n\n- Simple to use and understand for most users\n- Flexible enough to meet unexpected needs\n\n# Overview\n\nThe `Status` message contains three pieces of data: error code, error message,\nand error details. The error code should be an enum value of\ngoogle.rpc.Code, but it may accept additional error codes if needed. The\nerror message should be a developer-facing English message that helps\ndevelopers *understand* and *resolve* the error. If a localized user-facing\nerror message is needed, put the localized message in the error details or\nlocalize it in the client. The optional error details may contain arbitrary\ninformation about the error. 
There is a predefined set of error detail types\nin the package `google.rpc` that can be used for common error conditions.\n\n# Language mapping\n\nThe `Status` message is the logical representation of the error model, but it\nis not necessarily the actual wire format. When the `Status` message is\nexposed in different client libraries and different wire protocols, it can be\nmapped differently. For example, it will likely be mapped to some exceptions\nin Java, but more likely mapped to some error codes in C.\n\n# Other uses\n\nThe error model and the `Status` message can be used in a variety of\nenvironments, either with or without APIs, to provide a\nconsistent developer experience across different environments.\n\nExample uses of this error model include:\n\n- Partial errors. If a service needs to return partial errors to the client,\n it may embed the `Status` in the normal response to indicate the partial\n errors.\n\n- Workflow errors. A typical workflow has multiple steps. Each step may\n have a `Status` message for error reporting.\n\n- Batch operations. If a client uses batch request and batch response, the\n `Status` message should be used directly inside batch response, one for\n each error sub-response.\n\n- Asynchronous operations. If an API call embeds asynchronous operation\n results in its response, the status of those operations should be\n represented directly using the `Status` message.\n\n- Logging. If some API errors are stored in logs, the message `Status` could\n be used directly after any stripping needed for security\/privacy reasons.", + "type": "object", + "properties": { + "code": { + "description": "The status code, which should be an enum value of google.rpc.Code.", + "type": "integer", + "format": "int32" + }, + "message": { + "description": "A developer-facing error message, which should be in English. 
Any\nuser-facing error message should be localized and sent in the\ngoogle.rpc.Status.details field, or localized by the client.", + "type": "string" + }, + "details": { + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use.", + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "type": "string", + "description": "Properties of the object. Contains field @type with type URL." + } + } + } + } + }, + "Attestation": { + "id": "Attestation", + "description": "Occurrence that represents a single \"attestation\". The authenticity of an\nAttestation can be verified using the attached signature. If the verifier\ntrusts the public key of the signer, then verifying the signature is\nsufficient to establish trust. In this circumstance, the\nAttestationAuthority to which this Attestation is attached is primarily\nuseful for look-up (how to find this Attestation if you already know the\nAuthority and artifact to be verified) and intent (which authority was this\nattestation intended to sign for).", + "type": "object", + "properties": { + "pgpSignedAttestation": { + + "$ref": "#/definitions/PgpSignedAttestation" + } + } + }, + "PgpSignedAttestation": { + "id": "PgpSignedAttestation", + "description": "An attestation wrapper with a PGP-compatible signature.\nThis message only supports ATTACHED signatures, where the payload that is\nsigned is included alongside the signature itself in the same file.", + "type": "object", + "properties": { + "signature": { + "description": "The raw content of the signature, as output by gpg or equivalent. Since\nthis message only supports attached signatures, the payload that was signed\nmust be attached. 
While the signature format supported is dependent on the\nverification implementation, currently only ASCII-armored (`--armor` to\ngpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are\nsupported.\nConcretely, `gpg --sign --armor --output=signature.gpg payload.json` will\ncreate the signature content expected in this field in `signature.gpg` for\nthe `payload.json` attestation payload.", + "type": "string" + }, + "contentType": { + "description": "Type (e.g. schema) of the attestation payload that was signed.\nThe verifier must ensure that the provided type is one that the verifier\nsupports, and that the attestation payload is a valid instantiation of that\ntype (e.g. by validating a JSON schema).", + "type": "string", + "enum": [ + "UNSET", + "SIMPLE_SIGNING_JSON" + ] + }, + "pgpKeyId": { + "description": "The ID of the key, as output by `gpg --list-keys`. This should be 8\nhexadecimal digits, capitalized. e.g.\n$ gpg --list-keys pub\n2048R\/A663AEEA 2017-08-01 ui Fake Name\n\nIn the above example, the `key_id` is \"A663AEEA\".\nNote that in practice this ID is the last 64 bits of the key fingerprint.", + "type": "string" + } + } + }, + "ListOccurrencesResponse": { + "id": "ListOccurrencesResponse", + "description": "Response including listed active occurrences.", + "type": "object", + "properties": { + "occurrences": { + "description": "The occurrences requested.", + "type": "array", + "items": { + "$ref": "#/definitions/Occurrence" + } + }, + "nextPageToken": { + "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. 
An empty value means no more results.", + "type": "string" + } + } + }, + "ListOperationsResponse": { + "id": "ListOperationsResponse", + "description": "Response including listed operations.", + "type": "object", + "properties": { + "operations": { + "description": "The operations requested.", + "type": "array", + "items": { + "$ref": "#/definitions/Operation" + } + }, + "nextPageToken": { + "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. An empty value means no more results.", + "type": "string" + } + } + }, + "Empty": { + "id": "Empty", + "description": "A generic empty message that you can re-use to avoid defining duplicated\nempty messages in your APIs. A typical example is to use it as the request\nor the response type of an API method. For instance:\n\n service Foo {\n rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);\n }\n\nThe JSON representation for `Empty` is empty JSON object `{}`.", + "type": "object", + "properties": { + } + }, + "Note": { + "id": "Note", + "description": "Note provides a detailed description of a note using information\nfrom the provider of the note.", + "type": "object", + "properties": { + "name": { + "description": "The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", + "type": "string" + }, + "shortDescription": { + "description": "A one sentence description of this note", + "type": "string" + }, + "longDescription": { + "description": "A detailed description of this note", + "type": "string" + }, + "kind": { + "description": "This explicitly denotes which kind of note is specified.\nThis field can be used as a filter in list requests.\n@OutputOnly", + "type": "string", + "enum": [ + "CUSTOM", + "PACKAGE_VULNERABILITY", + "BUILD_DETAILS", + "IMAGE_BASIS", + "PACKAGE_MANAGER", + "DEPLOYABLE", + "DISCOVERY", + "ATTESTATION_AUTHORITY" + ] + }, + "vulnerabilityType": { + "description": "A package vulnerability type of 
note.", + "$ref": "#/definitions/VulnerabilityType" + }, + "buildType": { + "description": "Build provenance type for a verifiable build.", + "$ref": "#/definitions/BuildType" + }, + "baseImage": { + "description": "A note describing a base image.", + "$ref": "#/definitions/Basis" + }, + "package": { + "description": "A note describing a package hosted by various package managers.", + "$ref": "#/definitions/Package" + }, + "deployable": { + "description": "A note describing something that can be deployed.", + "$ref": "#/definitions/Deployable" + }, + "discovery": { + "description": "A note describing a project\/analysis type.", + "$ref": "#/definitions/Discovery" + }, + "attestationAuthority": { + "description": "A note describing an attestation role.", + "$ref": "#/definitions/AttestationAuthority" + }, + + "relatedUrl": { + "description": "Urls associated with this note", + "type": "array", + "items": { + "$ref": "#/definitions/RelatedUrl" + } + }, + "expirationTime": { + "description": "Time of expiration for this Note, null if Note currently does not\nexpire.", + "type": "string", + "format": "google-datetime" + }, + "createTime": { + "description": "The time this note was created.\nThis field can be used as a filter in list requests.\n@OutputOnly", + "type": "string", + "format": "google-datetime" + }, + "updateTime": { + "description": "The time this note was last updated.\nThis field can be used as a filter in list requests.\n@OutputOnly", + "type": "string", + "format": "google-datetime" + }, + "operationName": { + "description": "The name of the operation that created this note.", + "type": "string" + }, + "relatedNoteNames": { + "description": "Other notes related to this note.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "VulnerabilityType": { + "id": "VulnerabilityType", + "description": "VulnerabilityType provides metadata about a security vulnerability.", + "type": "object", + "properties": { + "cvssScore": { + 
"description": "The CVSS score for this Vulnerability.", + "type": "number", + "format": "float" + }, + "severity": { + "description": "Note provider assigned impact of the vulnerability", + "type": "string", + "enum": [ + "UNKNOWN", + "MINIMAL", + "LOW", + "MEDIUM", + "HIGH", + "CRITICAL" + ] + }, + "package_type": { + "description": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)", + "type": "string" + }, + "details": { + "description": "All information about the package to specifically identify this\nvulnerability. One entry per (version range and cpe_uri) the\npackage vulnerability has manifested in.", + "type": "array", + "items": { + "$ref": "#/definitions/Detail" + } + } + } + }, + "Detail": { + "id": "Detail", + "description": "Identifies all occurrences of this vulnerability in the package for a\nspecific distro\/location\nFor example: glibc in cpe:\/o:debian:debian_linux:8 for versions 2.1 - 2.2", + "type": "object", + "properties": { + "cpeUri": { + "description": "The cpe_uri in [cpe format] (https:\/\/cpe.mitre.org\/specification\/) in\nwhich the vulnerability manifests. 
Examples include distro or storage\nlocation for vulnerable jar.\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "package": { + "description": "The name of the package where the vulnerability was found.\nThis field can be used as a filter in list requests.", + "type": "string" + }, + "minAffectedVersion": { + "description": "The min version of the package in which the vulnerability exists.", + "$ref": "#/definitions/Version" + }, + "maxAffectedVersion": { + "description": "The max version of the package in which the vulnerability exists.\nThis field can be used as a filter in list requests.", + "$ref": "#/definitions/Version" + }, + "severityName": { + "description": "The severity (eg: distro assigned severity) for this vulnerability.", + "type": "string" + }, + "description": { + "description": "A vendor-specific description of this note.", + "type": "string" + }, + "fixedLocation": { + "description": "The fix for this specific package version.", + "$ref": "#/definitions/VulnerabilityLocation" + } + } + }, + "BuildType": { + "id": "BuildType", + "description": "Note holding the version of the provider's builder and the signature of\nthe provenance message in linked BuildDetails.", + "type": "object", + "properties": { + "builderVersion": { + "description": "Version of the builder which produced this Note.", + "type": "string" + }, + "signature": { + "description": "Signature of the build in Occurrences pointing to the Note containing this\nBuilderDetails.", + "$ref": "#/definitions/BuildSignature" + } + } + }, + "BuildSignature": { + "id": "BuildSignature", + "description": "Message encapsulating signature of the verified build", + "type": "object", + "properties": { + "publicKey": { + "description": "Public key of the builder which can be used to verify that related\nFindings are valid and unchanged. 
If `key_type` is empty this defaults\nto PEM encoded public keys.\n\nThis field may be empty if `key_id` references an external key.\n\nFor Cloud Container Builder based signatures this is a PEM encoded public\nkey. To verify the Cloud Container Builder signature, place the contents of\nthis field into a file (public.pem). The signature field is base64-decoded\ninto its binary representation in signature.bin, and the provenance bytes\nfrom BuildDetails are base64-decoded into a binary representation in\nsigned.bin. OpenSSL can then verify the signature:\n`openssl sha256 -verify public.pem -signature signature.bin signed.bin`", + "type": "string" + }, + "signature": { + "description": "Signature of the related BuildProvenance, encoded in a base64 string.", + "type": "string" + }, + "keyId": { + "description": "An ID for the key used to sign. This could be either an ID for the key\nstored in `public_key` (e.g., the ID or fingerprint for a PGP key, or the\nCN for a cert), or a reference to an external key (e.g., a reference to a\nkey in Cloud KMS).", + "type": "string" + }, + "keyType": { + "description": "The type of the key, either stored in `public_key` or referenced in\n`key_id`", + "type": "string", + "enum": [ + "UNSET", + "PGP_ASCII_ARMORED", + "PKIX_PEM" + ] + } + } + }, + "Basis": { + "id": "Basis", + "description": "Basis describes the base image portion (Note) of the DockerImage\nrelationship. Linked occurrences are derived from this or an\nequivalent image via:\n FROM \nOr an equivalent reference, e.g. 
a tag of the resource_url.", + "type": "object", + "properties": { + "resourceUrl": { + "description": "The resource_url for the resource representing the basis of\nassociated occurrence images.", + "type": "string" + }, + "fingerprint": { + "description": "The fingerprint of the base image", + "$ref": "#/definitions/Fingerprint" + } + } + }, + "Package": { + "id": "Package", + "description": "This represents a particular package that is distributed over\nvarious channels.\ne.g. glibc (aka libc6) is distributed by many, at various versions.", + "type": "object", + "properties": { + "name": { + "description": "The name of the package.", + "type": "string" + }, + "distribution": { + "description": "The various channels by which a package is distributed.", + "type": "array", + "items": { + "$ref": "#/definitions/Distribution" + } + } + } + }, + "Distribution": { + "id": "Distribution", + "description": "This represents a particular channel of distribution for a given package.\ne.g. Debian's jessie-backports dpkg mirror", + "type": "object", + "properties": { + "cpeUri": { + "description": "The cpe_uri in [cpe format](https:\/\/cpe.mitre.org\/specification\/)\ndenoting the package manager version distributing a package.", + "type": "string" + }, + "architecture": { + "description": "The CPU architecture for which packages in this distribution\nchannel were built", + "type": "string", + "enum": [ + "UNKNOWN", + "X86", + "X64" + ] + }, + "latestVersion": { + "description": "The latest available version of this package in\nthis distribution channel.", + "$ref": "#/definitions/Version" + }, + "maintainer": { + "description": "A freeform string denoting the maintainer of this package.", + "type": "string" + }, + "url": { + "description": "The distribution channel-specific homepage for this package.", + "type": "string" + }, + "description": { + "description": "The distribution channel-specific description of this package.", + "type": "string" + } + } + }, + "Deployable": { 
+ "id": "Deployable", + "description": "An artifact that can be deployed in some runtime.", + "type": "object", + "properties": { + "resourceUri": { + "description": "Resource URI for the artifact being deployed.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Discovery": { + "id": "Discovery", + "description": "Note that indicates a type of analysis and exists in a provider project to\nindicate the status of an analysis on a resource. Absence of an occurrence\nlinked to this note for a resource indicates that analysis hasn't started.", + "type": "object", + "properties": { + "analysisKind": { + "description": "The kind of analysis that is handled by this discovery.", + "type": "string", + "enum": [ + "UNKNOWN", + "CUSTOM", + "PACKAGE_VULNERABILITY", + "BUILD_DETAILS", + "IMAGE_BASIS", + "PACKAGE_MANAGER", + "DEPLOYABLE", + "DISCOVERY", + "ATTESTATION_AUTHORITY" + ] + } + } + }, + "AttestationAuthority": { + "id": "AttestationAuthority", + "description": "Note kind that represents a logical attestation \"role\" or \"authority\". For\nexample, an organization might have one AttestationAuthority for \"QA\" and one\nfor \"build\". This Note is intended to act strictly as a grouping mechanism\nfor the attached Occurrences (Attestations). This grouping mechanism also\nprovides a security boundary, since IAM ACLs gate the ability for a principle\nto attach an Occurrence to a given Note. It also provides a single point of\nlookup to find all attached Attestation Occurrences, even if they don't all\nlive in the same project.", + "type": "object", + "properties": { + "hint": { + + "$ref": "#/definitions/AttestationAuthorityHint" + } + } + }, + "AttestationAuthorityHint": { + "id": "AttestationAuthorityHint", + "description": "This submessage provides human-readable hints about the purpose of the\nAttestationAuthority. 
Because the name of a Note acts as its resource\nreference, it is important to disambiguate the canonical name of the Note\n(which might be a UUID for security purposes) from \"readable\" names more\nsuitable for debug output. Note that these hints should NOT be used to\nlook up AttestationAuthorities in security sensitive contexts, such as when\nlooking up Attestations to verify.", + "type": "object", + "properties": { + "humanReadableName": { + "description": "The human readable name of this Attestation Authority, e.g. \"qa\".", + "type": "string" + } + } + }, + "RelatedUrl": { + "id": "RelatedUrl", + "description": "Metadata for any related url information", + "type": "object", + "properties": { + "url": { + "description": "Specific url to associate with the note", + "type": "string" + }, + "label": { + "description": "Label to describe usage of the url", + "type": "string" + } + } + }, + "ListNotesResponse": { + "id": "ListNotesResponse", + "description": "Response including listed notes.", + "type": "object", + "properties": { + "notes": { + "description": "The occurrences requested", + "type": "array", + "items": { + "$ref": "#/definitions/Note" + } + }, + "nextPageToken": { + "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. 
An empty value means no more result.", + "type": "string" + } + } + }, + "ListNoteOccurrencesResponse": { + "id": "ListNoteOccurrencesResponse", + "description": "Response including listed occurrences for a note.", + "type": "object", + "properties": { + "occurrences": { + "description": "The occurrences attached to the specified note.", + "type": "array", + "items": { + "$ref": "#/definitions/Occurrence" + } + }, + "nextPageToken": { + "description": "Token to receive the next page of notes.", + "type": "string" + } + } + }, + "CreateOperationRequest": { + "id": "CreateOperationRequest", + "description": "Request for creating an operation", + "type": "object", + "properties": { + "operationId": { + "description": "The ID to use for this operation.", + "type": "string" + }, + "operation": { + "description": "The operation to create.", + "$ref": "#/definitions/Operation" + } + } + }, + "UpdateOperationRequest": { + "id": "UpdateOperationRequest", + "description": "Request for updating an existing operation", + "type": "object", + "properties": { + "operation": { + "description": "The operation to create.", + "$ref": "#/definitions/Operation" + } + } + } + } +} diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go new file mode 100644 index 00000000..ab92f4aa --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go @@ -0,0 +1,5440 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: v1alpha1/proto/grafeas.proto + +/* +Package grafeas is a generated protocol buffer package. 
+ +It is generated from these files: + v1alpha1/proto/grafeas.proto + +It has these top-level messages: + CreateProjectRequest + GetProjectRequest + ListProjectsRequest + DeleteProjectRequest + GetOccurrenceRequest + ListOccurrencesRequest + DeleteOccurrenceRequest + CreateOccurrenceRequest + UpdateOccurrenceRequest + GetNoteRequest + GetOccurrenceNoteRequest + ListNotesRequest + DeleteNoteRequest + CreateNoteRequest + UpdateNoteRequest + ListNoteOccurrencesRequest + ListProjectsResponse + ListNoteOccurrencesResponse + ListNotesResponse + ListOccurrencesResponse + ListOperationsResponse + UpdateOperationRequest + CreateOperationRequest + Project + OperationMetadata + Artifact + AttestationAuthority + BuildDetails + BuildProvenance + BuildSignature + BuildType + Command + Deployable + DockerImage + Discovery + FileHashes + Hash + Note + Occurrence + PackageManager + PgpSignedAttestation + Source + RepoSource + StorageSource + VulnerabilityType + SourceContext + AliasContext + CloudRepoSourceContext + GerritSourceContext + GitSourceContext + RepoId + ProjectRepoId +*/ +package grafeas + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import google_protobuf2 "google.golang.org/genproto/protobuf/field_mask" +import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp" +import google_longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Public key formats +type BuildSignature_KeyType int32 + +const ( + // `KeyType` is not set. + BuildSignature_KEY_TYPE_UNSPECIFIED BuildSignature_KeyType = 0 + // `PGP ASCII Armored` public key. + BuildSignature_PGP_ASCII_ARMORED BuildSignature_KeyType = 1 + // `PKIX PEM` public key. + BuildSignature_PKIX_PEM BuildSignature_KeyType = 2 +) + +var BuildSignature_KeyType_name = map[int32]string{ + 0: "KEY_TYPE_UNSPECIFIED", + 1: "PGP_ASCII_ARMORED", + 2: "PKIX_PEM", +} +var BuildSignature_KeyType_value = map[string]int32{ + "KEY_TYPE_UNSPECIFIED": 0, + "PGP_ASCII_ARMORED": 1, + "PKIX_PEM": 2, +} + +func (x BuildSignature_KeyType) String() string { + return proto.EnumName(BuildSignature_KeyType_name, int32(x)) +} +func (BuildSignature_KeyType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{29, 0} } + +// Types of platforms. 
+type Deployable_Deployment_Platform int32 + +const ( + // Unknown + Deployable_Deployment_PLATFORM_UNSPECIFIED Deployable_Deployment_Platform = 0 + // Google Container Engine + Deployable_Deployment_GKE Deployable_Deployment_Platform = 1 + // Google App Engine: Flexible Environment + Deployable_Deployment_FLEX Deployable_Deployment_Platform = 2 + // Custom user-defined platform + Deployable_Deployment_CUSTOM Deployable_Deployment_Platform = 3 +) + +var Deployable_Deployment_Platform_name = map[int32]string{ + 0: "PLATFORM_UNSPECIFIED", + 1: "GKE", + 2: "FLEX", + 3: "CUSTOM", +} +var Deployable_Deployment_Platform_value = map[string]int32{ + "PLATFORM_UNSPECIFIED": 0, + "GKE": 1, + "FLEX": 2, + "CUSTOM": 3, +} + +func (x Deployable_Deployment_Platform) String() string { + return proto.EnumName(Deployable_Deployment_Platform_name, int32(x)) +} +func (Deployable_Deployment_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{32, 0, 0} +} + +// Instructions from dockerfile +type DockerImage_Layer_Directive int32 + +const ( + // Default value for unsupported/missing directive + DockerImage_Layer_DIRECTIVE_UNSPECIFIED DockerImage_Layer_Directive = 0 + // https://docs.docker.com/reference/builder/#maintainer + DockerImage_Layer_MAINTAINER DockerImage_Layer_Directive = 1 + // https://docs.docker.com/reference/builder/#run + DockerImage_Layer_RUN DockerImage_Layer_Directive = 2 + // https://docs.docker.com/reference/builder/#cmd + DockerImage_Layer_CMD DockerImage_Layer_Directive = 3 + // https://docs.docker.com/reference/builder/#label + DockerImage_Layer_LABEL DockerImage_Layer_Directive = 4 + // https://docs.docker.com/reference/builder/#expose + DockerImage_Layer_EXPOSE DockerImage_Layer_Directive = 5 + // https://docs.docker.com/reference/builder/#env + DockerImage_Layer_ENV DockerImage_Layer_Directive = 6 + // https://docs.docker.com/reference/builder/#add + DockerImage_Layer_ADD DockerImage_Layer_Directive = 7 + // 
https://docs.docker.com/reference/builder/#copy + DockerImage_Layer_COPY DockerImage_Layer_Directive = 8 + // https://docs.docker.com/reference/builder/#entrypoint + DockerImage_Layer_ENTRYPOINT DockerImage_Layer_Directive = 9 + // https://docs.docker.com/reference/builder/#volume + DockerImage_Layer_VOLUME DockerImage_Layer_Directive = 10 + // https://docs.docker.com/reference/builder/#user + DockerImage_Layer_USER DockerImage_Layer_Directive = 11 + // https://docs.docker.com/reference/builder/#workdir + DockerImage_Layer_WORKDIR DockerImage_Layer_Directive = 12 + // https://docs.docker.com/reference/builder/#arg + DockerImage_Layer_ARG DockerImage_Layer_Directive = 13 + // https://docs.docker.com/reference/builder/#onbuild + DockerImage_Layer_ONBUILD DockerImage_Layer_Directive = 14 + // https://docs.docker.com/reference/builder/#stopsignal + DockerImage_Layer_STOPSIGNAL DockerImage_Layer_Directive = 15 + // https://docs.docker.com/reference/builder/#healthcheck + DockerImage_Layer_HEALTHCHECK DockerImage_Layer_Directive = 16 + // https://docs.docker.com/reference/builder/#shell + DockerImage_Layer_SHELL DockerImage_Layer_Directive = 17 +) + +var DockerImage_Layer_Directive_name = map[int32]string{ + 0: "DIRECTIVE_UNSPECIFIED", + 1: "MAINTAINER", + 2: "RUN", + 3: "CMD", + 4: "LABEL", + 5: "EXPOSE", + 6: "ENV", + 7: "ADD", + 8: "COPY", + 9: "ENTRYPOINT", + 10: "VOLUME", + 11: "USER", + 12: "WORKDIR", + 13: "ARG", + 14: "ONBUILD", + 15: "STOPSIGNAL", + 16: "HEALTHCHECK", + 17: "SHELL", +} +var DockerImage_Layer_Directive_value = map[string]int32{ + "DIRECTIVE_UNSPECIFIED": 0, + "MAINTAINER": 1, + "RUN": 2, + "CMD": 3, + "LABEL": 4, + "EXPOSE": 5, + "ENV": 6, + "ADD": 7, + "COPY": 8, + "ENTRYPOINT": 9, + "VOLUME": 10, + "USER": 11, + "WORKDIR": 12, + "ARG": 13, + "ONBUILD": 14, + "STOPSIGNAL": 15, + "HEALTHCHECK": 16, + "SHELL": 17, +} + +func (x DockerImage_Layer_Directive) String() string { + return proto.EnumName(DockerImage_Layer_Directive_name, int32(x)) +} 
+func (DockerImage_Layer_Directive) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{33, 0, 0} +} + +// Specifies the hash algorithm, if any. +type Hash_HashType int32 + +const ( + // No hash requested. + Hash_NONE Hash_HashType = 0 + // A sha256 hash. + Hash_SHA256 Hash_HashType = 1 +) + +var Hash_HashType_name = map[int32]string{ + 0: "NONE", + 1: "SHA256", +} +var Hash_HashType_value = map[string]int32{ + "NONE": 0, + "SHA256": 1, +} + +func (x Hash_HashType) String() string { + return proto.EnumName(Hash_HashType_name, int32(x)) +} +func (Hash_HashType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{36, 0} } + +// This must be 1:1 with members of our oneofs, it can be used for filtering +// Note and Occurrence on their kind. +type Note_Kind int32 + +const ( + // Unknown + Note_KIND_UNSPECIFIED Note_Kind = 0 + // The note and occurrence represent a package vulnerability. + Note_PACKAGE_VULNERABILITY Note_Kind = 2 + // The note and occurrence assert build provenance. + Note_BUILD_DETAILS Note_Kind = 3 + // This represents an image basis relationship. + Note_IMAGE_BASIS Note_Kind = 4 + // This represents a package installed via a package manager. + Note_PACKAGE_MANAGER Note_Kind = 5 + // The note and occurrence track deployment events. + Note_DEPLOYABLE Note_Kind = 6 + // The note and occurrence track the initial discovery status of a resource. 
+ Note_DISCOVERY Note_Kind = 7 +) + +var Note_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 2: "PACKAGE_VULNERABILITY", + 3: "BUILD_DETAILS", + 4: "IMAGE_BASIS", + 5: "PACKAGE_MANAGER", + 6: "DEPLOYABLE", + 7: "DISCOVERY", +} +var Note_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "PACKAGE_VULNERABILITY": 2, + "BUILD_DETAILS": 3, + "IMAGE_BASIS": 4, + "PACKAGE_MANAGER": 5, + "DEPLOYABLE": 6, + "DISCOVERY": 7, +} + +func (x Note_Kind) String() string { + return proto.EnumName(Note_Kind_name, int32(x)) +} +func (Note_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{37, 0} } + +// Instruction set architectures supported by various package managers. +type PackageManager_Architecture int32 + +const ( + // Unknown architecture + PackageManager_ARCHITECTURE_UNSPECIFIED PackageManager_Architecture = 0 + // X86 architecture + PackageManager_X86 PackageManager_Architecture = 1 + // X64 architecture + PackageManager_X64 PackageManager_Architecture = 2 +) + +var PackageManager_Architecture_name = map[int32]string{ + 0: "ARCHITECTURE_UNSPECIFIED", + 1: "X86", + 2: "X64", +} +var PackageManager_Architecture_value = map[string]int32{ + "ARCHITECTURE_UNSPECIFIED": 0, + "X86": 1, + "X64": 2, +} + +func (x PackageManager_Architecture) String() string { + return proto.EnumName(PackageManager_Architecture_name, int32(x)) +} +func (PackageManager_Architecture) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{39, 0} +} + +// Type (e.g. schema) of the attestation payload that was signed. +type PgpSignedAttestation_ContentType int32 + +const ( + // ContentType is not set. + PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED PgpSignedAttestation_ContentType = 0 + // Atomic format attestation signature. See + // https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md + // The payload extracted from `signature` is a JSON blob conforming to the + // linked schema. 
+ PgpSignedAttestation_SIMPLE_SIGNING_JSON PgpSignedAttestation_ContentType = 1 +) + +var PgpSignedAttestation_ContentType_name = map[int32]string{ + 0: "CONTENT_TYPE_UNSPECIFIED", + 1: "SIMPLE_SIGNING_JSON", +} +var PgpSignedAttestation_ContentType_value = map[string]int32{ + "CONTENT_TYPE_UNSPECIFIED": 0, + "SIMPLE_SIGNING_JSON": 1, +} + +func (x PgpSignedAttestation_ContentType) String() string { + return proto.EnumName(PgpSignedAttestation_ContentType_name, int32(x)) +} +func (PgpSignedAttestation_ContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{40, 0} +} + +// Note provider-assigned severity/impact ranking +type VulnerabilityType_Severity int32 + +const ( + // Unknown Impact + VulnerabilityType_SEVERITY_UNSPECIFIED VulnerabilityType_Severity = 0 + // Minimal Impact + VulnerabilityType_MINIMAL VulnerabilityType_Severity = 1 + // Low Impact + VulnerabilityType_LOW VulnerabilityType_Severity = 2 + // Medium Impact + VulnerabilityType_MEDIUM VulnerabilityType_Severity = 3 + // High Impact + VulnerabilityType_HIGH VulnerabilityType_Severity = 4 + // Critical Impact + VulnerabilityType_CRITICAL VulnerabilityType_Severity = 5 +) + +var VulnerabilityType_Severity_name = map[int32]string{ + 0: "SEVERITY_UNSPECIFIED", + 1: "MINIMAL", + 2: "LOW", + 3: "MEDIUM", + 4: "HIGH", + 5: "CRITICAL", +} +var VulnerabilityType_Severity_value = map[string]int32{ + "SEVERITY_UNSPECIFIED": 0, + "MINIMAL": 1, + "LOW": 2, + "MEDIUM": 3, + "HIGH": 4, + "CRITICAL": 5, +} + +func (x VulnerabilityType_Severity) String() string { + return proto.EnumName(VulnerabilityType_Severity_name, int32(x)) +} +func (VulnerabilityType_Severity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{44, 0} +} + +// Whether this is an ordinary package version or a +// sentinel MIN/MAX version. +type VulnerabilityType_Version_VersionKind int32 + +const ( + // A standard package version, defined by the other fields. 
+ VulnerabilityType_Version_NORMAL VulnerabilityType_Version_VersionKind = 0 + // A special version representing negative infinity, + // other fields are ignored. + VulnerabilityType_Version_MINIMUM VulnerabilityType_Version_VersionKind = 1 + // A special version representing positive infinity, + // other fields are ignored. + VulnerabilityType_Version_MAXIMUM VulnerabilityType_Version_VersionKind = 2 +) + +var VulnerabilityType_Version_VersionKind_name = map[int32]string{ + 0: "NORMAL", + 1: "MINIMUM", + 2: "MAXIMUM", +} +var VulnerabilityType_Version_VersionKind_value = map[string]int32{ + "NORMAL": 0, + "MINIMUM": 1, + "MAXIMUM": 2, +} + +func (x VulnerabilityType_Version_VersionKind) String() string { + return proto.EnumName(VulnerabilityType_Version_VersionKind_name, int32(x)) +} +func (VulnerabilityType_Version_VersionKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{44, 0, 0} +} + +// The type of an alias. +type AliasContext_Kind int32 + +const ( + // Unknown. + AliasContext_KIND_UNSPECIFIED AliasContext_Kind = 0 + // Git tag. + AliasContext_FIXED AliasContext_Kind = 1 + // Git branch. + AliasContext_MOVABLE AliasContext_Kind = 2 + // Used to specify non-standard aliases. For example, if a Git repo has a + // ref named "refs/foo/bar". + AliasContext_OTHER AliasContext_Kind = 4 +) + +var AliasContext_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "FIXED", + 2: "MOVABLE", + 4: "OTHER", +} +var AliasContext_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "FIXED": 1, + "MOVABLE": 2, + "OTHER": 4, +} + +func (x AliasContext_Kind) String() string { + return proto.EnumName(AliasContext_Kind_name, int32(x)) +} +func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{46, 0} } + +// Request to insert a new Project. 
+type CreateProjectRequest struct { + // The name of the project of the form + // "projects/{project_id}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *CreateProjectRequest) Reset() { *m = CreateProjectRequest{} } +func (m *CreateProjectRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProjectRequest) ProtoMessage() {} +func (*CreateProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *CreateProjectRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to get a Project. +type GetProjectRequest struct { + // The name of the project of the form + // "projects/{project_id}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *GetProjectRequest) Reset() { *m = GetProjectRequest{} } +func (m *GetProjectRequest) String() string { return proto.CompactTextString(m) } +func (*GetProjectRequest) ProtoMessage() {} +func (*GetProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *GetProjectRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list projects. +type ListProjectsRequest struct { + // The filter expression. + Filter string `protobuf:"bytes,1,opt,name=filter" json:"filter,omitempty"` + // Number of projects to return in the list. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` +} + +func (m *ListProjectsRequest) Reset() { *m = ListProjectsRequest{} } +func (m *ListProjectsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProjectsRequest) ProtoMessage() {} +func (*ListProjectsRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *ListProjectsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListProjectsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProjectsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Request to delete a project +type DeleteProjectRequest struct { + // The name of the project of the form + // "projects/{project_id}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *DeleteProjectRequest) Reset() { *m = DeleteProjectRequest{} } +func (m *DeleteProjectRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProjectRequest) ProtoMessage() {} +func (*DeleteProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *DeleteProjectRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to get a Occurrenceg. 
+type GetOccurrenceRequest struct { + // The name of the occurrence of the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *GetOccurrenceRequest) Reset() { *m = GetOccurrenceRequest{} } +func (m *GetOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceRequest) ProtoMessage() {} +func (*GetOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *GetOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list occurrences. +type ListOccurrencesRequest struct { + // This contains the project Id for example: projects/{project_id}. + Parent string `protobuf:"bytes,5,opt,name=parent" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter" json:"filter,omitempty"` + // Number of occurrences to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` +} + +func (m *ListOccurrencesRequest) Reset() { *m = ListOccurrencesRequest{} } +func (m *ListOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesRequest) ProtoMessage() {} +func (*ListOccurrencesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *ListOccurrencesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Request to delete a occurrence +type DeleteOccurrenceRequest struct { + // The name of the occurrence in the form of + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *DeleteOccurrenceRequest) Reset() { *m = DeleteOccurrenceRequest{} } +func (m *DeleteOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteOccurrenceRequest) ProtoMessage() {} +func (*DeleteOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *DeleteOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to insert a new occurrence. 
+type CreateOccurrenceRequest struct { + // This field contains the project Id for example: "projects/{project_id}" + Parent string `protobuf:"bytes,3,opt,name=parent" json:"parent,omitempty"` + // The occurrence to be inserted + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence" json:"occurrence,omitempty"` +} + +func (m *CreateOccurrenceRequest) Reset() { *m = CreateOccurrenceRequest{} } +func (m *CreateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOccurrenceRequest) ProtoMessage() {} +func (*CreateOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *CreateOccurrenceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +// Request to update an existing occurrence +type UpdateOccurrenceRequest struct { + // The name of the occurrence. + // Should be of the form "projects/{project_id}/occurrences/{OCCURRENCE_ID}". + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The updated occurrence. + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence" json:"occurrence,omitempty"` + // The fields to update. 
+ UpdateMask *google_protobuf2.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` +} + +func (m *UpdateOccurrenceRequest) Reset() { *m = UpdateOccurrenceRequest{} } +func (m *UpdateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOccurrenceRequest) ProtoMessage() {} +func (*UpdateOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *UpdateOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +func (m *UpdateOccurrenceRequest) GetUpdateMask() *google_protobuf2.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to get a Note. +type GetNoteRequest struct { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *GetNoteRequest) Reset() { *m = GetNoteRequest{} } +func (m *GetNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetNoteRequest) ProtoMessage() {} +func (*GetNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *GetNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to get the note to which this occurrence is attached. 
+type GetOccurrenceNoteRequest struct { + // The name of the occurrence in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *GetOccurrenceNoteRequest) Reset() { *m = GetOccurrenceNoteRequest{} } +func (m *GetOccurrenceNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceNoteRequest) ProtoMessage() {} +func (*GetOccurrenceNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *GetOccurrenceNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list notes. +type ListNotesRequest struct { + // This field contains the project ID for example: "projects/{project_id}". + Parent string `protobuf:"bytes,5,opt,name=parent" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter" json:"filter,omitempty"` + // Number of notes to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` +} + +func (m *ListNotesRequest) Reset() { *m = ListNotesRequest{} } +func (m *ListNotesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotesRequest) ProtoMessage() {} +func (*ListNotesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *ListNotesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListNotesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNotesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNotesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Request to delete a note +type DeleteNoteRequest struct { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *DeleteNoteRequest) Reset() { *m = DeleteNoteRequest{} } +func (m *DeleteNoteRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNoteRequest) ProtoMessage() {} +func (*DeleteNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *DeleteNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to insert a new note +type CreateNoteRequest struct { + // This field contains the project Id for example: + // "project/{project_id} + Parent string `protobuf:"bytes,4,opt,name=parent" json:"parent,omitempty"` + // The ID to use for this note. 
+ NoteId string `protobuf:"bytes,2,opt,name=note_id,json=noteId" json:"note_id,omitempty"` + // The Note to be inserted + Note *Note `protobuf:"bytes,3,opt,name=note" json:"note,omitempty"` +} + +func (m *CreateNoteRequest) Reset() { *m = CreateNoteRequest{} } +func (m *CreateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNoteRequest) ProtoMessage() {} +func (*CreateNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +func (m *CreateNoteRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateNoteRequest) GetNoteId() string { + if m != nil { + return m.NoteId + } + return "" +} + +func (m *CreateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +// Request to update an existing note +type UpdateNoteRequest struct { + // The name of the note. + // Should be of the form "projects/{provider_id}/notes/{note_id}". + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The updated note. + Note *Note `protobuf:"bytes,2,opt,name=note" json:"note,omitempty"` + // The fields to update. + UpdateMask *google_protobuf2.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` +} + +func (m *UpdateNoteRequest) Reset() { *m = UpdateNoteRequest{} } +func (m *UpdateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNoteRequest) ProtoMessage() {} +func (*UpdateNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *UpdateNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +func (m *UpdateNoteRequest) GetUpdateMask() *google_protobuf2.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to list occurrences. 
+type ListNoteOccurrencesRequest struct { + // The name field will contain the note name for example: + // "provider/{provider_id}/notes/{note_id}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter" json:"filter,omitempty"` + // Number of notes to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` +} + +func (m *ListNoteOccurrencesRequest) Reset() { *m = ListNoteOccurrencesRequest{} } +func (m *ListNoteOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesRequest) ProtoMessage() {} +func (*ListNoteOccurrencesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +func (m *ListNoteOccurrencesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNoteOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response including listed projects +type ListProjectsResponse struct { + // The projects requested. + Projects []*Project `protobuf:"bytes,1,rep,name=projects" json:"projects,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` +} + +func (m *ListProjectsResponse) Reset() { *m = ListProjectsResponse{} } +func (m *ListProjectsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProjectsResponse) ProtoMessage() {} +func (*ListProjectsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +func (m *ListProjectsResponse) GetProjects() []*Project { + if m != nil { + return m.Projects + } + return nil +} + +func (m *ListProjectsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Response including listed occurrences for a note. +type ListNoteOccurrencesResponse struct { + // The occurrences attached to the specified note. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences" json:"occurrences,omitempty"` + // Token to receive the next page of notes. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` +} + +func (m *ListNoteOccurrencesResponse) Reset() { *m = ListNoteOccurrencesResponse{} } +func (m *ListNoteOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesResponse) ProtoMessage() {} +func (*ListNoteOccurrencesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *ListNoteOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListNoteOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Response including listed notes. +type ListNotesResponse struct { + // The occurrences requested + Notes []*Note `protobuf:"bytes,1,rep,name=notes" json:"notes,omitempty"` + // The next pagination token in the list response. It should be used as + // page_token for the following request. 
An empty value means no more result. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` +} + +func (m *ListNotesResponse) Reset() { *m = ListNotesResponse{} } +func (m *ListNotesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNotesResponse) ProtoMessage() {} +func (*ListNotesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *ListNotesResponse) GetNotes() []*Note { + if m != nil { + return m.Notes + } + return nil +} + +func (m *ListNotesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Response including listed active occurrences. +type ListOccurrencesResponse struct { + // The occurrences requested. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences" json:"occurrences,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` +} + +func (m *ListOccurrencesResponse) Reset() { *m = ListOccurrencesResponse{} } +func (m *ListOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesResponse) ProtoMessage() {} +func (*ListOccurrencesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *ListOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Response including listed operations. +type ListOperationsResponse struct { + // The next pagination token in the List response. It should be used as + // page_token for the following request. An empty value means no more results. 
+ NextPageToken string `protobuf:"bytes,1,opt,name=nextPageToken" json:"nextPageToken,omitempty"` + // The operations requested. + Operations []*google_longrunning.Operation `protobuf:"bytes,2,rep,name=operations" json:"operations,omitempty"` +} + +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *ListOperationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListOperationsResponse) GetOperations() []*google_longrunning.Operation { + if m != nil { + return m.Operations + } + return nil +} + +// Request for updating an existing operation +type UpdateOperationRequest struct { + // The name of the Operation. + // Should be of the form "projects/{provider_id}/operations/{operation_id}". + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The operation to create. + Operation *google_longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` +} + +func (m *UpdateOperationRequest) Reset() { *m = UpdateOperationRequest{} } +func (m *UpdateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOperationRequest) ProtoMessage() {} +func (*UpdateOperationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +func (m *UpdateOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateOperationRequest) GetOperation() *google_longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +// Request for creating an operation +type CreateOperationRequest struct { + // The projectId that this operation should be created under. 
+ Parent string `protobuf:"bytes,1,opt,name=parent" json:"parent,omitempty"` + // The ID to use for this operation. + OperationId string `protobuf:"bytes,2,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` + // The operation to create. + Operation *google_longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` +} + +func (m *CreateOperationRequest) Reset() { *m = CreateOperationRequest{} } +func (m *CreateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOperationRequest) ProtoMessage() {} +func (*CreateOperationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +func (m *CreateOperationRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CreateOperationRequest) GetOperation() *google_longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +// Provides detailed description of a `Project`. +type Project struct { + // The name of the project of the form + // "projects/{project_id}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *Project) Reset() { *m = Project{} } +func (m *Project) String() string { return proto.CompactTextString(m) } +func (*Project) ProtoMessage() {} +func (*Project) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *Project) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Metadata for all operations used and required for all operations +// that created by Container Analysis Providers +type OperationMetadata struct { + // Output only. The time this operation was created. + CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + // Output only. 
The time that this operation was marked completed or failed. + EndTime *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *OperationMetadata) GetCreateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetEndTime() *google_protobuf3.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Artifact describes a build product. +type Artifact struct { + // Name of the artifact. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. + // + // This field is deprecated in favor of the plural `names` field; it continues + // to exist here to allow existing BuildProvenance serialized to json in + // google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to + // deserialize back into proto. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a + // container. + Checksum string `protobuf:"bytes,2,opt,name=checksum" json:"checksum,omitempty"` + // Artifact ID, if any; for container images, this will be a URL by digest + // like gcr.io/projectID/imagename@sha256:123456 + Id string `protobuf:"bytes,3,opt,name=id" json:"id,omitempty"` + // Related artifact names. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. 
Note that a + // single Artifact ID can have multiple names, for example if two tags are + // applied to one image. + Names []string `protobuf:"bytes,4,rep,name=names" json:"names,omitempty"` +} + +func (m *Artifact) Reset() { *m = Artifact{} } +func (m *Artifact) String() string { return proto.CompactTextString(m) } +func (*Artifact) ProtoMessage() {} +func (*Artifact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *Artifact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Artifact) GetChecksum() string { + if m != nil { + return m.Checksum + } + return "" +} + +func (m *Artifact) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Artifact) GetNames() []string { + if m != nil { + return m.Names + } + return nil +} + +// Note kind that represents a logical attestation "role" or "authority". For +// example, an organization might have one AttestationAuthority for "QA" and one +// for "build". This Note is intended to act strictly as a grouping mechanism +// for the attached Occurrences (Attestations). This grouping mechanism also +// provides a security boundary and provides a single point of lookup to find +// all attached Attestation Occurrences, even if they don't all live in the same +// project. 
+type AttestationAuthority struct { + Hint *AttestationAuthority_AttestationAuthorityHint `protobuf:"bytes,1,opt,name=hint" json:"hint,omitempty"` +} + +func (m *AttestationAuthority) Reset() { *m = AttestationAuthority{} } +func (m *AttestationAuthority) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority) ProtoMessage() {} +func (*AttestationAuthority) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *AttestationAuthority) GetHint() *AttestationAuthority_AttestationAuthorityHint { + if m != nil { + return m.Hint + } + return nil +} + +type AttestationAuthority_AttestationAuthorityHint struct { + // The human readable name of this Attestation Authority, e.g. "qa". + HumanReadableName string `protobuf:"bytes,1,opt,name=human_readable_name,json=humanReadableName" json:"human_readable_name,omitempty"` +} + +func (m *AttestationAuthority_AttestationAuthorityHint) Reset() { + *m = AttestationAuthority_AttestationAuthorityHint{} +} +func (m *AttestationAuthority_AttestationAuthorityHint) String() string { + return proto.CompactTextString(m) +} +func (*AttestationAuthority_AttestationAuthorityHint) ProtoMessage() {} +func (*AttestationAuthority_AttestationAuthorityHint) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{26, 0} +} + +func (m *AttestationAuthority_AttestationAuthorityHint) GetHumanReadableName() string { + if m != nil { + return m.HumanReadableName + } + return "" +} + +type AttestationAuthority_Attestation struct { + // The signature, generally over the `resource_url`, that verifies this + // attestation. The semantics of the signature veracity are ultimately + // determined by the verification engine. 
+ // + // Types that are valid to be assigned to Signature: + // *AttestationAuthority_Attestation_PgpSignedAttestation + Signature isAttestationAuthority_Attestation_Signature `protobuf_oneof:"signature"` +} + +func (m *AttestationAuthority_Attestation) Reset() { *m = AttestationAuthority_Attestation{} } +func (m *AttestationAuthority_Attestation) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority_Attestation) ProtoMessage() {} +func (*AttestationAuthority_Attestation) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{26, 1} +} + +type isAttestationAuthority_Attestation_Signature interface { + isAttestationAuthority_Attestation_Signature() +} + +type AttestationAuthority_Attestation_PgpSignedAttestation struct { + PgpSignedAttestation *PgpSignedAttestation `protobuf:"bytes,1,opt,name=pgp_signed_attestation,json=pgpSignedAttestation,oneof"` +} + +func (*AttestationAuthority_Attestation_PgpSignedAttestation) isAttestationAuthority_Attestation_Signature() { +} + +func (m *AttestationAuthority_Attestation) GetSignature() isAttestationAuthority_Attestation_Signature { + if m != nil { + return m.Signature + } + return nil +} + +func (m *AttestationAuthority_Attestation) GetPgpSignedAttestation() *PgpSignedAttestation { + if x, ok := m.GetSignature().(*AttestationAuthority_Attestation_PgpSignedAttestation); ok { + return x.PgpSignedAttestation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AttestationAuthority_Attestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttestationAuthority_Attestation_OneofMarshaler, _AttestationAuthority_Attestation_OneofUnmarshaler, _AttestationAuthority_Attestation_OneofSizer, []interface{}{ + (*AttestationAuthority_Attestation_PgpSignedAttestation)(nil), + } +} + +func _AttestationAuthority_Attestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttestationAuthority_Attestation) + // signature + switch x := m.Signature.(type) { + case *AttestationAuthority_Attestation_PgpSignedAttestation: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PgpSignedAttestation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AttestationAuthority_Attestation.Signature has unexpected type %T", x) + } + return nil +} + +func _AttestationAuthority_Attestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttestationAuthority_Attestation) + switch tag { + case 1: // signature.pgp_signed_attestation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PgpSignedAttestation) + err := b.DecodeMessage(msg) + m.Signature = &AttestationAuthority_Attestation_PgpSignedAttestation{msg} + return true, err + default: + return false, nil + } +} + +func _AttestationAuthority_Attestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttestationAuthority_Attestation) + // signature + switch x := m.Signature.(type) { + case *AttestationAuthority_Attestation_PgpSignedAttestation: + s := proto.Size(x.PgpSignedAttestation) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message 
encapsulating build provenance details. +type BuildDetails struct { + // The actual provenance + Provenance *BuildProvenance `protobuf:"bytes,1,opt,name=provenance" json:"provenance,omitempty"` + // Serialized JSON representation of the provenance, used in generating the + // `BuildSignature` in the corresponding Result. After verifying the + // signature, `provenance_bytes` can be unmarshalled and compared to the + // provenance to confirm that it is unchanged. A base64-encoded string + // representation of the provenance bytes is used for the signature in order + // to interoperate with openssl which expects this format for signature + // verification. + // + // The serialized form is captured both to avoid ambiguity in how the + // provenance is marshalled to json as well to prevent incompatibilities with + // future changes. + ProvenanceBytes string `protobuf:"bytes,2,opt,name=provenance_bytes,json=provenanceBytes" json:"provenance_bytes,omitempty"` +} + +func (m *BuildDetails) Reset() { *m = BuildDetails{} } +func (m *BuildDetails) String() string { return proto.CompactTextString(m) } +func (*BuildDetails) ProtoMessage() {} +func (*BuildDetails) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } + +func (m *BuildDetails) GetProvenance() *BuildProvenance { + if m != nil { + return m.Provenance + } + return nil +} + +func (m *BuildDetails) GetProvenanceBytes() string { + if m != nil { + return m.ProvenanceBytes + } + return "" +} + +// Provenance of a build. Contains all information needed to verify the full +// details about the build from source to completion. +type BuildProvenance struct { + // Unique identifier of the build. + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + // ID of the project. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId" json:"project_id,omitempty"` + // Commands requested by the build. 
+ Commands []*Command `protobuf:"bytes,5,rep,name=commands" json:"commands,omitempty"` + // Output of the build. + BuiltArtifacts []*Artifact `protobuf:"bytes,6,rep,name=built_artifacts,json=builtArtifacts" json:"built_artifacts,omitempty"` + // Time at which the build was created. + CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + // Time at which execution of the build was started. + StartTime *google_protobuf3.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + // Time at which execution of the build was finished. + FinishTime *google_protobuf3.Timestamp `protobuf:"bytes,9,opt,name=finish_time,json=finishTime" json:"finish_time,omitempty"` + // E-mail address of the user who initiated this build. Note that this was the + // user's e-mail address at the time the build was initiated; this address may + // not represent the same end-user for all time. + Creator string `protobuf:"bytes,11,opt,name=creator" json:"creator,omitempty"` + // Google Cloud Storage bucket where logs were written. + LogsBucket string `protobuf:"bytes,13,opt,name=logs_bucket,json=logsBucket" json:"logs_bucket,omitempty"` + // Details of the Source input to the build. + SourceProvenance *Source `protobuf:"bytes,14,opt,name=source_provenance,json=sourceProvenance" json:"source_provenance,omitempty"` + // Trigger identifier if the build was triggered automatically; empty if not. + TriggerId string `protobuf:"bytes,15,opt,name=trigger_id,json=triggerId" json:"trigger_id,omitempty"` + // Special options applied to this build. This is a catch-all field where + // build providers can enter any desired additional details. 
+ BuildOptions map[string]string `protobuf:"bytes,16,rep,name=build_options,json=buildOptions" json:"build_options,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // Version string of the builder at the time this build was executed. + BuilderVersion string `protobuf:"bytes,17,opt,name=builder_version,json=builderVersion" json:"builder_version,omitempty"` +} + +func (m *BuildProvenance) Reset() { *m = BuildProvenance{} } +func (m *BuildProvenance) String() string { return proto.CompactTextString(m) } +func (*BuildProvenance) ProtoMessage() {} +func (*BuildProvenance) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } + +func (m *BuildProvenance) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *BuildProvenance) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BuildProvenance) GetCommands() []*Command { + if m != nil { + return m.Commands + } + return nil +} + +func (m *BuildProvenance) GetBuiltArtifacts() []*Artifact { + if m != nil { + return m.BuiltArtifacts + } + return nil +} + +func (m *BuildProvenance) GetCreateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *BuildProvenance) GetStartTime() *google_protobuf3.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *BuildProvenance) GetFinishTime() *google_protobuf3.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +func (m *BuildProvenance) GetCreator() string { + if m != nil { + return m.Creator + } + return "" +} + +func (m *BuildProvenance) GetLogsBucket() string { + if m != nil { + return m.LogsBucket + } + return "" +} + +func (m *BuildProvenance) GetSourceProvenance() *Source { + if m != nil { + return m.SourceProvenance + } + return nil +} + +func (m *BuildProvenance) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +func (m *BuildProvenance) 
GetBuildOptions() map[string]string { + if m != nil { + return m.BuildOptions + } + return nil +} + +func (m *BuildProvenance) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +// Message encapsulating the signature of the verified build. +type BuildSignature struct { + // Public key of the builder which can be used to verify that the related + // findings are valid and unchanged. If `key_type` is empty, this defaults + // to PEM encoded public keys. + // + // This field may be empty if `key_id` references an external key. + // + // For Cloud Container Builder based signatures, this is a PEM encoded public + // key. To verify the Cloud Container Builder signature, place the contents of + // this field into a file (public.pem). The signature field is base64-decoded + // into its binary representation in signature.bin, and the provenance bytes + // from `BuildDetails` are base64-decoded into a binary representation in + // signed.bin. OpenSSL can then verify the signature: + // `openssl sha256 -verify public.pem -signature signature.bin signed.bin` + PublicKey string `protobuf:"bytes,1,opt,name=public_key,json=publicKey" json:"public_key,omitempty"` + // Signature of the related `BuildProvenance`, encoded in a base64 string. + Signature string `protobuf:"bytes,2,opt,name=signature" json:"signature,omitempty"` + // An Id for the key used to sign. This could be either an Id for the key + // stored in `public_key` (such as the Id or fingerprint for a PGP key, or the + // CN for a cert), or a reference to an external key (such as a reference to a + // key in Cloud Key Management Service). 
+ KeyId string `protobuf:"bytes,3,opt,name=key_id,json=keyId" json:"key_id,omitempty"` + // The type of the key, either stored in `public_key` or referenced in + // `key_id` + KeyType BuildSignature_KeyType `protobuf:"varint,4,opt,name=key_type,json=keyType,enum=grafeas.v1alpha1.api.BuildSignature_KeyType" json:"key_type,omitempty"` +} + +func (m *BuildSignature) Reset() { *m = BuildSignature{} } +func (m *BuildSignature) String() string { return proto.CompactTextString(m) } +func (*BuildSignature) ProtoMessage() {} +func (*BuildSignature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } + +func (m *BuildSignature) GetPublicKey() string { + if m != nil { + return m.PublicKey + } + return "" +} + +func (m *BuildSignature) GetSignature() string { + if m != nil { + return m.Signature + } + return "" +} + +func (m *BuildSignature) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *BuildSignature) GetKeyType() BuildSignature_KeyType { + if m != nil { + return m.KeyType + } + return BuildSignature_KEY_TYPE_UNSPECIFIED +} + +// Note holding the version of the provider's builder and the signature of +// the provenance message in linked BuildDetails. +type BuildType struct { + // Version of the builder which produced this Note. + BuilderVersion string `protobuf:"bytes,1,opt,name=builder_version,json=builderVersion" json:"builder_version,omitempty"` + // Signature of the build in Occurrences pointing to the Note containing this + // `BuilderDetails`. 
+ Signature *BuildSignature `protobuf:"bytes,2,opt,name=signature" json:"signature,omitempty"` +} + +func (m *BuildType) Reset() { *m = BuildType{} } +func (m *BuildType) String() string { return proto.CompactTextString(m) } +func (*BuildType) ProtoMessage() {} +func (*BuildType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } + +func (m *BuildType) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +func (m *BuildType) GetSignature() *BuildSignature { + if m != nil { + return m.Signature + } + return nil +} + +// Command describes a step performed as part of the build pipeline. +type Command struct { + // Name of the command, as presented on the command line, or if the command is + // packaged as a Docker container, as presented to `docker pull`. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Environment variables set before running this Command. + Env []string `protobuf:"bytes,2,rep,name=env" json:"env,omitempty"` + // Command-line arguments used when executing this Command. + Args []string `protobuf:"bytes,3,rep,name=args" json:"args,omitempty"` + // Working directory (relative to project source root) used when running + // this Command. + Dir string `protobuf:"bytes,4,opt,name=dir" json:"dir,omitempty"` + // Optional unique identifier for this Command, used in wait_for to reference + // this Command as a dependency. + Id string `protobuf:"bytes,5,opt,name=id" json:"id,omitempty"` + // The ID(s) of the Command(s) that this Command depends on. 
+ WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor" json:"wait_for,omitempty"` +} + +func (m *Command) Reset() { *m = Command{} } +func (m *Command) String() string { return proto.CompactTextString(m) } +func (*Command) ProtoMessage() {} +func (*Command) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{31} } + +func (m *Command) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Command) GetEnv() []string { + if m != nil { + return m.Env + } + return nil +} + +func (m *Command) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *Command) GetDir() string { + if m != nil { + return m.Dir + } + return "" +} + +func (m *Command) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Command) GetWaitFor() []string { + if m != nil { + return m.WaitFor + } + return nil +} + +// An artifact that can be deployed in some runtime. +type Deployable struct { + // Resource URI for the artifact being deployed. + ResourceUri []string `protobuf:"bytes,1,rep,name=resource_uri,json=resourceUri" json:"resource_uri,omitempty"` +} + +func (m *Deployable) Reset() { *m = Deployable{} } +func (m *Deployable) String() string { return proto.CompactTextString(m) } +func (*Deployable) ProtoMessage() {} +func (*Deployable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32} } + +func (m *Deployable) GetResourceUri() []string { + if m != nil { + return m.ResourceUri + } + return nil +} + +// The period during which some deployable was active in a runtime. +type Deployable_Deployment struct { + // Identity of the user that triggered this deployment. + UserEmail string `protobuf:"bytes,1,opt,name=user_email,json=userEmail" json:"user_email,omitempty"` + // Beginning of the lifetime of this deployment. 
+ DeployTime *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=deploy_time,json=deployTime" json:"deploy_time,omitempty"` + // End of the lifetime of this deployment. + UndeployTime *google_protobuf3.Timestamp `protobuf:"bytes,3,opt,name=undeploy_time,json=undeployTime" json:"undeploy_time,omitempty"` + // Configuration used to create this deployment. + Config string `protobuf:"bytes,8,opt,name=config" json:"config,omitempty"` + // Address of the runtime element hosting this deployment. + Address string `protobuf:"bytes,5,opt,name=address" json:"address,omitempty"` + // Output only. Resource URI for the artifact being deployed taken from the + // deployable field with the same name. + ResourceUri []string `protobuf:"bytes,6,rep,name=resource_uri,json=resourceUri" json:"resource_uri,omitempty"` + // Platform hosting this deployment. + Platform Deployable_Deployment_Platform `protobuf:"varint,7,opt,name=platform,enum=grafeas.v1alpha1.api.Deployable_Deployment_Platform" json:"platform,omitempty"` +} + +func (m *Deployable_Deployment) Reset() { *m = Deployable_Deployment{} } +func (m *Deployable_Deployment) String() string { return proto.CompactTextString(m) } +func (*Deployable_Deployment) ProtoMessage() {} +func (*Deployable_Deployment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32, 0} } + +func (m *Deployable_Deployment) GetUserEmail() string { + if m != nil { + return m.UserEmail + } + return "" +} + +func (m *Deployable_Deployment) GetDeployTime() *google_protobuf3.Timestamp { + if m != nil { + return m.DeployTime + } + return nil +} + +func (m *Deployable_Deployment) GetUndeployTime() *google_protobuf3.Timestamp { + if m != nil { + return m.UndeployTime + } + return nil +} + +func (m *Deployable_Deployment) GetConfig() string { + if m != nil { + return m.Config + } + return "" +} + +func (m *Deployable_Deployment) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *Deployable_Deployment) GetResourceUri() 
[]string { + if m != nil { + return m.ResourceUri + } + return nil +} + +func (m *Deployable_Deployment) GetPlatform() Deployable_Deployment_Platform { + if m != nil { + return m.Platform + } + return Deployable_Deployment_PLATFORM_UNSPECIFIED +} + +// DockerImage holds types defining base image notes +// and derived image occurrences. +type DockerImage struct { +} + +func (m *DockerImage) Reset() { *m = DockerImage{} } +func (m *DockerImage) String() string { return proto.CompactTextString(m) } +func (*DockerImage) ProtoMessage() {} +func (*DockerImage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33} } + +// Layer holds metadata specific to a layer of a Docker image. +type DockerImage_Layer struct { + // The recovered Dockerfile directive used to construct this layer. + Directive DockerImage_Layer_Directive `protobuf:"varint,1,opt,name=directive,enum=grafeas.v1alpha1.api.DockerImage_Layer_Directive" json:"directive,omitempty"` + // The recovered arguments to the Dockerfile directive. + Arguments string `protobuf:"bytes,2,opt,name=arguments" json:"arguments,omitempty"` +} + +func (m *DockerImage_Layer) Reset() { *m = DockerImage_Layer{} } +func (m *DockerImage_Layer) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Layer) ProtoMessage() {} +func (*DockerImage_Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 0} } + +func (m *DockerImage_Layer) GetDirective() DockerImage_Layer_Directive { + if m != nil { + return m.Directive + } + return DockerImage_Layer_DIRECTIVE_UNSPECIFIED +} + +func (m *DockerImage_Layer) GetArguments() string { + if m != nil { + return m.Arguments + } + return "" +} + +// A set of properties that uniquely identify a given Docker image. +type DockerImage_Fingerprint struct { + // The layer-id of the final layer in the Docker image's v1 + // representation. + // This field can be used as a filter in list requests. 
+ V1Name string `protobuf:"bytes,1,opt,name=v1_name,json=v1Name" json:"v1_name,omitempty"` + // The ordered list of v2 blobs that represent a given image. + V2Blob []string `protobuf:"bytes,2,rep,name=v2_blob,json=v2Blob" json:"v2_blob,omitempty"` + // Output only. The name of the image's v2 blobs computed via: + // [bottom] := v2_blob[bottom] + // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) + // Only the name of the final blob is kept. + // This field can be used as a filter in list requests. + V2Name string `protobuf:"bytes,3,opt,name=v2_name,json=v2Name" json:"v2_name,omitempty"` +} + +func (m *DockerImage_Fingerprint) Reset() { *m = DockerImage_Fingerprint{} } +func (m *DockerImage_Fingerprint) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Fingerprint) ProtoMessage() {} +func (*DockerImage_Fingerprint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 1} } + +func (m *DockerImage_Fingerprint) GetV1Name() string { + if m != nil { + return m.V1Name + } + return "" +} + +func (m *DockerImage_Fingerprint) GetV2Blob() []string { + if m != nil { + return m.V2Blob + } + return nil +} + +func (m *DockerImage_Fingerprint) GetV2Name() string { + if m != nil { + return m.V2Name + } + return "" +} + +// Basis describes the base image portion (Note) of the DockerImage +// relationship. Linked occurrences are derived from this or an +// equivalent image via: +// FROM +// Or an equivalent reference, e.g. a tag of the resource_url. +type DockerImage_Basis struct { + // The resource_url for the resource representing the basis of + // associated occurrence images. 
+ ResourceUrl string `protobuf:"bytes,1,opt,name=resource_url,json=resourceUrl" json:"resource_url,omitempty"` + // The fingerprint of the base image + Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,2,opt,name=fingerprint" json:"fingerprint,omitempty"` +} + +func (m *DockerImage_Basis) Reset() { *m = DockerImage_Basis{} } +func (m *DockerImage_Basis) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Basis) ProtoMessage() {} +func (*DockerImage_Basis) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 2} } + +func (m *DockerImage_Basis) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *DockerImage_Basis) GetFingerprint() *DockerImage_Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +// Derived describes the derived image portion (Occurrence) of the +// DockerImage relationship. This image would be produced from a Dockerfile +// with FROM . +type DockerImage_Derived struct { + // The fingerprint of the derived image + Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"` + // Output only. The number of layers by which this image differs from + // the associated image basis. + Distance uint32 `protobuf:"varint,2,opt,name=distance" json:"distance,omitempty"` + // This contains layer-specific metadata, if populated it + // has length "distance" and is ordered with [distance] being the + // layer immediately following the base image and [1] + // being the final layer. 
+ LayerInfo []*DockerImage_Layer `protobuf:"bytes,3,rep,name=layer_info,json=layerInfo" json:"layer_info,omitempty"` + // Output only.This contains the base image url for the derived image + // Occurrence + BaseResourceUrl string `protobuf:"bytes,4,opt,name=base_resource_url,json=baseResourceUrl" json:"base_resource_url,omitempty"` +} + +func (m *DockerImage_Derived) Reset() { *m = DockerImage_Derived{} } +func (m *DockerImage_Derived) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Derived) ProtoMessage() {} +func (*DockerImage_Derived) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 3} } + +func (m *DockerImage_Derived) GetFingerprint() *DockerImage_Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +func (m *DockerImage_Derived) GetDistance() uint32 { + if m != nil { + return m.Distance + } + return 0 +} + +func (m *DockerImage_Derived) GetLayerInfo() []*DockerImage_Layer { + if m != nil { + return m.LayerInfo + } + return nil +} + +func (m *DockerImage_Derived) GetBaseResourceUrl() string { + if m != nil { + return m.BaseResourceUrl + } + return "" +} + +// A note that indicates a type of analysis a provider would perform. This note +// exists in a provider's project. A `Discovery` occurrence is created in a +// consumer's project at the start of analysis. The occurrence's operation will +// indicate the status of the analysis. Absence of an occurrence linked to this +// note for a resource indicates that analysis hasn't started. +type Discovery struct { + // The kind of analysis that is handled by this discovery. 
+ AnalysisKind Note_Kind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,enum=grafeas.v1alpha1.api.Note_Kind" json:"analysis_kind,omitempty"` +} + +func (m *Discovery) Reset() { *m = Discovery{} } +func (m *Discovery) String() string { return proto.CompactTextString(m) } +func (*Discovery) ProtoMessage() {} +func (*Discovery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34} } + +func (m *Discovery) GetAnalysisKind() Note_Kind { + if m != nil { + return m.AnalysisKind + } + return Note_KIND_UNSPECIFIED +} + +// Provides information about the scan status of a discovered resource. +type Discovery_Discovered struct { + // Output only. An operation that indicates the status of the current scan. + Operation *google_longrunning.Operation `protobuf:"bytes,1,opt,name=operation" json:"operation,omitempty"` +} + +func (m *Discovery_Discovered) Reset() { *m = Discovery_Discovered{} } +func (m *Discovery_Discovered) String() string { return proto.CompactTextString(m) } +func (*Discovery_Discovered) ProtoMessage() {} +func (*Discovery_Discovered) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34, 0} } + +func (m *Discovery_Discovered) GetOperation() *google_longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +// Container message for hashes of byte content of files, used in Source +// messages to verify integrity of source input to the build. +type FileHashes struct { + // Collection of file hashes. + FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash" json:"file_hash,omitempty"` +} + +func (m *FileHashes) Reset() { *m = FileHashes{} } +func (m *FileHashes) String() string { return proto.CompactTextString(m) } +func (*FileHashes) ProtoMessage() {} +func (*FileHashes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{35} } + +func (m *FileHashes) GetFileHash() []*Hash { + if m != nil { + return m.FileHash + } + return nil +} + +// Container message for hash values. 
+type Hash struct { + // The type of hash that was performed. + Type Hash_HashType `protobuf:"varint,1,opt,name=type,enum=grafeas.v1alpha1.api.Hash_HashType" json:"type,omitempty"` + // The hash value. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{36} } + +func (m *Hash) GetType() Hash_HashType { + if m != nil { + return m.Type + } + return Hash_NONE +} + +func (m *Hash) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// Provides a detailed description of a `Note`. +type Note struct { + // The name of the note in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // A one sentence description of this `Note`. + ShortDescription string `protobuf:"bytes,3,opt,name=short_description,json=shortDescription" json:"short_description,omitempty"` + // A detailed description of this `Note`. + LongDescription string `protobuf:"bytes,4,opt,name=long_description,json=longDescription" json:"long_description,omitempty"` + // Output only. This explicitly denotes which kind of note is specified. This + // field can be used as a filter in list requests. + Kind Note_Kind `protobuf:"varint,9,opt,name=kind,enum=grafeas.v1alpha1.api.Note_Kind" json:"kind,omitempty"` + // The type of note. 
+ // + // Types that are valid to be assigned to NoteType: + // *Note_VulnerabilityType + // *Note_BuildType + // *Note_BaseImage + // *Note_Package + // *Note_Deployable + // *Note_Discovery + NoteType isNote_NoteType `protobuf_oneof:"note_type"` + // URLs associated with this note + RelatedUrl []*Note_RelatedUrl `protobuf:"bytes,7,rep,name=related_url,json=relatedUrl" json:"related_url,omitempty"` + // Time of expiration for this note, null if note does not expire. + ExpirationTime *google_protobuf3.Timestamp `protobuf:"bytes,10,opt,name=expiration_time,json=expirationTime" json:"expiration_time,omitempty"` + // Output only. The time this note was created. This field can be used as a + // filter in list requests. + CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,11,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + // Output only. The time this note was last updated. This field can be used as + // a filter in list requests. + UpdateTime *google_protobuf3.Timestamp `protobuf:"bytes,12,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` +} + +func (m *Note) Reset() { *m = Note{} } +func (m *Note) String() string { return proto.CompactTextString(m) } +func (*Note) ProtoMessage() {} +func (*Note) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37} } + +type isNote_NoteType interface { + isNote_NoteType() +} + +type Note_VulnerabilityType struct { + VulnerabilityType *VulnerabilityType `protobuf:"bytes,6,opt,name=vulnerability_type,json=vulnerabilityType,oneof"` +} +type Note_BuildType struct { + BuildType *BuildType `protobuf:"bytes,8,opt,name=build_type,json=buildType,oneof"` +} +type Note_BaseImage struct { + BaseImage *DockerImage_Basis `protobuf:"bytes,13,opt,name=base_image,json=baseImage,oneof"` +} +type Note_Package struct { + Package *PackageManager_Package `protobuf:"bytes,14,opt,name=package,oneof"` +} +type Note_Deployable struct { + Deployable *Deployable 
`protobuf:"bytes,17,opt,name=deployable,oneof"` +} +type Note_Discovery struct { + Discovery *Discovery `protobuf:"bytes,18,opt,name=discovery,oneof"` +} + +func (*Note_VulnerabilityType) isNote_NoteType() {} +func (*Note_BuildType) isNote_NoteType() {} +func (*Note_BaseImage) isNote_NoteType() {} +func (*Note_Package) isNote_NoteType() {} +func (*Note_Deployable) isNote_NoteType() {} +func (*Note_Discovery) isNote_NoteType() {} + +func (m *Note) GetNoteType() isNote_NoteType { + if m != nil { + return m.NoteType + } + return nil +} + +func (m *Note) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Note) GetShortDescription() string { + if m != nil { + return m.ShortDescription + } + return "" +} + +func (m *Note) GetLongDescription() string { + if m != nil { + return m.LongDescription + } + return "" +} + +func (m *Note) GetKind() Note_Kind { + if m != nil { + return m.Kind + } + return Note_KIND_UNSPECIFIED +} + +func (m *Note) GetVulnerabilityType() *VulnerabilityType { + if x, ok := m.GetNoteType().(*Note_VulnerabilityType); ok { + return x.VulnerabilityType + } + return nil +} + +func (m *Note) GetBuildType() *BuildType { + if x, ok := m.GetNoteType().(*Note_BuildType); ok { + return x.BuildType + } + return nil +} + +func (m *Note) GetBaseImage() *DockerImage_Basis { + if x, ok := m.GetNoteType().(*Note_BaseImage); ok { + return x.BaseImage + } + return nil +} + +func (m *Note) GetPackage() *PackageManager_Package { + if x, ok := m.GetNoteType().(*Note_Package); ok { + return x.Package + } + return nil +} + +func (m *Note) GetDeployable() *Deployable { + if x, ok := m.GetNoteType().(*Note_Deployable); ok { + return x.Deployable + } + return nil +} + +func (m *Note) GetDiscovery() *Discovery { + if x, ok := m.GetNoteType().(*Note_Discovery); ok { + return x.Discovery + } + return nil +} + +func (m *Note) GetRelatedUrl() []*Note_RelatedUrl { + if m != nil { + return m.RelatedUrl + } + return nil +} + +func (m *Note) 
GetExpirationTime() *google_protobuf3.Timestamp { + if m != nil { + return m.ExpirationTime + } + return nil +} + +func (m *Note) GetCreateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Note) GetUpdateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Note) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Note_OneofMarshaler, _Note_OneofUnmarshaler, _Note_OneofSizer, []interface{}{ + (*Note_VulnerabilityType)(nil), + (*Note_BuildType)(nil), + (*Note_BaseImage)(nil), + (*Note_Package)(nil), + (*Note_Deployable)(nil), + (*Note_Discovery)(nil), + } +} + +func _Note_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Note) + // note_type + switch x := m.NoteType.(type) { + case *Note_VulnerabilityType: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VulnerabilityType); err != nil { + return err + } + case *Note_BuildType: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildType); err != nil { + return err + } + case *Note_BaseImage: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BaseImage); err != nil { + return err + } + case *Note_Package: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Package); err != nil { + return err + } + case *Note_Deployable: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployable); err != nil { + return err + } + case *Note_Discovery: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Note.NoteType has unexpected type %T", x) + } + return nil +} + +func 
_Note_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Note) + switch tag { + case 6: // note_type.vulnerability_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VulnerabilityType) + err := b.DecodeMessage(msg) + m.NoteType = &Note_VulnerabilityType{msg} + return true, err + case 8: // note_type.build_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildType) + err := b.DecodeMessage(msg) + m.NoteType = &Note_BuildType{msg} + return true, err + case 13: // note_type.base_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DockerImage_Basis) + err := b.DecodeMessage(msg) + m.NoteType = &Note_BaseImage{msg} + return true, err + case 14: // note_type.package + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PackageManager_Package) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Package{msg} + return true, err + case 17: // note_type.deployable + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Deployable) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Deployable{msg} + return true, err + case 18: // note_type.discovery + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Discovery) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Discovery{msg} + return true, err + default: + return false, nil + } +} + +func _Note_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Note) + // note_type + switch x := m.NoteType.(type) { + case *Note_VulnerabilityType: + s := proto.Size(x.VulnerabilityType) + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_BuildType: + s := proto.Size(x.BuildType) + n += proto.SizeVarint(8<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_BaseImage: + s 
:= proto.Size(x.BaseImage) + n += proto.SizeVarint(13<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Package: + s := proto.Size(x.Package) + n += proto.SizeVarint(14<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Deployable: + s := proto.Size(x.Deployable) + n += proto.SizeVarint(17<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Discovery: + s := proto.Size(x.Discovery) + n += proto.SizeVarint(18<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Metadata for any related URL information +type Note_RelatedUrl struct { + // Specific URL to associate with the note + Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` + // Label to describe usage of the URL + Label string `protobuf:"bytes,2,opt,name=label" json:"label,omitempty"` +} + +func (m *Note_RelatedUrl) Reset() { *m = Note_RelatedUrl{} } +func (m *Note_RelatedUrl) String() string { return proto.CompactTextString(m) } +func (*Note_RelatedUrl) ProtoMessage() {} +func (*Note_RelatedUrl) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37, 0} } + +func (m *Note_RelatedUrl) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Note_RelatedUrl) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +// `Occurrence` includes information about analysis occurrences for an image. +type Occurrence struct { + // Output only. The name of the `Occurrence` in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The unique URL of the image or the container for which the `Occurrence` + // applies. For example, https://gcr.io/project/image@sha256:foo This field + // can be used as a filter in list requests. 
+ ResourceUrl string `protobuf:"bytes,2,opt,name=resource_url,json=resourceUrl" json:"resource_url,omitempty"` + // An analysis note associated with this image, in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + // This field can be used as a filter in list requests. + NoteName string `protobuf:"bytes,3,opt,name=note_name,json=noteName" json:"note_name,omitempty"` + // Output only. This explicitly denotes which of the `Occurrence` details are + // specified. This field can be used as a filter in list requests. + Kind Note_Kind `protobuf:"varint,6,opt,name=kind,enum=grafeas.v1alpha1.api.Note_Kind" json:"kind,omitempty"` + // Describes the details of the vulnerability `Note` found in this resource. + // + // Types that are valid to be assigned to Details: + // *Occurrence_VulnerabilityDetails + // *Occurrence_BuildDetails + // *Occurrence_DerivedImage + // *Occurrence_Installation + // *Occurrence_Deployment + // *Occurrence_Discovered + Details isOccurrence_Details `protobuf_oneof:"details"` + // A description of actions that can be taken to remedy the `Note` + Remediation string `protobuf:"bytes,5,opt,name=remediation" json:"remediation,omitempty"` + // Output only. The time this `Occurrence` was created. + CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + // Output only. The time this `Occurrence` was last updated. 
+ UpdateTime *google_protobuf3.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` +} + +func (m *Occurrence) Reset() { *m = Occurrence{} } +func (m *Occurrence) String() string { return proto.CompactTextString(m) } +func (*Occurrence) ProtoMessage() {} +func (*Occurrence) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{38} } + +type isOccurrence_Details interface { + isOccurrence_Details() +} + +type Occurrence_VulnerabilityDetails struct { + VulnerabilityDetails *VulnerabilityType_VulnerabilityDetails `protobuf:"bytes,8,opt,name=vulnerability_details,json=vulnerabilityDetails,oneof"` +} +type Occurrence_BuildDetails struct { + BuildDetails *BuildDetails `protobuf:"bytes,7,opt,name=build_details,json=buildDetails,oneof"` +} +type Occurrence_DerivedImage struct { + DerivedImage *DockerImage_Derived `protobuf:"bytes,11,opt,name=derived_image,json=derivedImage,oneof"` +} +type Occurrence_Installation struct { + Installation *PackageManager_Installation `protobuf:"bytes,12,opt,name=installation,oneof"` +} +type Occurrence_Deployment struct { + Deployment *Deployable_Deployment `protobuf:"bytes,14,opt,name=deployment,oneof"` +} +type Occurrence_Discovered struct { + Discovered *Discovery_Discovered `protobuf:"bytes,15,opt,name=discovered,oneof"` +} + +func (*Occurrence_VulnerabilityDetails) isOccurrence_Details() {} +func (*Occurrence_BuildDetails) isOccurrence_Details() {} +func (*Occurrence_DerivedImage) isOccurrence_Details() {} +func (*Occurrence_Installation) isOccurrence_Details() {} +func (*Occurrence_Deployment) isOccurrence_Details() {} +func (*Occurrence_Discovered) isOccurrence_Details() {} + +func (m *Occurrence) GetDetails() isOccurrence_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *Occurrence) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Occurrence) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" 
+} + +func (m *Occurrence) GetNoteName() string { + if m != nil { + return m.NoteName + } + return "" +} + +func (m *Occurrence) GetKind() Note_Kind { + if m != nil { + return m.Kind + } + return Note_KIND_UNSPECIFIED +} + +func (m *Occurrence) GetVulnerabilityDetails() *VulnerabilityType_VulnerabilityDetails { + if x, ok := m.GetDetails().(*Occurrence_VulnerabilityDetails); ok { + return x.VulnerabilityDetails + } + return nil +} + +func (m *Occurrence) GetBuildDetails() *BuildDetails { + if x, ok := m.GetDetails().(*Occurrence_BuildDetails); ok { + return x.BuildDetails + } + return nil +} + +func (m *Occurrence) GetDerivedImage() *DockerImage_Derived { + if x, ok := m.GetDetails().(*Occurrence_DerivedImage); ok { + return x.DerivedImage + } + return nil +} + +func (m *Occurrence) GetInstallation() *PackageManager_Installation { + if x, ok := m.GetDetails().(*Occurrence_Installation); ok { + return x.Installation + } + return nil +} + +func (m *Occurrence) GetDeployment() *Deployable_Deployment { + if x, ok := m.GetDetails().(*Occurrence_Deployment); ok { + return x.Deployment + } + return nil +} + +func (m *Occurrence) GetDiscovered() *Discovery_Discovered { + if x, ok := m.GetDetails().(*Occurrence_Discovered); ok { + return x.Discovered + } + return nil +} + +func (m *Occurrence) GetRemediation() string { + if m != nil { + return m.Remediation + } + return "" +} + +func (m *Occurrence) GetCreateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Occurrence) GetUpdateTime() *google_protobuf3.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Occurrence) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Occurrence_OneofMarshaler, _Occurrence_OneofUnmarshaler, _Occurrence_OneofSizer, []interface{}{ + (*Occurrence_VulnerabilityDetails)(nil), + (*Occurrence_BuildDetails)(nil), + (*Occurrence_DerivedImage)(nil), + (*Occurrence_Installation)(nil), + (*Occurrence_Deployment)(nil), + (*Occurrence_Discovered)(nil), + } +} + +func _Occurrence_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_VulnerabilityDetails: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VulnerabilityDetails); err != nil { + return err + } + case *Occurrence_BuildDetails: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildDetails); err != nil { + return err + } + case *Occurrence_DerivedImage: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DerivedImage); err != nil { + return err + } + case *Occurrence_Installation: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Installation); err != nil { + return err + } + case *Occurrence_Deployment: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployment); err != nil { + return err + } + case *Occurrence_Discovered: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovered); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Occurrence.Details has unexpected type %T", x) + } + return nil +} + +func _Occurrence_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Occurrence) + switch tag { + case 8: // details.vulnerability_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(VulnerabilityType_VulnerabilityDetails) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_VulnerabilityDetails{msg} + return true, err + case 7: // details.build_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildDetails) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_BuildDetails{msg} + return true, err + case 11: // details.derived_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DockerImage_Derived) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_DerivedImage{msg} + return true, err + case 12: // details.installation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PackageManager_Installation) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Installation{msg} + return true, err + case 14: // details.deployment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Deployable_Deployment) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Deployment{msg} + return true, err + case 15: // details.discovered + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Discovery_Discovered) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Discovered{msg} + return true, err + default: + return false, nil + } +} + +func _Occurrence_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_VulnerabilityDetails: + s := proto.Size(x.VulnerabilityDetails) + n += proto.SizeVarint(8<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_BuildDetails: + s := proto.Size(x.BuildDetails) + n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_DerivedImage: + s := proto.Size(x.DerivedImage) + n += proto.SizeVarint(11<<3 | proto.WireBytes) + n += 
proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Installation: + s := proto.Size(x.Installation) + n += proto.SizeVarint(12<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Deployment: + s := proto.Size(x.Deployment) + n += proto.SizeVarint(14<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Discovered: + s := proto.Size(x.Discovered) + n += proto.SizeVarint(15<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// PackageManager provides metadata about available / installed packages. +type PackageManager struct { +} + +func (m *PackageManager) Reset() { *m = PackageManager{} } +func (m *PackageManager) String() string { return proto.CompactTextString(m) } +func (*PackageManager) ProtoMessage() {} +func (*PackageManager) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39} } + +// This represents a particular channel of distribution for a given package. +// e.g. Debian's jessie-backports dpkg mirror +type PackageManager_Distribution struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri" json:"cpe_uri,omitempty"` + // The CPU architecture for which packages in this distribution + // channel were built + Architecture PackageManager_Architecture `protobuf:"varint,2,opt,name=architecture,enum=grafeas.v1alpha1.api.PackageManager_Architecture" json:"architecture,omitempty"` + // The latest available version of this package in + // this distribution channel. + LatestVersion *VulnerabilityType_Version `protobuf:"bytes,3,opt,name=latest_version,json=latestVersion" json:"latest_version,omitempty"` + // A freeform string denoting the maintainer of this package. 
+ Maintainer string `protobuf:"bytes,4,opt,name=maintainer" json:"maintainer,omitempty"` + // The distribution channel-specific homepage for this package. + Url string `protobuf:"bytes,6,opt,name=url" json:"url,omitempty"` + // The distribution channel-specific description of this package. + Description string `protobuf:"bytes,7,opt,name=description" json:"description,omitempty"` +} + +func (m *PackageManager_Distribution) Reset() { *m = PackageManager_Distribution{} } +func (m *PackageManager_Distribution) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Distribution) ProtoMessage() {} +func (*PackageManager_Distribution) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 0} } + +func (m *PackageManager_Distribution) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *PackageManager_Distribution) GetArchitecture() PackageManager_Architecture { + if m != nil { + return m.Architecture + } + return PackageManager_ARCHITECTURE_UNSPECIFIED +} + +func (m *PackageManager_Distribution) GetLatestVersion() *VulnerabilityType_Version { + if m != nil { + return m.LatestVersion + } + return nil +} + +func (m *PackageManager_Distribution) GetMaintainer() string { + if m != nil { + return m.Maintainer + } + return "" +} + +func (m *PackageManager_Distribution) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *PackageManager_Distribution) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// An occurrence of a particular package installation found within a +// system's filesystem. +// e.g. glibc was found in /var/lib/dpkg/status +type PackageManager_Location struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri" json:"cpe_uri,omitempty"` + // The version installed at this location. 
+ Version *VulnerabilityType_Version `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"` + // The path from which we gathered that this package/version is installed. + Path string `protobuf:"bytes,3,opt,name=path" json:"path,omitempty"` +} + +func (m *PackageManager_Location) Reset() { *m = PackageManager_Location{} } +func (m *PackageManager_Location) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Location) ProtoMessage() {} +func (*PackageManager_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 1} } + +func (m *PackageManager_Location) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *PackageManager_Location) GetVersion() *VulnerabilityType_Version { + if m != nil { + return m.Version + } + return nil +} + +func (m *PackageManager_Location) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +// This represents a particular package that is distributed over +// various channels. +// e.g. glibc (aka libc6) is distributed by many, at various versions. +type PackageManager_Package struct { + // The name of the package. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The various channels by which a package is distributed. 
+ Distribution []*PackageManager_Distribution `protobuf:"bytes,10,rep,name=distribution" json:"distribution,omitempty"` +} + +func (m *PackageManager_Package) Reset() { *m = PackageManager_Package{} } +func (m *PackageManager_Package) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Package) ProtoMessage() {} +func (*PackageManager_Package) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 2} } + +func (m *PackageManager_Package) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PackageManager_Package) GetDistribution() []*PackageManager_Distribution { + if m != nil { + return m.Distribution + } + return nil +} + +// This represents how a particular software package may be installed on +// a system. +type PackageManager_Installation struct { + // Output only. The name of the installed package. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // All of the places within the filesystem versions of this package + // have been found. + Location []*PackageManager_Location `protobuf:"bytes,2,rep,name=location" json:"location,omitempty"` +} + +func (m *PackageManager_Installation) Reset() { *m = PackageManager_Installation{} } +func (m *PackageManager_Installation) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Installation) ProtoMessage() {} +func (*PackageManager_Installation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 3} } + +func (m *PackageManager_Installation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PackageManager_Installation) GetLocation() []*PackageManager_Location { + if m != nil { + return m.Location + } + return nil +} + +// An attestation wrapper with a PGP-compatible signature. +// This message only supports ATTACHED signatures, where the payload that is +// signed is included alongside the signature itself in the same file. 
+type PgpSignedAttestation struct { + // The raw content of the signature, as output by gpg or equivalent. Since + // this message only supports attached signatures, the payload that was signed + // must be attached. While the signature format supported is dependent on the + // verification implementation, currently only ASCII-armored (`--armor` to + // gpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are + // supported. + // Concretely, `gpg --sign --armor --output=signature.gpg payload.json` will + // create the signature content expected in this field in `signature.gpg` for + // the `payload.json` attestation payload. + Signature string `protobuf:"bytes,1,opt,name=signature" json:"signature,omitempty"` + // Type (e.g. schema) of the attestation payload that was signed. + // The verifier must ensure that the provided type is one that the verifier + // supports, and that the attestation payload is a valid instantiation of that + // type (e.g. by validating a JSON schema). + ContentType PgpSignedAttestation_ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,enum=grafeas.v1alpha1.api.PgpSignedAttestation_ContentType" json:"content_type,omitempty"` + // This field is used by verifiers to select the public key used to validate + // the signature. Note that the policy of the verifier ultimately determines + // which public keys verify a signature based on the context of the + // verification. There is no guarantee validation will succeed if the + // verifier has no key matching this ID, even if it has a key under a + // different ID that would verify the signature. Note that this ID should also + // be present in the signature content above, but that is not expected to be + // used by the verifier. 
+ // + // Types that are valid to be assigned to KeyId: + // *PgpSignedAttestation_PgpKeyId + KeyId isPgpSignedAttestation_KeyId `protobuf_oneof:"key_id"` +} + +func (m *PgpSignedAttestation) Reset() { *m = PgpSignedAttestation{} } +func (m *PgpSignedAttestation) String() string { return proto.CompactTextString(m) } +func (*PgpSignedAttestation) ProtoMessage() {} +func (*PgpSignedAttestation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{40} } + +type isPgpSignedAttestation_KeyId interface { + isPgpSignedAttestation_KeyId() +} + +type PgpSignedAttestation_PgpKeyId struct { + PgpKeyId string `protobuf:"bytes,2,opt,name=pgp_key_id,json=pgpKeyId,oneof"` +} + +func (*PgpSignedAttestation_PgpKeyId) isPgpSignedAttestation_KeyId() {} + +func (m *PgpSignedAttestation) GetKeyId() isPgpSignedAttestation_KeyId { + if m != nil { + return m.KeyId + } + return nil +} + +func (m *PgpSignedAttestation) GetSignature() string { + if m != nil { + return m.Signature + } + return "" +} + +func (m *PgpSignedAttestation) GetContentType() PgpSignedAttestation_ContentType { + if m != nil { + return m.ContentType + } + return PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED +} + +func (m *PgpSignedAttestation) GetPgpKeyId() string { + if x, ok := m.GetKeyId().(*PgpSignedAttestation_PgpKeyId); ok { + return x.PgpKeyId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PgpSignedAttestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PgpSignedAttestation_OneofMarshaler, _PgpSignedAttestation_OneofUnmarshaler, _PgpSignedAttestation_OneofSizer, []interface{}{ + (*PgpSignedAttestation_PgpKeyId)(nil), + } +} + +func _PgpSignedAttestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PgpKeyId) + case nil: + default: + return fmt.Errorf("PgpSignedAttestation.KeyId has unexpected type %T", x) + } + return nil +} + +func _PgpSignedAttestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PgpSignedAttestation) + switch tag { + case 2: // key_id.pgp_key_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.KeyId = &PgpSignedAttestation_PgpKeyId{x} + return true, err + default: + return false, nil + } +} + +func _PgpSignedAttestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.PgpKeyId))) + n += len(x.PgpKeyId) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Source describes the location of the source used for the build. +type Source struct { + // Source location information. + // + // Types that are valid to be assigned to Source: + // *Source_StorageSource + // *Source_RepoSource + Source isSource_Source `protobuf_oneof:"source"` + // If provided, the input binary artifacts for the build came from this + // location. 
+ ArtifactStorageSource *StorageSource `protobuf:"bytes,4,opt,name=artifact_storage_source,json=artifactStorageSource" json:"artifact_storage_source,omitempty"` + // Hash(es) of the build source, which can be used to verify that the original + // source integrity was maintained in the build. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. + // + // If the build source came in a single package such as a gzipped tarfile + // (.tar.gz), the FileHash will be for the single path to that file. + FileHashes map[string]*FileHashes `protobuf:"bytes,3,rep,name=file_hashes,json=fileHashes" json:"file_hashes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // If provided, the source code used for the build came from this location. + Context *SourceContext `protobuf:"bytes,7,opt,name=context" json:"context,omitempty"` + // If provided, some of the source code used for the build may be found in + // these locations, in the case where the source repository had multiple + // remotes or submodules. This list will not include the context specified in + // the context field. 
+ AdditionalContexts []*SourceContext `protobuf:"bytes,8,rep,name=additional_contexts,json=additionalContexts" json:"additional_contexts,omitempty"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{41} } + +type isSource_Source interface { + isSource_Source() +} + +type Source_StorageSource struct { + StorageSource *StorageSource `protobuf:"bytes,1,opt,name=storage_source,json=storageSource,oneof"` +} +type Source_RepoSource struct { + RepoSource *RepoSource `protobuf:"bytes,2,opt,name=repo_source,json=repoSource,oneof"` +} + +func (*Source_StorageSource) isSource_Source() {} +func (*Source_RepoSource) isSource_Source() {} + +func (m *Source) GetSource() isSource_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Source) GetStorageSource() *StorageSource { + if x, ok := m.GetSource().(*Source_StorageSource); ok { + return x.StorageSource + } + return nil +} + +func (m *Source) GetRepoSource() *RepoSource { + if x, ok := m.GetSource().(*Source_RepoSource); ok { + return x.RepoSource + } + return nil +} + +func (m *Source) GetArtifactStorageSource() *StorageSource { + if m != nil { + return m.ArtifactStorageSource + } + return nil +} + +func (m *Source) GetFileHashes() map[string]*FileHashes { + if m != nil { + return m.FileHashes + } + return nil +} + +func (m *Source) GetContext() *SourceContext { + if m != nil { + return m.Context + } + return nil +} + +func (m *Source) GetAdditionalContexts() []*SourceContext { + if m != nil { + return m.AdditionalContexts + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Source) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Source_OneofMarshaler, _Source_OneofUnmarshaler, _Source_OneofSizer, []interface{}{ + (*Source_StorageSource)(nil), + (*Source_RepoSource)(nil), + } +} + +func _Source_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StorageSource); err != nil { + return err + } + case *Source_RepoSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RepoSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Source.Source has unexpected type %T", x) + } + return nil +} + +func _Source_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Source) + switch tag { + case 1: // source.storage_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StorageSource) + err := b.DecodeMessage(msg) + m.Source = &Source_StorageSource{msg} + return true, err + case 2: // source.repo_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RepoSource) + err := b.DecodeMessage(msg) + m.Source = &Source_RepoSource{msg} + return true, err + default: + return false, nil + } +} + +func _Source_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + s := proto.Size(x.StorageSource) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Source_RepoSource: + s := proto.Size(x.RepoSource) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// RepoSource describes the location of the source in a Google Cloud Source +// Repository. +type RepoSource struct { + // ID of the project that owns the repo. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"` + // Name of the repo. + RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName" json:"repo_name,omitempty"` + // A revision within the source repository must be specified in + // one of these ways. + // + // Types that are valid to be assigned to Revision: + // *RepoSource_BranchName + // *RepoSource_TagName + // *RepoSource_CommitSha + Revision isRepoSource_Revision `protobuf_oneof:"revision"` +} + +func (m *RepoSource) Reset() { *m = RepoSource{} } +func (m *RepoSource) String() string { return proto.CompactTextString(m) } +func (*RepoSource) ProtoMessage() {} +func (*RepoSource) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{42} } + +type isRepoSource_Revision interface { + isRepoSource_Revision() +} + +type RepoSource_BranchName struct { + BranchName string `protobuf:"bytes,3,opt,name=branch_name,json=branchName,oneof"` +} +type RepoSource_TagName struct { + TagName string `protobuf:"bytes,4,opt,name=tag_name,json=tagName,oneof"` +} +type RepoSource_CommitSha struct { + CommitSha string `protobuf:"bytes,5,opt,name=commit_sha,json=commitSha,oneof"` +} + +func (*RepoSource_BranchName) isRepoSource_Revision() {} +func (*RepoSource_TagName) isRepoSource_Revision() {} +func (*RepoSource_CommitSha) isRepoSource_Revision() {} + +func (m *RepoSource) GetRevision() isRepoSource_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *RepoSource) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RepoSource) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +func (m *RepoSource) GetBranchName() string { + if x, ok 
:= m.GetRevision().(*RepoSource_BranchName); ok { + return x.BranchName + } + return "" +} + +func (m *RepoSource) GetTagName() string { + if x, ok := m.GetRevision().(*RepoSource_TagName); ok { + return x.TagName + } + return "" +} + +func (m *RepoSource) GetCommitSha() string { + if x, ok := m.GetRevision().(*RepoSource_CommitSha); ok { + return x.CommitSha + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*RepoSource) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoSource_OneofMarshaler, _RepoSource_OneofUnmarshaler, _RepoSource_OneofSizer, []interface{}{ + (*RepoSource_BranchName)(nil), + (*RepoSource_TagName)(nil), + (*RepoSource_CommitSha)(nil), + } +} + +func _RepoSource_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.BranchName) + case *RepoSource_TagName: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TagName) + case *RepoSource_CommitSha: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CommitSha) + case nil: + default: + return fmt.Errorf("RepoSource.Revision has unexpected type %T", x) + } + return nil +} + +func _RepoSource_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoSource) + switch tag { + case 3: // revision.branch_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_BranchName{x} + return true, err + case 4: // revision.tag_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_TagName{x} + return true, err + 
case 5: // revision.commit_sha + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_CommitSha{x} + return true, err + default: + return false, nil + } +} + +func _RepoSource_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.BranchName))) + n += len(x.BranchName) + case *RepoSource_TagName: + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.TagName))) + n += len(x.TagName) + case *RepoSource_CommitSha: + n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.CommitSha))) + n += len(x.CommitSha) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// StorageSource describes the location of the source in an archive file in +// Google Cloud Storage. +type StorageSource struct { + // Google Cloud Storage bucket containing source (see [Bucket Name + // Requirements] + // (https://cloud.google.com/storage/docs/bucket-naming#requirements)). + Bucket string `protobuf:"bytes,1,opt,name=bucket" json:"bucket,omitempty"` + // Google Cloud Storage object containing source. + Object string `protobuf:"bytes,2,opt,name=object" json:"object,omitempty"` + // Google Cloud Storage generation for the object. 
+ Generation int64 `protobuf:"varint,3,opt,name=generation" json:"generation,omitempty"` +} + +func (m *StorageSource) Reset() { *m = StorageSource{} } +func (m *StorageSource) String() string { return proto.CompactTextString(m) } +func (*StorageSource) ProtoMessage() {} +func (*StorageSource) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{43} } + +func (m *StorageSource) GetBucket() string { + if m != nil { + return m.Bucket + } + return "" +} + +func (m *StorageSource) GetObject() string { + if m != nil { + return m.Object + } + return "" +} + +func (m *StorageSource) GetGeneration() int64 { + if m != nil { + return m.Generation + } + return 0 +} + +// VulnerabilityType provides metadata about a security vulnerability. +type VulnerabilityType struct { + // The CVSS score for this Vulnerability. + CvssScore float32 `protobuf:"fixed32,2,opt,name=cvss_score,json=cvssScore" json:"cvss_score,omitempty"` + // Note provider assigned impact of the vulnerability + Severity VulnerabilityType_Severity `protobuf:"varint,3,opt,name=severity,enum=grafeas.v1alpha1.api.VulnerabilityType_Severity" json:"severity,omitempty"` + // All information about the package to specifically identify this + // vulnerability. One entry per (version range and cpe_uri) the + // package vulnerability has manifested in. 
+ Details []*VulnerabilityType_Detail `protobuf:"bytes,4,rep,name=details" json:"details,omitempty"` +} + +func (m *VulnerabilityType) Reset() { *m = VulnerabilityType{} } +func (m *VulnerabilityType) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType) ProtoMessage() {} +func (*VulnerabilityType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44} } + +func (m *VulnerabilityType) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *VulnerabilityType) GetSeverity() VulnerabilityType_Severity { + if m != nil { + return m.Severity + } + return VulnerabilityType_SEVERITY_UNSPECIFIED +} + +func (m *VulnerabilityType) GetDetails() []*VulnerabilityType_Detail { + if m != nil { + return m.Details + } + return nil +} + +// Version contains structured information about the version of the package. +// For a discussion of this in Debian/Ubuntu: +// http://serverfault.com/questions/604541/debian-packages-version-convention +// For a discussion of this in Redhat/Fedora/Centos: +// http://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/ +type VulnerabilityType_Version struct { + // Used to correct mistakes in the version numbering scheme. + Epoch int32 `protobuf:"varint,1,opt,name=epoch" json:"epoch,omitempty"` + // The main part of the version name. + Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + // The iteration of the package build from the above version. + Revision string `protobuf:"bytes,3,opt,name=revision" json:"revision,omitempty"` + // Distinguish between sentinel MIN/MAX versions and normal versions. + // If kind is not NORMAL, then the other fields are ignored. 
+ Kind VulnerabilityType_Version_VersionKind `protobuf:"varint,5,opt,name=kind,enum=grafeas.v1alpha1.api.VulnerabilityType_Version_VersionKind" json:"kind,omitempty"` +} + +func (m *VulnerabilityType_Version) Reset() { *m = VulnerabilityType_Version{} } +func (m *VulnerabilityType_Version) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Version) ProtoMessage() {} +func (*VulnerabilityType_Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44, 0} } + +func (m *VulnerabilityType_Version) GetEpoch() int32 { + if m != nil { + return m.Epoch + } + return 0 +} + +func (m *VulnerabilityType_Version) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VulnerabilityType_Version) GetRevision() string { + if m != nil { + return m.Revision + } + return "" +} + +func (m *VulnerabilityType_Version) GetKind() VulnerabilityType_Version_VersionKind { + if m != nil { + return m.Kind + } + return VulnerabilityType_Version_NORMAL +} + +// Identifies all occurrences of this vulnerability in the package for a +// specific distro/location +// For example: glibc in cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2 +type VulnerabilityType_Detail struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in + // which the vulnerability manifests. Examples include distro or storage + // location for vulnerable jar. + // This field can be used as a filter in list requests. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri" json:"cpe_uri,omitempty"` + // The name of the package where the vulnerability was found. + // This field can be used as a filter in list requests. + Package string `protobuf:"bytes,8,opt,name=package" json:"package,omitempty"` + // The min version of the package in which the vulnerability exists. 
+ MinAffectedVersion *VulnerabilityType_Version `protobuf:"bytes,6,opt,name=min_affected_version,json=minAffectedVersion" json:"min_affected_version,omitempty"` + // The max version of the package in which the vulnerability exists. + // This field can be used as a filter in list requests. + MaxAffectedVersion *VulnerabilityType_Version `protobuf:"bytes,7,opt,name=max_affected_version,json=maxAffectedVersion" json:"max_affected_version,omitempty"` + // The severity (eg: distro assigned severity) for this vulnerability. + SeverityName string `protobuf:"bytes,4,opt,name=severity_name,json=severityName" json:"severity_name,omitempty"` + // A vendor-specific description of this note. + Description string `protobuf:"bytes,9,opt,name=description" json:"description,omitempty"` + // The fix for this specific package version. + FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,5,opt,name=fixed_location,json=fixedLocation" json:"fixed_location,omitempty"` + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + PackageType string `protobuf:"bytes,10,opt,name=package_type,json=packageType" json:"package_type,omitempty"` +} + +func (m *VulnerabilityType_Detail) Reset() { *m = VulnerabilityType_Detail{} } +func (m *VulnerabilityType_Detail) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Detail) ProtoMessage() {} +func (*VulnerabilityType_Detail) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44, 1} } + +func (m *VulnerabilityType_Detail) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *VulnerabilityType_Detail) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *VulnerabilityType_Detail) GetMinAffectedVersion() *VulnerabilityType_Version { + if m != nil { + return m.MinAffectedVersion + } + return nil +} + +func (m *VulnerabilityType_Detail) GetMaxAffectedVersion() *VulnerabilityType_Version { + if m 
!= nil { + return m.MaxAffectedVersion + } + return nil +} + +func (m *VulnerabilityType_Detail) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +func (m *VulnerabilityType_Detail) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *VulnerabilityType_Detail) GetFixedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *VulnerabilityType_Detail) GetPackageType() string { + if m != nil { + return m.PackageType + } + return "" +} + +// Used by Occurrence to point to where the vulnerability exists and how +// to fix it. +type VulnerabilityType_VulnerabilityDetails struct { + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + Type string `protobuf:"bytes,3,opt,name=type" json:"type,omitempty"` + // Output only. The note provider assigned Severity of the vulnerability. + Severity VulnerabilityType_Severity `protobuf:"varint,4,opt,name=severity,enum=grafeas.v1alpha1.api.VulnerabilityType_Severity" json:"severity,omitempty"` + // Output only. The CVSS score of this vulnerability. CVSS score is on a + // scale of 0-10 where 0 indicates low severity and 10 indicates high + // severity. + CvssScore float32 `protobuf:"fixed32,5,opt,name=cvss_score,json=cvssScore" json:"cvss_score,omitempty"` + // The set of affected locations and their fixes (if available) within + // the associated resource. 
+ PackageIssue []*VulnerabilityType_PackageIssue `protobuf:"bytes,6,rep,name=package_issue,json=packageIssue" json:"package_issue,omitempty"` +} + +func (m *VulnerabilityType_VulnerabilityDetails) Reset() { + *m = VulnerabilityType_VulnerabilityDetails{} +} +func (m *VulnerabilityType_VulnerabilityDetails) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_VulnerabilityDetails) ProtoMessage() {} +func (*VulnerabilityType_VulnerabilityDetails) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{44, 2} +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetSeverity() VulnerabilityType_Severity { + if m != nil { + return m.Severity + } + return VulnerabilityType_SEVERITY_UNSPECIFIED +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetPackageIssue() []*VulnerabilityType_PackageIssue { + if m != nil { + return m.PackageIssue + } + return nil +} + +// This message wraps a location affected by a vulnerability and its +// associated fix (if one is available). +type VulnerabilityType_PackageIssue struct { + // The location of the vulnerability. + AffectedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,1,opt,name=affected_location,json=affectedLocation" json:"affected_location,omitempty"` + // The location of the available fix for vulnerability. + FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,2,opt,name=fixed_location,json=fixedLocation" json:"fixed_location,omitempty"` + // The severity (eg: distro assigned severity) for this vulnerability. 
+ SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName" json:"severity_name,omitempty"` +} + +func (m *VulnerabilityType_PackageIssue) Reset() { *m = VulnerabilityType_PackageIssue{} } +func (m *VulnerabilityType_PackageIssue) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_PackageIssue) ProtoMessage() {} +func (*VulnerabilityType_PackageIssue) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{44, 3} +} + +func (m *VulnerabilityType_PackageIssue) GetAffectedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.AffectedLocation + } + return nil +} + +func (m *VulnerabilityType_PackageIssue) GetFixedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *VulnerabilityType_PackageIssue) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +// The location of the vulnerability +type VulnerabilityType_VulnerabilityLocation struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) + // format. Examples include distro or storage location for vulnerable jar. + // This field can be used as a filter in list requests. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri" json:"cpe_uri,omitempty"` + // The package being described. + Package string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` + // The version of the package being described. + // This field can be used as a filter in list requests. 
+ Version *VulnerabilityType_Version `protobuf:"bytes,4,opt,name=version" json:"version,omitempty"` +} + +func (m *VulnerabilityType_VulnerabilityLocation) Reset() { + *m = VulnerabilityType_VulnerabilityLocation{} +} +func (m *VulnerabilityType_VulnerabilityLocation) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_VulnerabilityLocation) ProtoMessage() {} +func (*VulnerabilityType_VulnerabilityLocation) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{44, 4} +} + +func (m *VulnerabilityType_VulnerabilityLocation) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityLocation) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityLocation) GetVersion() *VulnerabilityType_Version { + if m != nil { + return m.Version + } + return nil +} + +// A SourceContext is a reference to a tree of files. A SourceContext together +// with a path point to a unique revision of a single file or directory. +type SourceContext struct { + // A SourceContext can refer any one of the following types of repositories. + // + // Types that are valid to be assigned to Context: + // *SourceContext_CloudRepo + // *SourceContext_Gerrit + // *SourceContext_Git + Context isSourceContext_Context `protobuf_oneof:"context"` + // Labels with user defined metadata. 
+ Labels map[string]string `protobuf:"bytes,4,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{45} } + +type isSourceContext_Context interface { + isSourceContext_Context() +} + +type SourceContext_CloudRepo struct { + CloudRepo *CloudRepoSourceContext `protobuf:"bytes,1,opt,name=cloud_repo,json=cloudRepo,oneof"` +} +type SourceContext_Gerrit struct { + Gerrit *GerritSourceContext `protobuf:"bytes,2,opt,name=gerrit,oneof"` +} +type SourceContext_Git struct { + Git *GitSourceContext `protobuf:"bytes,3,opt,name=git,oneof"` +} + +func (*SourceContext_CloudRepo) isSourceContext_Context() {} +func (*SourceContext_Gerrit) isSourceContext_Context() {} +func (*SourceContext_Git) isSourceContext_Context() {} + +func (m *SourceContext) GetContext() isSourceContext_Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *SourceContext) GetCloudRepo() *CloudRepoSourceContext { + if x, ok := m.GetContext().(*SourceContext_CloudRepo); ok { + return x.CloudRepo + } + return nil +} + +func (m *SourceContext) GetGerrit() *GerritSourceContext { + if x, ok := m.GetContext().(*SourceContext_Gerrit); ok { + return x.Gerrit + } + return nil +} + +func (m *SourceContext) GetGit() *GitSourceContext { + if x, ok := m.GetContext().(*SourceContext_Git); ok { + return x.Git + } + return nil +} + +func (m *SourceContext) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SourceContext_OneofMarshaler, _SourceContext_OneofUnmarshaler, _SourceContext_OneofSizer, []interface{}{ + (*SourceContext_CloudRepo)(nil), + (*SourceContext_Gerrit)(nil), + (*SourceContext_Git)(nil), + } +} + +func _SourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudRepo); err != nil { + return err + } + case *SourceContext_Gerrit: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gerrit); err != nil { + return err + } + case *SourceContext_Git: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Git); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SourceContext.Context has unexpected type %T", x) + } + return nil +} + +func _SourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SourceContext) + switch tag { + case 1: // context.cloud_repo + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudRepoSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_CloudRepo{msg} + return true, err + case 2: // context.gerrit + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GerritSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Gerrit{msg} + return true, err + case 3: // context.git + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GitSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Git{msg} + return true, err + default: + return false, nil + } +} 
+ +func _SourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + s := proto.Size(x.CloudRepo) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Gerrit: + s := proto.Size(x.Gerrit) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Git: + s := proto.Size(x.Git) + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An alias to a repo revision. +type AliasContext struct { + // The alias kind. + Kind AliasContext_Kind `protobuf:"varint,1,opt,name=kind,enum=grafeas.v1alpha1.api.AliasContext_Kind" json:"kind,omitempty"` + // The alias name. + Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` +} + +func (m *AliasContext) Reset() { *m = AliasContext{} } +func (m *AliasContext) String() string { return proto.CompactTextString(m) } +func (*AliasContext) ProtoMessage() {} +func (*AliasContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{46} } + +func (m *AliasContext) GetKind() AliasContext_Kind { + if m != nil { + return m.Kind + } + return AliasContext_KIND_UNSPECIFIED +} + +func (m *AliasContext) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A CloudRepoSourceContext denotes a particular revision in a Google Cloud +// Source Repo. +type CloudRepoSourceContext struct { + // The ID of the repo. + RepoId *RepoId `protobuf:"bytes,1,opt,name=repo_id,json=repoId" json:"repo_id,omitempty"` + // A revision in a Cloud Repo can be identified by either its revision ID or + // its alias. 
+ // + // Types that are valid to be assigned to Revision: + // *CloudRepoSourceContext_RevisionId + // *CloudRepoSourceContext_AliasContext + Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` +} + +func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } +func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudRepoSourceContext) ProtoMessage() {} +func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{47} } + +type isCloudRepoSourceContext_Revision interface { + isCloudRepoSourceContext_Revision() +} + +type CloudRepoSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,oneof"` +} +type CloudRepoSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,3,opt,name=alias_context,json=aliasContext,oneof"` +} + +func (*CloudRepoSourceContext_RevisionId) isCloudRepoSourceContext_Revision() {} +func (*CloudRepoSourceContext_AliasContext) isCloudRepoSourceContext_Revision() {} + +func (m *CloudRepoSourceContext) GetRevision() isCloudRepoSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *CloudRepoSourceContext) GetRepoId() *RepoId { + if m != nil { + return m.RepoId + } + return nil +} + +func (m *CloudRepoSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *CloudRepoSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CloudRepoSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CloudRepoSourceContext_OneofMarshaler, _CloudRepoSourceContext_OneofUnmarshaler, _CloudRepoSourceContext_OneofSizer, []interface{}{ + (*CloudRepoSourceContext_RevisionId)(nil), + (*CloudRepoSourceContext_AliasContext)(nil), + } +} + +func _CloudRepoSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudRepoSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _CloudRepoSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CloudRepoSourceContext) + switch tag { + case 2: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &CloudRepoSourceContext_RevisionId{x} + return true, err + case 3: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &CloudRepoSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _CloudRepoSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 
proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A SourceContext referring to a Gerrit project. +type GerritSourceContext struct { + // The URI of a running Gerrit instance. + HostUri string `protobuf:"bytes,1,opt,name=host_uri,json=hostUri" json:"host_uri,omitempty"` + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. The "repo name" is + // the hostURI/project. + GerritProject string `protobuf:"bytes,2,opt,name=gerrit_project,json=gerritProject" json:"gerrit_project,omitempty"` + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. + // + // Types that are valid to be assigned to Revision: + // *GerritSourceContext_RevisionId + // *GerritSourceContext_AliasContext + Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` +} + +func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } +func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } +func (*GerritSourceContext) ProtoMessage() {} +func (*GerritSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{48} } + +type isGerritSourceContext_Revision interface { + isGerritSourceContext_Revision() +} + +type GerritSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,3,opt,name=revision_id,json=revisionId,oneof"` +} +type GerritSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,4,opt,name=alias_context,json=aliasContext,oneof"` +} + +func (*GerritSourceContext_RevisionId) isGerritSourceContext_Revision() {} +func (*GerritSourceContext_AliasContext) isGerritSourceContext_Revision() {} + +func 
(m *GerritSourceContext) GetRevision() isGerritSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *GerritSourceContext) GetHostUri() string { + if m != nil { + return m.HostUri + } + return "" +} + +func (m *GerritSourceContext) GetGerritProject() string { + if m != nil { + return m.GerritProject + } + return "" +} + +func (m *GerritSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*GerritSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *GerritSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*GerritSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GerritSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GerritSourceContext_OneofMarshaler, _GerritSourceContext_OneofUnmarshaler, _GerritSourceContext_OneofSizer, []interface{}{ + (*GerritSourceContext_RevisionId)(nil), + (*GerritSourceContext_AliasContext)(nil), + } +} + +func _GerritSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *GerritSourceContext_AliasContext: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GerritSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _GerritSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GerritSourceContext) + switch tag { + case 3: // revision.revision_id + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &GerritSourceContext_RevisionId{x} + return true, err + case 4: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &GerritSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _GerritSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *GerritSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g., GitHub). +type GitSourceContext struct { + // Git repository URL. + Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` + // Required. + // Git commit hash. + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId" json:"revision_id,omitempty"` +} + +func (m *GitSourceContext) Reset() { *m = GitSourceContext{} } +func (m *GitSourceContext) String() string { return proto.CompactTextString(m) } +func (*GitSourceContext) ProtoMessage() {} +func (*GitSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{49} } + +func (m *GitSourceContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *GitSourceContext) GetRevisionId() string { + if m != nil { + return m.RevisionId + } + return "" +} + +// A unique identifier for a Cloud Repo. 
+type RepoId struct { + // A cloud repo can be identified by either its project ID and repository name + // combination, or its globally unique identifier. + // + // Types that are valid to be assigned to Id: + // *RepoId_ProjectRepoId + // *RepoId_Uid + Id isRepoId_Id `protobuf_oneof:"id"` +} + +func (m *RepoId) Reset() { *m = RepoId{} } +func (m *RepoId) String() string { return proto.CompactTextString(m) } +func (*RepoId) ProtoMessage() {} +func (*RepoId) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{50} } + +type isRepoId_Id interface { + isRepoId_Id() +} + +type RepoId_ProjectRepoId struct { + ProjectRepoId *ProjectRepoId `protobuf:"bytes,1,opt,name=project_repo_id,json=projectRepoId,oneof"` +} +type RepoId_Uid struct { + Uid string `protobuf:"bytes,2,opt,name=uid,oneof"` +} + +func (*RepoId_ProjectRepoId) isRepoId_Id() {} +func (*RepoId_Uid) isRepoId_Id() {} + +func (m *RepoId) GetId() isRepoId_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *RepoId) GetProjectRepoId() *ProjectRepoId { + if x, ok := m.GetId().(*RepoId_ProjectRepoId); ok { + return x.ProjectRepoId + } + return nil +} + +func (m *RepoId) GetUid() string { + if x, ok := m.GetId().(*RepoId_Uid); ok { + return x.Uid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RepoId) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoId_OneofMarshaler, _RepoId_OneofUnmarshaler, _RepoId_OneofSizer, []interface{}{ + (*RepoId_ProjectRepoId)(nil), + (*RepoId_Uid)(nil), + } +} + +func _RepoId_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProjectRepoId); err != nil { + return err + } + case *RepoId_Uid: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uid) + case nil: + default: + return fmt.Errorf("RepoId.Id has unexpected type %T", x) + } + return nil +} + +func _RepoId_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoId) + switch tag { + case 1: // id.project_repo_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProjectRepoId) + err := b.DecodeMessage(msg) + m.Id = &RepoId_ProjectRepoId{msg} + return true, err + case 2: // id.uid + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Id = &RepoId_Uid{x} + return true, err + default: + return false, nil + } +} + +func _RepoId_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + s := proto.Size(x.ProjectRepoId) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *RepoId_Uid: + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Uid))) + n += len(x.Uid) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Selects a repo using a Google Cloud Platform project ID (e.g., +// winged-cargo-31) and 
a repo name within that project. +type ProjectRepoId struct { + // The ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"` + // The name of the repo. Leave empty for the default repo. + RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName" json:"repo_name,omitempty"` +} + +func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} } +func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) } +func (*ProjectRepoId) ProtoMessage() {} +func (*ProjectRepoId) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{51} } + +func (m *ProjectRepoId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ProjectRepoId) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +func init() { + proto.RegisterType((*CreateProjectRequest)(nil), "grafeas.v1alpha1.api.CreateProjectRequest") + proto.RegisterType((*GetProjectRequest)(nil), "grafeas.v1alpha1.api.GetProjectRequest") + proto.RegisterType((*ListProjectsRequest)(nil), "grafeas.v1alpha1.api.ListProjectsRequest") + proto.RegisterType((*DeleteProjectRequest)(nil), "grafeas.v1alpha1.api.DeleteProjectRequest") + proto.RegisterType((*GetOccurrenceRequest)(nil), "grafeas.v1alpha1.api.GetOccurrenceRequest") + proto.RegisterType((*ListOccurrencesRequest)(nil), "grafeas.v1alpha1.api.ListOccurrencesRequest") + proto.RegisterType((*DeleteOccurrenceRequest)(nil), "grafeas.v1alpha1.api.DeleteOccurrenceRequest") + proto.RegisterType((*CreateOccurrenceRequest)(nil), "grafeas.v1alpha1.api.CreateOccurrenceRequest") + proto.RegisterType((*UpdateOccurrenceRequest)(nil), "grafeas.v1alpha1.api.UpdateOccurrenceRequest") + proto.RegisterType((*GetNoteRequest)(nil), "grafeas.v1alpha1.api.GetNoteRequest") + proto.RegisterType((*GetOccurrenceNoteRequest)(nil), "grafeas.v1alpha1.api.GetOccurrenceNoteRequest") + proto.RegisterType((*ListNotesRequest)(nil), 
"grafeas.v1alpha1.api.ListNotesRequest") + proto.RegisterType((*DeleteNoteRequest)(nil), "grafeas.v1alpha1.api.DeleteNoteRequest") + proto.RegisterType((*CreateNoteRequest)(nil), "grafeas.v1alpha1.api.CreateNoteRequest") + proto.RegisterType((*UpdateNoteRequest)(nil), "grafeas.v1alpha1.api.UpdateNoteRequest") + proto.RegisterType((*ListNoteOccurrencesRequest)(nil), "grafeas.v1alpha1.api.ListNoteOccurrencesRequest") + proto.RegisterType((*ListProjectsResponse)(nil), "grafeas.v1alpha1.api.ListProjectsResponse") + proto.RegisterType((*ListNoteOccurrencesResponse)(nil), "grafeas.v1alpha1.api.ListNoteOccurrencesResponse") + proto.RegisterType((*ListNotesResponse)(nil), "grafeas.v1alpha1.api.ListNotesResponse") + proto.RegisterType((*ListOccurrencesResponse)(nil), "grafeas.v1alpha1.api.ListOccurrencesResponse") + proto.RegisterType((*ListOperationsResponse)(nil), "grafeas.v1alpha1.api.ListOperationsResponse") + proto.RegisterType((*UpdateOperationRequest)(nil), "grafeas.v1alpha1.api.UpdateOperationRequest") + proto.RegisterType((*CreateOperationRequest)(nil), "grafeas.v1alpha1.api.CreateOperationRequest") + proto.RegisterType((*Project)(nil), "grafeas.v1alpha1.api.Project") + proto.RegisterType((*OperationMetadata)(nil), "grafeas.v1alpha1.api.OperationMetadata") + proto.RegisterType((*Artifact)(nil), "grafeas.v1alpha1.api.Artifact") + proto.RegisterType((*AttestationAuthority)(nil), "grafeas.v1alpha1.api.AttestationAuthority") + proto.RegisterType((*AttestationAuthority_AttestationAuthorityHint)(nil), "grafeas.v1alpha1.api.AttestationAuthority.AttestationAuthorityHint") + proto.RegisterType((*AttestationAuthority_Attestation)(nil), "grafeas.v1alpha1.api.AttestationAuthority.Attestation") + proto.RegisterType((*BuildDetails)(nil), "grafeas.v1alpha1.api.BuildDetails") + proto.RegisterType((*BuildProvenance)(nil), "grafeas.v1alpha1.api.BuildProvenance") + proto.RegisterType((*BuildSignature)(nil), "grafeas.v1alpha1.api.BuildSignature") + 
proto.RegisterType((*BuildType)(nil), "grafeas.v1alpha1.api.BuildType") + proto.RegisterType((*Command)(nil), "grafeas.v1alpha1.api.Command") + proto.RegisterType((*Deployable)(nil), "grafeas.v1alpha1.api.Deployable") + proto.RegisterType((*Deployable_Deployment)(nil), "grafeas.v1alpha1.api.Deployable.Deployment") + proto.RegisterType((*DockerImage)(nil), "grafeas.v1alpha1.api.DockerImage") + proto.RegisterType((*DockerImage_Layer)(nil), "grafeas.v1alpha1.api.DockerImage.Layer") + proto.RegisterType((*DockerImage_Fingerprint)(nil), "grafeas.v1alpha1.api.DockerImage.Fingerprint") + proto.RegisterType((*DockerImage_Basis)(nil), "grafeas.v1alpha1.api.DockerImage.Basis") + proto.RegisterType((*DockerImage_Derived)(nil), "grafeas.v1alpha1.api.DockerImage.Derived") + proto.RegisterType((*Discovery)(nil), "grafeas.v1alpha1.api.Discovery") + proto.RegisterType((*Discovery_Discovered)(nil), "grafeas.v1alpha1.api.Discovery.Discovered") + proto.RegisterType((*FileHashes)(nil), "grafeas.v1alpha1.api.FileHashes") + proto.RegisterType((*Hash)(nil), "grafeas.v1alpha1.api.Hash") + proto.RegisterType((*Note)(nil), "grafeas.v1alpha1.api.Note") + proto.RegisterType((*Note_RelatedUrl)(nil), "grafeas.v1alpha1.api.Note.RelatedUrl") + proto.RegisterType((*Occurrence)(nil), "grafeas.v1alpha1.api.Occurrence") + proto.RegisterType((*PackageManager)(nil), "grafeas.v1alpha1.api.PackageManager") + proto.RegisterType((*PackageManager_Distribution)(nil), "grafeas.v1alpha1.api.PackageManager.Distribution") + proto.RegisterType((*PackageManager_Location)(nil), "grafeas.v1alpha1.api.PackageManager.Location") + proto.RegisterType((*PackageManager_Package)(nil), "grafeas.v1alpha1.api.PackageManager.Package") + proto.RegisterType((*PackageManager_Installation)(nil), "grafeas.v1alpha1.api.PackageManager.Installation") + proto.RegisterType((*PgpSignedAttestation)(nil), "grafeas.v1alpha1.api.PgpSignedAttestation") + proto.RegisterType((*Source)(nil), "grafeas.v1alpha1.api.Source") + 
proto.RegisterType((*RepoSource)(nil), "grafeas.v1alpha1.api.RepoSource") + proto.RegisterType((*StorageSource)(nil), "grafeas.v1alpha1.api.StorageSource") + proto.RegisterType((*VulnerabilityType)(nil), "grafeas.v1alpha1.api.VulnerabilityType") + proto.RegisterType((*VulnerabilityType_Version)(nil), "grafeas.v1alpha1.api.VulnerabilityType.Version") + proto.RegisterType((*VulnerabilityType_Detail)(nil), "grafeas.v1alpha1.api.VulnerabilityType.Detail") + proto.RegisterType((*VulnerabilityType_VulnerabilityDetails)(nil), "grafeas.v1alpha1.api.VulnerabilityType.VulnerabilityDetails") + proto.RegisterType((*VulnerabilityType_PackageIssue)(nil), "grafeas.v1alpha1.api.VulnerabilityType.PackageIssue") + proto.RegisterType((*VulnerabilityType_VulnerabilityLocation)(nil), "grafeas.v1alpha1.api.VulnerabilityType.VulnerabilityLocation") + proto.RegisterType((*SourceContext)(nil), "grafeas.v1alpha1.api.SourceContext") + proto.RegisterType((*AliasContext)(nil), "grafeas.v1alpha1.api.AliasContext") + proto.RegisterType((*CloudRepoSourceContext)(nil), "grafeas.v1alpha1.api.CloudRepoSourceContext") + proto.RegisterType((*GerritSourceContext)(nil), "grafeas.v1alpha1.api.GerritSourceContext") + proto.RegisterType((*GitSourceContext)(nil), "grafeas.v1alpha1.api.GitSourceContext") + proto.RegisterType((*RepoId)(nil), "grafeas.v1alpha1.api.RepoId") + proto.RegisterType((*ProjectRepoId)(nil), "grafeas.v1alpha1.api.ProjectRepoId") + proto.RegisterEnum("grafeas.v1alpha1.api.BuildSignature_KeyType", BuildSignature_KeyType_name, BuildSignature_KeyType_value) + proto.RegisterEnum("grafeas.v1alpha1.api.Deployable_Deployment_Platform", Deployable_Deployment_Platform_name, Deployable_Deployment_Platform_value) + proto.RegisterEnum("grafeas.v1alpha1.api.DockerImage_Layer_Directive", DockerImage_Layer_Directive_name, DockerImage_Layer_Directive_value) + proto.RegisterEnum("grafeas.v1alpha1.api.Hash_HashType", Hash_HashType_name, Hash_HashType_value) + 
proto.RegisterEnum("grafeas.v1alpha1.api.Note_Kind", Note_Kind_name, Note_Kind_value) + proto.RegisterEnum("grafeas.v1alpha1.api.PackageManager_Architecture", PackageManager_Architecture_name, PackageManager_Architecture_value) + proto.RegisterEnum("grafeas.v1alpha1.api.PgpSignedAttestation_ContentType", PgpSignedAttestation_ContentType_name, PgpSignedAttestation_ContentType_value) + proto.RegisterEnum("grafeas.v1alpha1.api.VulnerabilityType_Severity", VulnerabilityType_Severity_name, VulnerabilityType_Severity_value) + proto.RegisterEnum("grafeas.v1alpha1.api.VulnerabilityType_Version_VersionKind", VulnerabilityType_Version_VersionKind_name, VulnerabilityType_Version_VersionKind_value) + proto.RegisterEnum("grafeas.v1alpha1.api.AliasContext_Kind", AliasContext_Kind_name, AliasContext_Kind_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for Grafeas service + +type GrafeasClient interface { + // Returns the requested `Occurrence`. + GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Lists active `Occurrences` for a given project matching the filters. + ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) + // Deletes the given `Occurrence` from the system. Use this when + // an `Occurrence` is no longer applicable for the given resource. + DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) + // Creates a new `Occurrence`. Use this method to create `Occurrences` + // for a resource. 
+ CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Updates an existing occurrence. + UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Gets the `Note` attached to the given `Occurrence`. + GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Creates a new `Operation`. + CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) + // Updates an existing operation returns an error if operation + // does not exist. The only valid operations are to update mark the done bit + // change the result. + UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) + // Returns the requested `Note`. + GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists all `Notes` for a given project. + ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) + // Deletes the given `Note` from the system. + DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) + // Creates a new `Note`. + CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Updates an existing `Note`. + UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists `Occurrences` referencing the specified `Note`. Use this method to + // get all occurrences referencing your `Note` across all your customer + // projects. 
+ ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) +} + +type grafeasClient struct { + cc *grpc.ClientConn +} + +func NewGrafeasClient(cc *grpc.ClientConn) GrafeasClient { + return &grafeasClient{cc} +} + +func (c *grafeasClient) GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/GetOccurrence", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) { + out := new(ListOccurrencesResponse) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/ListOccurrences", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) { + out := new(google_protobuf.Empty) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/DeleteOccurrence", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/CreateOccurrence", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/UpdateOccurrence", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/GetOccurrenceNote", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) { + out := new(google_longrunning.Operation) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/CreateOperation", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) { + out := new(google_longrunning.Operation) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/UpdateOperation", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/GetNote", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) { + out := new(ListNotesResponse) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/ListNotes", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) { + out := new(google_protobuf.Empty) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/DeleteNote", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/CreateNote", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/UpdateNote", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasClient) ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) { + out := new(ListNoteOccurrencesResponse) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/ListNoteOccurrences", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for Grafeas service + +type GrafeasServer interface { + // Returns the requested `Occurrence`. + GetOccurrence(context.Context, *GetOccurrenceRequest) (*Occurrence, error) + // Lists active `Occurrences` for a given project matching the filters. + ListOccurrences(context.Context, *ListOccurrencesRequest) (*ListOccurrencesResponse, error) + // Deletes the given `Occurrence` from the system. Use this when + // an `Occurrence` is no longer applicable for the given resource. + DeleteOccurrence(context.Context, *DeleteOccurrenceRequest) (*google_protobuf.Empty, error) + // Creates a new `Occurrence`. Use this method to create `Occurrences` + // for a resource. + CreateOccurrence(context.Context, *CreateOccurrenceRequest) (*Occurrence, error) + // Updates an existing occurrence. + UpdateOccurrence(context.Context, *UpdateOccurrenceRequest) (*Occurrence, error) + // Gets the `Note` attached to the given `Occurrence`. 
+ GetOccurrenceNote(context.Context, *GetOccurrenceNoteRequest) (*Note, error) + // Creates a new `Operation`. + CreateOperation(context.Context, *CreateOperationRequest) (*google_longrunning.Operation, error) + // Updates an existing operation returns an error if operation + // does not exist. The only valid operations are to update mark the done bit + // change the result. + UpdateOperation(context.Context, *UpdateOperationRequest) (*google_longrunning.Operation, error) + // Returns the requested `Note`. + GetNote(context.Context, *GetNoteRequest) (*Note, error) + // Lists all `Notes` for a given project. + ListNotes(context.Context, *ListNotesRequest) (*ListNotesResponse, error) + // Deletes the given `Note` from the system. + DeleteNote(context.Context, *DeleteNoteRequest) (*google_protobuf.Empty, error) + // Creates a new `Note`. + CreateNote(context.Context, *CreateNoteRequest) (*Note, error) + // Updates an existing `Note`. + UpdateNote(context.Context, *UpdateNoteRequest) (*Note, error) + // Lists `Occurrences` referencing the specified `Note`. Use this method to + // get all occurrences referencing your `Note` across all your customer + // projects. 
+ ListNoteOccurrences(context.Context, *ListNoteOccurrencesRequest) (*ListNoteOccurrencesResponse, error) +} + +func RegisterGrafeasServer(s *grpc.Server, srv GrafeasServer) { + s.RegisterService(&_Grafeas_serviceDesc, srv) +} + +func _Grafeas_GetOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).GetOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/GetOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).GetOccurrence(ctx, req.(*GetOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_ListOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).ListOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/ListOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).ListOccurrences(ctx, req.(*ListOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_DeleteOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).DeleteOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/grafeas.v1alpha1.api.Grafeas/DeleteOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).DeleteOccurrence(ctx, req.(*DeleteOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_CreateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).CreateOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/CreateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).CreateOccurrence(ctx, req.(*CreateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_UpdateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).UpdateOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/UpdateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).UpdateOccurrence(ctx, req.(*UpdateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_GetOccurrenceNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).GetOccurrenceNote(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/GetOccurrenceNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).GetOccurrenceNote(ctx, req.(*GetOccurrenceNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_CreateOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).CreateOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/CreateOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).CreateOperation(ctx, req.(*CreateOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_UpdateOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).UpdateOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/UpdateOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).UpdateOperation(ctx, req.(*UpdateOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_GetNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).GetNote(ctx, in) + } + 
info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/GetNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).GetNote(ctx, req.(*GetNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_ListNotes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNotesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).ListNotes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/ListNotes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).ListNotes(ctx, req.(*ListNotesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_DeleteNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).DeleteNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/DeleteNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).DeleteNote(ctx, req.(*DeleteNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_CreateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).CreateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/grafeas.v1alpha1.api.Grafeas/CreateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).CreateNote(ctx, req.(*CreateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_UpdateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).UpdateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/UpdateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).UpdateNote(ctx, req.(*UpdateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Grafeas_ListNoteOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNoteOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasServer).ListNoteOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.Grafeas/ListNoteOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasServer).ListNoteOccurrences(ctx, req.(*ListNoteOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Grafeas_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grafeas.v1alpha1.api.Grafeas", + HandlerType: (*GrafeasServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetOccurrence", + Handler: _Grafeas_GetOccurrence_Handler, + }, + { + MethodName: "ListOccurrences", + Handler: _Grafeas_ListOccurrences_Handler, + }, + { + MethodName: "DeleteOccurrence", + Handler: 
_Grafeas_DeleteOccurrence_Handler, + }, + { + MethodName: "CreateOccurrence", + Handler: _Grafeas_CreateOccurrence_Handler, + }, + { + MethodName: "UpdateOccurrence", + Handler: _Grafeas_UpdateOccurrence_Handler, + }, + { + MethodName: "GetOccurrenceNote", + Handler: _Grafeas_GetOccurrenceNote_Handler, + }, + { + MethodName: "CreateOperation", + Handler: _Grafeas_CreateOperation_Handler, + }, + { + MethodName: "UpdateOperation", + Handler: _Grafeas_UpdateOperation_Handler, + }, + { + MethodName: "GetNote", + Handler: _Grafeas_GetNote_Handler, + }, + { + MethodName: "ListNotes", + Handler: _Grafeas_ListNotes_Handler, + }, + { + MethodName: "DeleteNote", + Handler: _Grafeas_DeleteNote_Handler, + }, + { + MethodName: "CreateNote", + Handler: _Grafeas_CreateNote_Handler, + }, + { + MethodName: "UpdateNote", + Handler: _Grafeas_UpdateNote_Handler, + }, + { + MethodName: "ListNoteOccurrences", + Handler: _Grafeas_ListNoteOccurrences_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "v1alpha1/proto/grafeas.proto", +} + +// Client API for GrafeasProjects service + +type GrafeasProjectsClient interface { + // Creates a new `Project`. + CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) + // Returns the requested `Project`. + GetProject(ctx context.Context, in *GetProjectRequest, opts ...grpc.CallOption) (*Project, error) + // Lists `Projects` + ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error) + // Deletes the given `Project` from the system. 
+ DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) +} + +type grafeasProjectsClient struct { + cc *grpc.ClientConn +} + +func NewGrafeasProjectsClient(cc *grpc.ClientConn) GrafeasProjectsClient { + return &grafeasProjectsClient{cc} +} + +func (c *grafeasProjectsClient) CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) { + out := new(google_protobuf.Empty) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/CreateProject", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasProjectsClient) GetProject(ctx context.Context, in *GetProjectRequest, opts ...grpc.CallOption) (*Project, error) { + out := new(Project) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/GetProject", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasProjectsClient) ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error) { + out := new(ListProjectsResponse) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/ListProjects", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasProjectsClient) DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) { + out := new(google_protobuf.Empty) + err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/DeleteProject", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for GrafeasProjects service + +type GrafeasProjectsServer interface { + // Creates a new `Project`. + CreateProject(context.Context, *CreateProjectRequest) (*google_protobuf.Empty, error) + // Returns the requested `Project`. 
+ GetProject(context.Context, *GetProjectRequest) (*Project, error) + // Lists `Projects` + ListProjects(context.Context, *ListProjectsRequest) (*ListProjectsResponse, error) + // Deletes the given `Project` from the system. + DeleteProject(context.Context, *DeleteProjectRequest) (*google_protobuf.Empty, error) +} + +func RegisterGrafeasProjectsServer(s *grpc.Server, srv GrafeasProjectsServer) { + s.RegisterService(&_GrafeasProjects_serviceDesc, srv) +} + +func _GrafeasProjects_CreateProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasProjectsServer).CreateProject(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.GrafeasProjects/CreateProject", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasProjectsServer).CreateProject(ctx, req.(*CreateProjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasProjects_GetProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasProjectsServer).GetProject(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.GrafeasProjects/GetProject", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasProjectsServer).GetProject(ctx, req.(*GetProjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasProjects_ListProjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, 
error) { + in := new(ListProjectsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasProjectsServer).ListProjects(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.GrafeasProjects/ListProjects", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasProjectsServer).ListProjects(ctx, req.(*ListProjectsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasProjects_DeleteProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasProjectsServer).DeleteProject(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1alpha1.api.GrafeasProjects/DeleteProject", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasProjectsServer).DeleteProject(ctx, req.(*DeleteProjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GrafeasProjects_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grafeas.v1alpha1.api.GrafeasProjects", + HandlerType: (*GrafeasProjectsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateProject", + Handler: _GrafeasProjects_CreateProject_Handler, + }, + { + MethodName: "GetProject", + Handler: _GrafeasProjects_GetProject_Handler, + }, + { + MethodName: "ListProjects", + Handler: _GrafeasProjects_ListProjects_Handler, + }, + { + MethodName: "DeleteProject", + Handler: _GrafeasProjects_DeleteProject_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "v1alpha1/proto/grafeas.proto", +} + +func init() { proto.RegisterFile("v1alpha1/proto/grafeas.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 4608 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x3b, 0x5d, 0x6f, 0xe3, 0xd8, + 0x75, 0xa6, 0xbe, 0x75, 0x24, 0xdb, 0xf4, 0x1d, 0xcf, 0x8c, 0x57, 0xb3, 0x93, 0x99, 0x70, 0x76, + 0x33, 0xb3, 0xde, 0xac, 0xbc, 0xe3, 0xfd, 0xca, 0xee, 0x76, 0xba, 0x91, 0x25, 0x8e, 0xc5, 0x58, + 0x1f, 0x06, 0x25, 0xcf, 0xce, 0xb4, 0x45, 0x59, 0x5a, 0xbc, 0x96, 0x19, 0x4b, 0xa4, 0x42, 0x52, + 0xce, 0x78, 0x83, 0x0d, 0xda, 0x60, 0x83, 0x22, 0x40, 0x81, 0x06, 0x48, 0x03, 0xf4, 0xa1, 0x0f, + 0x6d, 0x1f, 0xfa, 0x50, 0xf4, 0xad, 0x28, 0x50, 0xa0, 0xe8, 0x5b, 0x81, 0xa0, 0x8f, 0x41, 0xd1, + 0x7f, 0x50, 0xb4, 0x68, 0x0b, 0x14, 0x45, 0x5f, 0xfa, 0xd8, 0xe2, 0x7e, 0xf0, 0x43, 0x32, 0x25, + 0x73, 0x76, 0x12, 0xe4, 0xc5, 0xd6, 0x3d, 0x3c, 0xe7, 0xdc, 0x73, 0xcf, 0xf7, 0xbd, 0xe4, 0x85, + 0x57, 0xcf, 0x1f, 0xea, 0xa3, 0xc9, 0xa9, 0xfe, 0x70, 0x67, 0xe2, 0xd8, 0x9e, 0xbd, 0x33, 0x74, + 0xf4, 0x13, 0xac, 0xbb, 0x55, 0x3a, 0x42, 0x9b, 0xfe, 0xd0, 0xc7, 0xaa, 0xea, 0x13, 0xb3, 0x72, + 0x6b, 0x68, 0xdb, 0xc3, 0x11, 0x66, 0x14, 0xc7, 0xd3, 0x93, 0x1d, 0x3c, 0x9e, 0x78, 0x17, 0x8c, + 0xa4, 0xf2, 0x2a, 0x7f, 0xa8, 0x4f, 0xcc, 0x1d, 0xdd, 0xb2, 0x6c, 0x4f, 0xf7, 0x4c, 0xdb, 0xe2, + 0x0c, 0x2b, 0x77, 0xe7, 0x49, 0x4f, 0x4c, 0x3c, 0x32, 0xb4, 0xb1, 0xee, 0x9e, 0x71, 0x8c, 0x3b, + 0xf3, 0x18, 0x9e, 0x39, 0xc6, 0xae, 0xa7, 0x8f, 0x27, 0x1c, 0xe1, 0x1e, 0x47, 0x18, 0xd9, 0xd6, + 0xd0, 0x99, 0x5a, 0x96, 0x69, 0x0d, 0x77, 0xec, 0x09, 0x76, 0xa2, 0xf3, 0x48, 0xdb, 0xb0, 0x59, + 0x77, 0xb0, 0xee, 0xe1, 0x43, 0xc7, 0xfe, 0x36, 0x1e, 0x78, 0x2a, 0xfe, 0xce, 0x14, 0xbb, 0x1e, + 0x42, 0x90, 0xb1, 0xf4, 0x31, 0xde, 0x12, 0xee, 0x0a, 0x0f, 0x8a, 0x2a, 0xfd, 0x2d, 0xdd, 0x87, + 0x8d, 0x7d, 0xec, 0x25, 0x40, 0x34, 0xe1, 0x5a, 0xcb, 0x74, 0x7d, 0x4c, 0xd7, 0x47, 0xbd, 0x01, + 0xb9, 0x13, 0x73, 0xe4, 0x61, 0x87, 0x23, 0xf3, 0x11, 0xba, 0x05, 0xc5, 0x89, 0x3e, 0xc4, 0x9a, + 0x6b, 0x7e, 0x86, 0xb7, 0x52, 0x77, 0x85, 0x07, 0x59, 0xb5, 0x40, 0x00, 0x3d, 0xf3, 0x33, 0x8c, + 0x6e, 0x03, 0xd0, 
0x87, 0x9e, 0x7d, 0x86, 0xad, 0xad, 0x34, 0x25, 0xa4, 0xe8, 0x7d, 0x02, 0x20, + 0xf2, 0x37, 0xf0, 0x08, 0x27, 0x92, 0x7f, 0x1b, 0x36, 0xf7, 0xb1, 0xd7, 0x1d, 0x0c, 0xa6, 0x8e, + 0x83, 0xad, 0x01, 0x5e, 0x86, 0xfb, 0x85, 0x00, 0x37, 0xc8, 0x1a, 0x42, 0xec, 0xe8, 0x32, 0x26, + 0xba, 0x83, 0x2d, 0x6f, 0x2b, 0xcb, 0x96, 0xc1, 0x46, 0x91, 0xe5, 0xa5, 0x16, 0x2f, 0x2f, 0xbd, + 0x74, 0x79, 0x99, 0xf9, 0xe5, 0xbd, 0x05, 0x37, 0xd9, 0xf2, 0x92, 0x49, 0xed, 0xc2, 0x4d, 0x66, + 0xcd, 0xcb, 0xe8, 0xa1, 0xd4, 0xe9, 0x19, 0xa9, 0xbf, 0x09, 0x60, 0x07, 0xc8, 0x54, 0xf2, 0xd2, + 0xee, 0xdd, 0x6a, 0x9c, 0x3b, 0x57, 0x23, 0x4c, 0x23, 0x34, 0xd2, 0x5f, 0x09, 0x70, 0xf3, 0x68, + 0x62, 0xe8, 0x09, 0x85, 0x7c, 0xf9, 0x19, 0xd1, 0xc7, 0x50, 0x9a, 0xd2, 0x09, 0x69, 0x3c, 0xd0, + 0x05, 0x95, 0x76, 0x2b, 0x55, 0xe6, 0xef, 0x55, 0x3f, 0x20, 0xaa, 0x8f, 0x49, 0xc8, 0xb4, 0x75, + 0xf7, 0x4c, 0x05, 0x86, 0x4e, 0x7e, 0x4b, 0xaf, 0xc1, 0xda, 0x3e, 0xf6, 0x3a, 0xb6, 0xb7, 0x54, + 0x93, 0x55, 0xd8, 0x9a, 0xf1, 0x95, 0xab, 0xf0, 0xbf, 0x0f, 0x22, 0x71, 0x17, 0x82, 0xf6, 0x2b, + 0x71, 0x94, 0xfb, 0xb0, 0xc1, 0x1c, 0xe5, 0x2a, 0x41, 0x3d, 0xd8, 0x60, 0x2e, 0x12, 0x45, 0x0c, + 0x25, 0xcd, 0xcc, 0x48, 0x7a, 0x13, 0xf2, 0x96, 0xed, 0x61, 0xcd, 0x34, 0x7c, 0x51, 0xc9, 0x50, + 0x31, 0x50, 0x15, 0x32, 0xe4, 0x57, 0xa8, 0xfa, 0x38, 0xeb, 0xd1, 0x19, 0x28, 0x9e, 0xf4, 0x47, + 0x02, 0x6c, 0x30, 0x1f, 0xb9, 0x42, 0xbe, 0x80, 0x73, 0x2a, 0x19, 0xe7, 0x97, 0xf3, 0x85, 0x2f, + 0x04, 0xa8, 0xf8, 0x66, 0x8b, 0x89, 0xf4, 0x38, 0xf9, 0x7e, 0x19, 0xc6, 0xbb, 0x80, 0xcd, 0xd9, + 0x7c, 0xe9, 0x4e, 0x6c, 0xcb, 0xc5, 0xe8, 0x43, 0x28, 0x4c, 0x38, 0x6c, 0x4b, 0xb8, 0x9b, 0x7e, + 0x50, 0xda, 0xbd, 0x1d, 0xaf, 0x0f, 0x3f, 0xf9, 0x05, 0xe8, 0xe8, 0x6b, 0xb0, 0x6e, 0xe1, 0xe7, + 0x9e, 0x16, 0x99, 0x96, 0xc9, 0xbb, 0x4a, 0xc0, 0x87, 0xc1, 0xd4, 0x3f, 0x12, 0xe0, 0x56, 0xac, + 0x06, 0xb8, 0x08, 0x7b, 0x50, 0x0a, 0x03, 0xcf, 0x97, 0xe2, 0xea, 0x68, 0x8d, 0x12, 0x25, 0x96, + 0x65, 0x0c, 0x1b, 0x91, 0x18, 0xe2, 0x02, 0xbc, 0x0d, 
0x59, 0x62, 0x67, 0x7f, 0xea, 0x65, 0x0e, + 0xc1, 0x10, 0x13, 0x4f, 0xf7, 0x43, 0x01, 0x6e, 0x5e, 0x4a, 0xf1, 0xbf, 0x82, 0x65, 0x7f, 0xce, + 0x2b, 0x4d, 0x50, 0x9a, 0x03, 0x29, 0x5e, 0x83, 0x59, 0x54, 0xee, 0x88, 0xb3, 0x40, 0xf4, 0x08, + 0x20, 0x2c, 0xeb, 0x5b, 0x29, 0xdf, 0x4f, 0x58, 0x00, 0x44, 0x8a, 0x7f, 0x35, 0x98, 0x41, 0x8d, + 0x10, 0x48, 0x26, 0xdc, 0xe0, 0xd9, 0x3b, 0x78, 0xbc, 0xc4, 0xfd, 0x3f, 0x86, 0x62, 0x40, 0xcb, + 0x83, 0xed, 0x8a, 0xb9, 0x42, 0x7c, 0xe9, 0xc7, 0x02, 0xdc, 0xe0, 0xf5, 0x69, 0x7e, 0xae, 0x30, + 0x03, 0x09, 0x33, 0x19, 0xe8, 0xab, 0x50, 0x0e, 0xe8, 0xc3, 0x34, 0x54, 0x0a, 0x60, 0x8a, 0xf1, + 0x72, 0x22, 0xdd, 0x86, 0x3c, 0x0f, 0x9e, 0xd8, 0x6c, 0xf9, 0xfb, 0x02, 0x6c, 0x04, 0x74, 0x6d, + 0xec, 0xe9, 0x86, 0xee, 0xe9, 0x24, 0xe7, 0x0c, 0xe8, 0x32, 0x34, 0xd2, 0x73, 0x51, 0x82, 0xb8, + 0x9c, 0xd3, 0xf7, 0x1b, 0x32, 0x15, 0x18, 0x3a, 0x01, 0xa0, 0xf7, 0xa0, 0x80, 0x2d, 0x83, 0x51, + 0xa6, 0xae, 0xa4, 0xcc, 0x63, 0xcb, 0x20, 0x23, 0xe9, 0x77, 0xa0, 0x50, 0x73, 0x3c, 0xf3, 0x44, + 0x8f, 0x97, 0x14, 0x55, 0xa0, 0x30, 0x38, 0xc5, 0x83, 0x33, 0x77, 0x3a, 0xe6, 0x4a, 0x0a, 0xc6, + 0x68, 0x0d, 0x52, 0xa6, 0xc1, 0xeb, 0x7e, 0xca, 0x34, 0xd0, 0x26, 0x64, 0x09, 0x8d, 0xbb, 0x95, + 0xb9, 0x9b, 0x7e, 0x50, 0x54, 0xd9, 0x40, 0xfa, 0xbb, 0x14, 0x6c, 0xd6, 0x3c, 0x8f, 0x4c, 0x4c, + 0x56, 0x5b, 0x9b, 0x7a, 0xa7, 0xb6, 0x63, 0x7a, 0x17, 0xe8, 0x53, 0xc8, 0x9c, 0x9a, 0xdc, 0x32, + 0xa5, 0xdd, 0x7a, 0x7c, 0x14, 0xc4, 0x51, 0xc6, 0x02, 0x9b, 0xa6, 0xe5, 0xa9, 0x94, 0x61, 0xe5, + 0x5b, 0xb0, 0xb5, 0x08, 0x03, 0x55, 0xe1, 0xda, 0xe9, 0x74, 0xac, 0x5b, 0x9a, 0x83, 0x75, 0x43, + 0x3f, 0x1e, 0x61, 0x2d, 0xb2, 0xe4, 0x0d, 0xfa, 0x48, 0xe5, 0x4f, 0x3a, 0xfa, 0x18, 0x57, 0xbe, + 0x0f, 0xa5, 0x08, 0x2f, 0x74, 0x0c, 0x37, 0x26, 0xc3, 0x89, 0xe6, 0x9a, 0x43, 0x0b, 0x1b, 0x9a, + 0x1e, 0x3e, 0xe1, 0xab, 0xd8, 0x5e, 0x90, 0x48, 0x87, 0x93, 0x1e, 0x25, 0x89, 0xf0, 0x6a, 0xae, + 0xa8, 0x9b, 0x93, 0x18, 0xf8, 0x5e, 0x09, 0x8a, 0x84, 0xbf, 0xee, 0x4d, 0x1d, 0x2c, 0xfd, 
0xae, + 0x00, 0xe5, 0xbd, 0xa9, 0x39, 0x32, 0x1a, 0xd8, 0xd3, 0xcd, 0x91, 0x8b, 0x64, 0x80, 0x89, 0x63, + 0x9f, 0x63, 0x4b, 0x27, 0x6d, 0x0e, 0x9b, 0xf5, 0xf5, 0xf8, 0x59, 0x29, 0xdd, 0x61, 0x80, 0xac, + 0x46, 0x08, 0xd1, 0x1b, 0x20, 0x86, 0x23, 0xed, 0xf8, 0x82, 0xa4, 0x42, 0x66, 0xdf, 0xf5, 0x10, + 0xbe, 0x47, 0xc0, 0xd2, 0xbf, 0x66, 0x61, 0x7d, 0x8e, 0x15, 0x37, 0xbd, 0x10, 0x98, 0xfe, 0x36, + 0x95, 0x8a, 0xf8, 0x7b, 0x18, 0x4d, 0x45, 0x0e, 0x51, 0x0c, 0x52, 0x71, 0x06, 0xf6, 0x78, 0xac, + 0x5b, 0x86, 0xbb, 0x95, 0x5d, 0x56, 0x71, 0xea, 0x0c, 0x4b, 0x0d, 0xd0, 0xd1, 0x3e, 0xac, 0x1f, + 0x4f, 0xcd, 0x91, 0xa7, 0xe9, 0xdc, 0x4d, 0xdd, 0xad, 0x1c, 0xe5, 0xf0, 0x95, 0x05, 0x0e, 0xc3, + 0xd1, 0xd4, 0x35, 0x4a, 0xe6, 0x0f, 0xdd, 0xf9, 0xe8, 0xca, 0xbf, 0x50, 0x74, 0x7d, 0x08, 0xe0, + 0x7a, 0xba, 0xe3, 0x31, 0xda, 0xc2, 0x95, 0xb4, 0x45, 0x8a, 0x4d, 0x49, 0x3f, 0x86, 0xd2, 0x89, + 0x69, 0x99, 0xee, 0x29, 0xa3, 0x2d, 0x5e, 0x3d, 0x2f, 0x43, 0xa7, 0xc4, 0x5b, 0x90, 0xa7, 0x52, + 0xd8, 0xce, 0x56, 0x89, 0x2a, 0xd5, 0x1f, 0xa2, 0x3b, 0x50, 0x1a, 0xd9, 0x43, 0x57, 0x3b, 0x9e, + 0x0e, 0xce, 0xb0, 0xb7, 0xb5, 0x4a, 0x9f, 0x02, 0x01, 0xed, 0x51, 0x08, 0x52, 0x60, 0xc3, 0xb5, + 0xa7, 0xce, 0x00, 0x6b, 0x11, 0x7f, 0x59, 0xa3, 0xb3, 0xbf, 0x1a, 0xaf, 0xba, 0x1e, 0x45, 0x57, + 0x45, 0x46, 0x16, 0xb1, 0xf6, 0x6d, 0x00, 0xcf, 0x31, 0x87, 0x43, 0xec, 0x10, 0xeb, 0xae, 0x33, + 0xeb, 0x72, 0x88, 0x62, 0xa0, 0xdf, 0x82, 0x55, 0xa2, 0x6b, 0x43, 0xb3, 0x27, 0xac, 0x58, 0x88, + 0xd4, 0x40, 0x1f, 0x24, 0xf2, 0x4a, 0x36, 0xee, 0x32, 0x4a, 0xd9, 0xf2, 0x9c, 0x0b, 0xb5, 0x7c, + 0x1c, 0x01, 0xa1, 0xfb, 0xcc, 0x01, 0x0c, 0xec, 0x68, 0xe7, 0xd8, 0x71, 0x49, 0xac, 0x6d, 0x50, + 0x09, 0xd6, 0x38, 0xf8, 0x09, 0x83, 0x56, 0x3e, 0x81, 0x8d, 0x4b, 0xbc, 0x90, 0x08, 0xe9, 0x33, + 0x7c, 0xc1, 0x3d, 0x95, 0xfc, 0x24, 0x59, 0xea, 0x5c, 0x1f, 0x4d, 0x31, 0xf7, 0x52, 0x36, 0xf8, + 0x28, 0xf5, 0x0d, 0x41, 0xfa, 0x5f, 0x01, 0xd6, 0x28, 0x87, 0x9e, 0x1f, 0x7e, 0xd4, 0xaf, 0xa7, + 0xc7, 0x23, 0x73, 0xa0, 0x85, 
0x5c, 0x8a, 0x0c, 0x72, 0x80, 0x2f, 0xd0, 0xab, 0x91, 0x50, 0xf5, + 0xbd, 0x3e, 0x00, 0xa0, 0xeb, 0x90, 0x3b, 0xc3, 0x17, 0x5a, 0x90, 0x23, 0xb3, 0x67, 0xf8, 0x42, + 0x31, 0xd0, 0x3e, 0x14, 0x08, 0xd8, 0xbb, 0x98, 0x60, 0xda, 0xb3, 0xad, 0xed, 0x7e, 0x7d, 0x89, + 0xa6, 0x02, 0x59, 0xaa, 0x07, 0xf8, 0xa2, 0x7f, 0x31, 0xc1, 0x6a, 0xfe, 0x8c, 0xfd, 0x90, 0x9a, + 0x90, 0xe7, 0x30, 0xb4, 0x05, 0x9b, 0x07, 0xf2, 0x33, 0xad, 0xff, 0xec, 0x50, 0xd6, 0x8e, 0x3a, + 0xbd, 0x43, 0xb9, 0xae, 0x3c, 0x56, 0xe4, 0x86, 0xb8, 0x82, 0xae, 0xc3, 0xc6, 0xe1, 0xfe, 0xa1, + 0x56, 0xeb, 0xd5, 0x15, 0x45, 0xab, 0xa9, 0xed, 0xae, 0x2a, 0x37, 0x44, 0x01, 0x95, 0xa1, 0x70, + 0x78, 0xa0, 0x3c, 0xd5, 0x0e, 0xe5, 0xb6, 0x98, 0x92, 0x9e, 0x43, 0x91, 0x4e, 0x46, 0x79, 0xc5, + 0x28, 0x5c, 0x88, 0x53, 0x38, 0xda, 0x9b, 0x5f, 0x7d, 0x69, 0xf7, 0xb5, 0x24, 0x2b, 0x51, 0x67, + 0xf3, 0x5b, 0x9e, 0x07, 0x7d, 0x6c, 0xfd, 0x11, 0x21, 0x8d, 0xad, 0x73, 0xda, 0x7e, 0x14, 0x55, + 0xf2, 0x93, 0x60, 0xe9, 0xce, 0xd0, 0xdd, 0x4a, 0x53, 0x10, 0xfd, 0x4d, 0xb0, 0x0c, 0xd3, 0xe1, + 0x1d, 0x30, 0xf9, 0xc9, 0x13, 0x54, 0x36, 0x48, 0x50, 0xaf, 0x40, 0xe1, 0xbb, 0xba, 0xe9, 0x69, + 0x27, 0xb6, 0x43, 0xf3, 0x47, 0x51, 0xcd, 0x93, 0xf1, 0x63, 0xdb, 0x91, 0x7e, 0x96, 0x06, 0x68, + 0xe0, 0xc9, 0xc8, 0xbe, 0x20, 0x59, 0x9f, 0xb4, 0x06, 0x0e, 0xe6, 0x91, 0x33, 0x75, 0x4c, 0xda, + 0xa4, 0x15, 0xd5, 0x92, 0x0f, 0x3b, 0x72, 0xcc, 0xca, 0x4f, 0x03, 0x8a, 0x31, 0x69, 0x26, 0x6e, + 0x03, 0x4c, 0x5d, 0xec, 0x68, 0x78, 0xac, 0x9b, 0x23, 0xdf, 0x49, 0x08, 0x44, 0x26, 0x00, 0x92, + 0x00, 0x0c, 0x8a, 0x9c, 0xb4, 0x38, 0x03, 0x43, 0xa7, 0x09, 0xe0, 0x13, 0x58, 0x9d, 0x5a, 0x51, + 0xf2, 0xf4, 0x95, 0xe4, 0x65, 0x9f, 0x80, 0x32, 0xb8, 0x01, 0xb9, 0x81, 0x6d, 0x9d, 0x98, 0x43, + 0x9a, 0xb5, 0x8a, 0x2a, 0x1f, 0x91, 0xcc, 0xa2, 0x1b, 0x86, 0x83, 0x5d, 0x97, 0x6b, 0xc9, 0x1f, + 0x5e, 0x52, 0x40, 0xee, 0x92, 0x02, 0xd0, 0x21, 0x14, 0x26, 0x23, 0xdd, 0x3b, 0xb1, 0x9d, 0x31, + 0x4d, 0xa4, 0x6b, 0xbb, 0xef, 0xc6, 0x1b, 0x3e, 0xd4, 0x6b, 0x35, 
0x54, 0x58, 0xf5, 0x90, 0xd3, + 0xaa, 0x01, 0x17, 0xa9, 0x0e, 0x05, 0x1f, 0x4a, 0x9c, 0xf9, 0xb0, 0x55, 0xeb, 0x3f, 0xee, 0xaa, + 0xed, 0x39, 0x67, 0xce, 0x43, 0x7a, 0xff, 0x40, 0x16, 0x05, 0x54, 0x80, 0xcc, 0xe3, 0x96, 0xfc, + 0x54, 0x4c, 0x21, 0x80, 0x5c, 0xfd, 0xa8, 0xd7, 0xef, 0xb6, 0xc5, 0xb4, 0xf4, 0xf7, 0x39, 0x28, + 0x35, 0xec, 0xc1, 0x19, 0x76, 0x94, 0xb1, 0x3e, 0xc4, 0x95, 0x7f, 0x4f, 0x41, 0xb6, 0xa5, 0x5f, + 0x60, 0x07, 0x75, 0xa1, 0x68, 0x98, 0x0e, 0x1e, 0x78, 0xe6, 0x39, 0xf3, 0xaf, 0xb5, 0xdd, 0x87, + 0x0b, 0x24, 0x0e, 0xe9, 0xab, 0x94, 0xb6, 0xda, 0xf0, 0x09, 0xd5, 0x90, 0x07, 0x89, 0x7c, 0xdd, + 0x19, 0x4e, 0xc9, 0x72, 0xfc, 0xc2, 0x19, 0x02, 0xa4, 0xff, 0x16, 0xa0, 0x18, 0x90, 0xa1, 0x57, + 0xe0, 0x7a, 0x43, 0x51, 0xe5, 0x7a, 0x5f, 0x79, 0x32, 0x1f, 0x9d, 0x6b, 0x00, 0xed, 0x9a, 0xd2, + 0xe9, 0xd7, 0x94, 0x8e, 0xac, 0x8a, 0x02, 0x59, 0xa0, 0x7a, 0xd4, 0x11, 0x53, 0xe4, 0x47, 0xbd, + 0xdd, 0x10, 0xd3, 0xa8, 0x08, 0xd9, 0x56, 0x6d, 0x4f, 0x6e, 0x89, 0x19, 0xb2, 0x54, 0xf9, 0xe9, + 0x61, 0xb7, 0x27, 0x8b, 0x59, 0xf2, 0x5c, 0xee, 0x3c, 0x11, 0x73, 0xe4, 0x47, 0xad, 0xd1, 0x10, + 0xf3, 0x44, 0x25, 0xf5, 0xee, 0xe1, 0x33, 0xb1, 0x40, 0x98, 0xca, 0x9d, 0xbe, 0xfa, 0xec, 0xb0, + 0xab, 0x74, 0xfa, 0x62, 0x91, 0xd0, 0x3d, 0xe9, 0xb6, 0x8e, 0xda, 0xb2, 0x08, 0x04, 0xeb, 0xa8, + 0x27, 0xab, 0x62, 0x09, 0x95, 0x20, 0xff, 0x69, 0x57, 0x3d, 0x68, 0x28, 0xaa, 0x58, 0xa6, 0x5c, + 0xd4, 0x7d, 0x71, 0x95, 0x40, 0xbb, 0x9d, 0xbd, 0x23, 0xa5, 0xd5, 0x10, 0xd7, 0x08, 0xa3, 0x5e, + 0xbf, 0x7b, 0xd8, 0x53, 0xf6, 0x3b, 0xb5, 0x96, 0xb8, 0x8e, 0xd6, 0xa1, 0xd4, 0x94, 0x6b, 0xad, + 0x7e, 0xb3, 0xde, 0x94, 0xeb, 0x07, 0xa2, 0x48, 0x84, 0xeb, 0x35, 0xe5, 0x56, 0x4b, 0xdc, 0xa8, + 0x3c, 0x85, 0xd2, 0x63, 0xd3, 0x1a, 0x62, 0x67, 0xe2, 0x98, 0x6c, 0x8b, 0x7f, 0xfe, 0x30, 0xda, + 0x5b, 0xe5, 0xce, 0x1f, 0x92, 0x86, 0x8a, 0x3e, 0xd8, 0xd5, 0x8e, 0x47, 0xf6, 0x31, 0x0f, 0xea, + 0xdc, 0xf9, 0xee, 0xde, 0xc8, 0x3e, 0xe6, 0x0f, 0x28, 0x05, 0x3f, 0x4a, 0x3a, 0xdf, 0xa5, 0x2d, + 0xd8, 
0xf7, 0x20, 0xbb, 0xa7, 0xbb, 0xe6, 0xbc, 0x63, 0xfa, 0x91, 0x16, 0x71, 0xcc, 0x11, 0xea, + 0xd2, 0x62, 0xeb, 0x4b, 0xc1, 0x63, 0xed, 0xad, 0xab, 0x2d, 0x1d, 0x11, 0x5d, 0x8d, 0x72, 0xa8, + 0xfc, 0x87, 0x00, 0xf9, 0x06, 0x76, 0xcc, 0x73, 0x6c, 0xcc, 0x33, 0x17, 0x5e, 0x96, 0x39, 0x69, + 0xae, 0x0d, 0xd3, 0xf5, 0x74, 0xff, 0xc0, 0x6a, 0x55, 0x0d, 0xc6, 0xe8, 0x31, 0xc0, 0x88, 0xb8, + 0x9f, 0x66, 0x5a, 0x27, 0x36, 0x4d, 0x76, 0xa5, 0xdd, 0xfb, 0x09, 0x5d, 0x56, 0x2d, 0x52, 0x52, + 0xc5, 0x3a, 0xb1, 0xd1, 0x36, 0x6c, 0x1c, 0xeb, 0x2e, 0xd6, 0x66, 0x34, 0xc7, 0x12, 0xe5, 0x3a, + 0x79, 0xa0, 0x86, 0xda, 0x93, 0xfe, 0x84, 0xba, 0xad, 0x3b, 0xb0, 0xcf, 0xb1, 0x73, 0x81, 0x1a, + 0xb0, 0xaa, 0x5b, 0xfa, 0xe8, 0xc2, 0x35, 0x5d, 0xed, 0xcc, 0xb4, 0x0c, 0x1e, 0x37, 0x77, 0x16, + 0x6f, 0x95, 0xab, 0x07, 0xa6, 0x65, 0xa8, 0x65, 0x9f, 0x8a, 0x8c, 0x2a, 0x0a, 0x80, 0xcf, 0x12, + 0xcf, 0x6d, 0xaa, 0x84, 0x17, 0xdc, 0x54, 0xc9, 0x00, 0x8f, 0xcd, 0x11, 0x6e, 0xea, 0xee, 0x29, + 0x76, 0xd1, 0x07, 0x50, 0x3c, 0x31, 0x47, 0x58, 0x3b, 0xd5, 0xdd, 0xd3, 0xe5, 0xbb, 0x78, 0x42, + 0xa0, 0x16, 0x4e, 0x38, 0xa9, 0xf4, 0x5d, 0xc8, 0x90, 0xff, 0xe8, 0x03, 0xc8, 0xd0, 0x1a, 0xcc, + 0x96, 0x75, 0x6f, 0x31, 0x2d, 0xfd, 0x43, 0x4b, 0x2f, 0x25, 0x98, 0xed, 0x20, 0xca, 0xbc, 0x83, + 0x90, 0xee, 0x42, 0xc1, 0xc7, 0x23, 0x51, 0xd6, 0xe9, 0x76, 0x64, 0x71, 0x85, 0xc4, 0x5e, 0xaf, + 0x59, 0xdb, 0x7d, 0xef, 0x7d, 0x51, 0x90, 0xfe, 0x96, 0x80, 0x6d, 0x0f, 0xc7, 0x16, 0xba, 0x37, + 0x61, 0xc3, 0x3d, 0xb5, 0x1d, 0x4f, 0x33, 0xb0, 0x3b, 0x70, 0xcc, 0x49, 0xb0, 0xed, 0x2c, 0xaa, + 0x22, 0x7d, 0xd0, 0x08, 0xe1, 0xa4, 0x7b, 0x27, 0xda, 0x9a, 0xc1, 0xe5, 0x36, 0x25, 0xf0, 0x28, + 0xea, 0x3b, 0x90, 0xa1, 0xc6, 0x2b, 0x26, 0x33, 0x1e, 0x45, 0x46, 0x4f, 0x01, 0x9d, 0x4f, 0x47, + 0x16, 0x76, 0xf4, 0x63, 0x73, 0x64, 0x7a, 0xbc, 0x59, 0xc9, 0x51, 0x7b, 0x2d, 0x70, 0xc2, 0x27, + 0x51, 0x7c, 0xa2, 0x84, 0xe6, 0x8a, 0xba, 0x71, 0x3e, 0x0f, 0x44, 0xdf, 0x04, 0x60, 0xbd, 0x22, + 0xe5, 0xc8, 0x1a, 0xe9, 0x3b, 0x4b, 0x9a, 
0x06, 0xce, 0xa9, 0x78, 0x1c, 0xb4, 0x27, 0x4d, 0x00, + 0xea, 0xd0, 0x26, 0xf1, 0x77, 0xda, 0xf7, 0x26, 0x0a, 0x0c, 0x9a, 0x42, 0x28, 0x27, 0xdd, 0xc5, + 0x14, 0x84, 0x9a, 0x90, 0x9f, 0xe8, 0x83, 0x33, 0xc2, 0x86, 0xf5, 0xc5, 0x0b, 0xfa, 0xb0, 0x43, + 0x86, 0xd4, 0xd6, 0x2d, 0x7d, 0x88, 0x1d, 0x7f, 0xd8, 0x5c, 0x51, 0x7d, 0x72, 0xb4, 0x07, 0xbc, + 0x66, 0x93, 0x4a, 0x47, 0xdb, 0xd3, 0x85, 0xc7, 0x3a, 0x61, 0x45, 0x6c, 0xae, 0xa8, 0x11, 0x2a, + 0xf4, 0x09, 0x29, 0x51, 0x3c, 0xf6, 0xb6, 0xd0, 0x32, 0xc5, 0x04, 0x21, 0x4a, 0x96, 0x13, 0xd0, + 0xa0, 0xc7, 0x50, 0x72, 0xf0, 0x48, 0xf7, 0xb0, 0x41, 0x63, 0x3c, 0x4f, 0x43, 0xe2, 0xf5, 0x25, + 0x06, 0x57, 0x19, 0xf6, 0x91, 0x33, 0x52, 0xc1, 0x09, 0x7e, 0xa3, 0x3a, 0xac, 0xe3, 0xe7, 0x13, + 0x93, 0x1f, 0x8e, 0xd0, 0xa6, 0x03, 0xae, 0x6c, 0x3a, 0xd6, 0x42, 0x12, 0x7f, 0xd7, 0x13, 0xdd, + 0x6d, 0x95, 0x5e, 0x68, 0xb7, 0x15, 0x1e, 0xbe, 0x52, 0xe2, 0xf2, 0xd5, 0xc4, 0x0c, 0x9d, 0x00, + 0x2a, 0xef, 0x02, 0x84, 0x0b, 0x23, 0x9d, 0x61, 0x58, 0x2a, 0xc8, 0x4f, 0x12, 0xbd, 0x23, 0xfd, + 0x18, 0x8f, 0xfc, 0xfe, 0x9f, 0x0e, 0xa4, 0x3f, 0x14, 0x20, 0x43, 0x02, 0x00, 0x6d, 0x82, 0x78, + 0xa0, 0x74, 0x1a, 0x73, 0x75, 0xfa, 0x15, 0xb8, 0x7e, 0x58, 0xab, 0x1f, 0xd4, 0xf6, 0x65, 0xed, + 0xc9, 0x51, 0xab, 0x23, 0xab, 0xb5, 0x3d, 0xa5, 0xa5, 0xf4, 0x9f, 0x89, 0x29, 0xb4, 0x01, 0xab, + 0xb4, 0x5e, 0x6a, 0x0d, 0xb9, 0x5f, 0x53, 0x5a, 0x3d, 0x31, 0x4d, 0xea, 0xa4, 0xd2, 0x26, 0xb8, + 0x7b, 0xb5, 0x9e, 0xd2, 0x13, 0x33, 0xe8, 0x1a, 0xac, 0xfb, 0xe4, 0xed, 0x5a, 0xa7, 0xb6, 0x2f, + 0xab, 0x62, 0x96, 0x54, 0xd7, 0x86, 0x7c, 0xd8, 0xea, 0x3e, 0xab, 0xed, 0xb5, 0x64, 0x31, 0x87, + 0x56, 0xa1, 0xd8, 0x50, 0x7a, 0xf5, 0xee, 0x13, 0x59, 0x7d, 0x26, 0xe6, 0xf7, 0x4a, 0x50, 0xa4, + 0x87, 0xe4, 0x24, 0x50, 0xa4, 0x7f, 0xc8, 0x01, 0x84, 0x07, 0x82, 0xb1, 0x09, 0x64, 0xbe, 0x3a, + 0xa6, 0x2e, 0x57, 0xc7, 0x5b, 0x9c, 0x65, 0xa4, 0xc8, 0x16, 0x08, 0x80, 0x16, 0x66, 0x3f, 0x51, + 0xe4, 0x5e, 0x24, 0x51, 0xb8, 0x70, 0x7d, 0x36, 0x51, 0x18, 0xec, 0x98, 0x82, 
0x47, 0xf6, 0xaf, + 0x25, 0xcc, 0x15, 0xb3, 0x10, 0x7e, 0xd4, 0xd1, 0x5c, 0x51, 0x37, 0xcf, 0x63, 0xe0, 0x48, 0xf1, + 0xf7, 0x9b, 0xfe, 0x64, 0x6c, 0x2f, 0x2f, 0x2d, 0x49, 0x23, 0x21, 0x4b, 0xb6, 0xb9, 0xf4, 0x59, + 0x1d, 0xc2, 0xaa, 0xc1, 0xaa, 0x3b, 0xcf, 0x27, 0xcc, 0x51, 0xdf, 0xb8, 0x3a, 0x9f, 0xf0, 0xa6, + 0x80, 0x70, 0xe4, 0x1c, 0x58, 0x52, 0xf9, 0x14, 0xca, 0xa6, 0xe5, 0x7a, 0xfa, 0x68, 0xc4, 0x8a, + 0x1c, 0x73, 0xde, 0x87, 0x89, 0x32, 0x8b, 0x12, 0x21, 0x24, 0x8c, 0xa3, 0x8c, 0x50, 0xdb, 0xcf, + 0x31, 0xa4, 0xc5, 0xe4, 0x09, 0xeb, 0xcd, 0x17, 0xe8, 0xba, 0xc3, 0x74, 0x43, 0x37, 0x2d, 0x2d, + 0x00, 0x23, 0xa8, 0xcb, 0x74, 0x4f, 0xbf, 0xf0, 0xf4, 0x2a, 0xc8, 0x37, 0xd5, 0xb0, 0x92, 0x53, + 0x6e, 0x61, 0x5d, 0xbf, 0x4b, 0x72, 0xcf, 0x18, 0x1b, 0x26, 0x5b, 0x74, 0xd6, 0xf7, 0xbd, 0x00, + 0x34, 0x9f, 0x10, 0x8a, 0x2f, 0x93, 0x10, 0xe0, 0x45, 0x12, 0xc2, 0x5e, 0x11, 0xf2, 0xdc, 0x51, + 0xa4, 0x7f, 0xcb, 0xc2, 0xda, 0xac, 0xce, 0x2b, 0x7f, 0x99, 0x82, 0x72, 0xc3, 0x74, 0x3d, 0xc7, + 0x3c, 0x9e, 0x52, 0x41, 0x6f, 0x42, 0x7e, 0x30, 0xf1, 0xb7, 0x7e, 0x6c, 0xc7, 0x34, 0xa1, 0x9b, + 0x9e, 0x23, 0x28, 0xeb, 0xce, 0xe0, 0xd4, 0xf4, 0xf0, 0x20, 0xd8, 0xf1, 0xae, 0x25, 0xb4, 0x6c, + 0x2d, 0x42, 0xa8, 0xce, 0xb0, 0x41, 0x4f, 0x60, 0x8d, 0x64, 0x2b, 0xd7, 0x0b, 0x76, 0xdb, 0x6c, + 0x8b, 0xb7, 0x93, 0x38, 0x76, 0x18, 0x99, 0xba, 0xca, 0xd8, 0xf8, 0xbb, 0xf3, 0xaf, 0x00, 0x8c, + 0x75, 0xd3, 0xf2, 0x74, 0xd3, 0xc2, 0xfe, 0xd6, 0x38, 0x02, 0xf1, 0x33, 0x63, 0x2e, 0xcc, 0x8c, + 0x77, 0xc9, 0x46, 0x35, 0x6c, 0x28, 0xf2, 0xcc, 0x88, 0x11, 0x50, 0xe5, 0x07, 0x02, 0x14, 0x5a, + 0xf6, 0x40, 0x5f, 0xae, 0x28, 0x05, 0xf2, 0xfe, 0x52, 0x52, 0x5f, 0x6e, 0x29, 0x3e, 0x3d, 0x49, + 0x74, 0x13, 0xdd, 0x3b, 0xe5, 0xc9, 0x8a, 0xfe, 0xae, 0x78, 0x90, 0xe7, 0xda, 0x8d, 0xcd, 0x83, + 0x47, 0x50, 0x36, 0x22, 0xf6, 0xdc, 0x02, 0x5a, 0x07, 0x93, 0x99, 0x29, 0xea, 0x08, 0xea, 0x0c, + 0x9b, 0xca, 0x18, 0xca, 0xd1, 0xf0, 0x8c, 0x9d, 0x5a, 0x81, 0xc2, 0x88, 0x6b, 0x87, 0xbf, 0x30, + 0x79, 0x2b, 0xd1, 
0xb4, 0xbe, 0x4a, 0xd5, 0x80, 0x5c, 0xfa, 0x75, 0x28, 0x47, 0x7d, 0x06, 0xbd, + 0x0a, 0x5b, 0x35, 0xb5, 0xde, 0x54, 0xfa, 0x72, 0xbd, 0x7f, 0xa4, 0xca, 0x97, 0xf7, 0xc5, 0x4f, + 0xbf, 0xf1, 0x3e, 0xdb, 0x3f, 0x3e, 0x7d, 0xff, 0x5d, 0x31, 0x25, 0xfd, 0x9f, 0x00, 0x9b, 0x71, + 0xa7, 0xce, 0xb3, 0x47, 0x56, 0xc2, 0xfc, 0x91, 0xd5, 0x33, 0x28, 0x0f, 0x6c, 0xcb, 0xc3, 0x96, + 0xc7, 0x1a, 0xb4, 0x34, 0xf5, 0xf1, 0xf7, 0x93, 0x9f, 0x6a, 0x57, 0xeb, 0x8c, 0x9c, 0xb6, 0xcb, + 0xa5, 0x41, 0x38, 0x20, 0xfe, 0x38, 0x19, 0x4e, 0x34, 0x7e, 0x22, 0x46, 0xab, 0x53, 0x73, 0x45, + 0x2d, 0x4c, 0x86, 0x93, 0x03, 0x7c, 0xa1, 0x18, 0x52, 0x03, 0x4a, 0x11, 0x5a, 0xb2, 0xe0, 0x7a, + 0xb7, 0xd3, 0x97, 0x3b, 0xfd, 0xb8, 0x53, 0xad, 0x9b, 0x70, 0xad, 0xa7, 0xb4, 0x0f, 0x5b, 0xb2, + 0x46, 0x36, 0xa7, 0x4a, 0x67, 0x5f, 0xfb, 0x56, 0xaf, 0xdb, 0x11, 0x85, 0xbd, 0x82, 0x7f, 0xe6, + 0x26, 0xfd, 0x2c, 0x03, 0x39, 0x76, 0xa2, 0x89, 0x5a, 0xb0, 0xe6, 0x7a, 0xb6, 0x43, 0xdf, 0xa3, + 0x52, 0x08, 0xdf, 0x7a, 0x2c, 0xe8, 0xf9, 0x7b, 0x0c, 0x97, 0x11, 0x37, 0x57, 0xd4, 0x55, 0x37, + 0x0a, 0x40, 0x75, 0x92, 0xeb, 0x26, 0xb6, 0xcf, 0x6a, 0xe9, 0x97, 0x06, 0x2a, 0x9e, 0xd8, 0x01, + 0x1f, 0x70, 0x82, 0x11, 0xfa, 0x4d, 0xb8, 0xe9, 0x1f, 0x68, 0x6b, 0x73, 0xb2, 0x65, 0x12, 0xcb, + 0xa6, 0x5e, 0xf7, 0x79, 0xcc, 0x80, 0x51, 0x9b, 0x6c, 0x54, 0xf9, 0xd6, 0x08, 0xbb, 0x7c, 0xf3, + 0xf8, 0xf5, 0x65, 0x87, 0xbe, 0xd5, 0x70, 0x63, 0xc5, 0xce, 0x60, 0xe1, 0x24, 0xdc, 0x69, 0x3d, + 0x82, 0x3c, 0x35, 0xe4, 0x73, 0x8f, 0x57, 0xda, 0x7b, 0xcb, 0x58, 0xd5, 0x19, 0xaa, 0xea, 0xd3, + 0xa0, 0x3e, 0x5c, 0xd3, 0x0d, 0xc3, 0x24, 0xde, 0xa1, 0x8f, 0x34, 0x0e, 0x25, 0x1d, 0x42, 0x3a, + 0x29, 0x2b, 0x14, 0xd2, 0x73, 0x90, 0x5b, 0xd1, 0x60, 0x7d, 0x4e, 0xe6, 0x98, 0xb3, 0xde, 0xf7, + 0xa3, 0x3b, 0xb5, 0x85, 0x46, 0x0a, 0xf9, 0x44, 0x4e, 0x83, 0x89, 0x27, 0x31, 0x83, 0x48, 0x7f, + 0x23, 0x90, 0x96, 0x32, 0x30, 0xdd, 0xec, 0xbb, 0x0e, 0x61, 0xfe, 0x5d, 0xc7, 0x2d, 0x28, 0x52, + 0xf7, 0xa0, 0xd9, 0x81, 0xbf, 0x32, 0x23, 0x80, 0x0e, 
0x6b, 0xd2, 0x4a, 0xc7, 0x8e, 0x6e, 0x0d, + 0x4e, 0x23, 0x3d, 0x18, 0xf1, 0x0c, 0x06, 0xa4, 0x28, 0xb7, 0xa0, 0xe0, 0xe9, 0x43, 0xf6, 0x3c, + 0xc3, 0x9f, 0xe7, 0x3d, 0x7d, 0x48, 0x1f, 0xde, 0x01, 0x18, 0xd8, 0xe3, 0xb1, 0xe9, 0x69, 0xee, + 0xa9, 0xce, 0xca, 0x2c, 0xd9, 0x04, 0x30, 0x58, 0xef, 0x54, 0xdf, 0x03, 0x28, 0x38, 0xf8, 0xdc, + 0x24, 0xc9, 0x53, 0xd2, 0x60, 0x75, 0xd6, 0x2f, 0x6e, 0x40, 0x8e, 0xbf, 0x2e, 0xe0, 0x09, 0x9b, + 0x8d, 0x08, 0xdc, 0x3e, 0x26, 0xe2, 0xfb, 0x1f, 0x1f, 0xb0, 0x11, 0x09, 0xd9, 0x21, 0xb6, 0xa2, + 0xef, 0x40, 0xd3, 0x6a, 0x04, 0x22, 0xfd, 0x67, 0x19, 0x36, 0x2e, 0x25, 0x71, 0xa2, 0x9f, 0xc1, + 0xb9, 0xeb, 0x6a, 0xee, 0xc0, 0xe6, 0x55, 0x32, 0xa5, 0x16, 0x09, 0xa4, 0x47, 0x00, 0xa8, 0x05, + 0x05, 0x17, 0x9f, 0x63, 0xc7, 0xf4, 0x2e, 0x78, 0x7a, 0x79, 0x3b, 0x69, 0x79, 0xe8, 0x71, 0x3a, + 0x35, 0xe0, 0x40, 0xf6, 0x70, 0x7e, 0x17, 0x98, 0xa1, 0x0e, 0x55, 0x4d, 0xca, 0x8c, 0xb5, 0x80, + 0xaa, 0x4f, 0x5e, 0xf9, 0x67, 0x01, 0xf2, 0x7e, 0xed, 0xdc, 0x84, 0x2c, 0x9e, 0xd8, 0x83, 0x53, + 0xaa, 0xa7, 0xac, 0xca, 0x06, 0x41, 0xca, 0x4f, 0xcd, 0xbe, 0x1f, 0xf5, 0xf5, 0xed, 0x77, 0xd4, + 0xfe, 0x18, 0x75, 0x79, 0x47, 0x9d, 0xa5, 0xab, 0xfc, 0xf8, 0x05, 0x8b, 0xa0, 0xff, 0x3f, 0xec, + 0xb6, 0xa5, 0x77, 0xa0, 0x14, 0x01, 0x22, 0x80, 0x5c, 0xa7, 0xab, 0xb6, 0x6b, 0x2d, 0x71, 0x05, + 0x95, 0x20, 0xdf, 0x56, 0x3a, 0x4a, 0xfb, 0xa8, 0x2d, 0x0a, 0x74, 0x50, 0x7b, 0x4a, 0x07, 0xa9, + 0xca, 0xcf, 0xd3, 0x90, 0x63, 0x6b, 0x5d, 0x5c, 0xb1, 0xb7, 0xc2, 0x9d, 0x30, 0x3b, 0x25, 0x0e, + 0x76, 0xb6, 0x3a, 0x6c, 0x8e, 0x4d, 0x4b, 0xd3, 0x4f, 0x4e, 0xf0, 0x80, 0xec, 0x2c, 0xfd, 0xc2, + 0x9e, 0xfb, 0x72, 0x85, 0x1d, 0x8d, 0x4d, 0xab, 0xc6, 0x79, 0xf9, 0xca, 0x26, 0x53, 0xe8, 0xcf, + 0x2f, 0x4f, 0x91, 0xff, 0xb2, 0x53, 0xe8, 0xcf, 0xe7, 0xa7, 0xb8, 0x07, 0xab, 0xbe, 0xc7, 0x44, + 0x02, 0x4b, 0x2d, 0xfb, 0x40, 0x1a, 0x5b, 0x73, 0xed, 0x4f, 0xf1, 0x52, 0xfb, 0x83, 0x0c, 0x58, + 0x3b, 0x31, 0x9f, 0x63, 0x43, 0x0b, 0xaa, 0x7c, 0x96, 0xca, 0xf8, 0xe8, 0x4b, 0x6d, 0x73, 
0x82, + 0xaa, 0xbf, 0x4a, 0x99, 0x06, 0x7d, 0xd5, 0x57, 0xa1, 0xcc, 0xb5, 0xcf, 0x6a, 0x30, 0x30, 0x41, + 0x38, 0x8c, 0xf0, 0xa9, 0xfc, 0x8f, 0x00, 0x9b, 0x71, 0x5b, 0x26, 0xe2, 0xa2, 0x41, 0xdd, 0x2e, + 0xf2, 0xe3, 0xaa, 0x68, 0xc0, 0x65, 0x5e, 0x3a, 0xe0, 0x66, 0xa3, 0x3b, 0x3b, 0x1f, 0xdd, 0xcf, + 0x60, 0xd5, 0x17, 0xde, 0x74, 0xdd, 0x29, 0xe6, 0x2f, 0x6b, 0xdf, 0x4d, 0x3a, 0x23, 0xef, 0x8c, + 0x14, 0x42, 0xab, 0xfa, 0x7a, 0xa0, 0xa3, 0xca, 0x0f, 0x53, 0x50, 0x8e, 0x3e, 0x46, 0xdf, 0x86, + 0x8d, 0xc0, 0x69, 0x02, 0x8b, 0x08, 0xbf, 0x08, 0x8b, 0x88, 0x3e, 0xdf, 0xc0, 0x28, 0x97, 0x4d, + 0x9f, 0xfa, 0x25, 0x98, 0xfe, 0x92, 0x9f, 0xa6, 0x2f, 0xfb, 0x69, 0xe5, 0xa7, 0x02, 0x5c, 0x8f, + 0xe5, 0x96, 0x28, 0xbe, 0x53, 0xb3, 0xf1, 0x1d, 0xe9, 0xd5, 0x33, 0x2f, 0xd7, 0xab, 0x4b, 0xbf, + 0x0d, 0x05, 0xdf, 0x5f, 0xd0, 0x16, 0x6c, 0xf6, 0xe4, 0x27, 0xb2, 0xaa, 0xf4, 0x9f, 0xcd, 0x75, + 0x6e, 0x7e, 0xa2, 0xaa, 0xb5, 0x58, 0xbb, 0xda, 0xea, 0x7e, 0xca, 0xde, 0xe2, 0xb4, 0xe5, 0x86, + 0x72, 0xd4, 0x16, 0xd3, 0xa8, 0x00, 0x99, 0xa6, 0xb2, 0xdf, 0x14, 0x33, 0xa8, 0x0c, 0x85, 0xba, + 0xaa, 0xf4, 0x95, 0x7a, 0xad, 0x25, 0x66, 0xa5, 0xff, 0x4a, 0xc1, 0xea, 0x4c, 0x5f, 0x40, 0xb6, + 0xc4, 0x83, 0x91, 0x3d, 0x35, 0x34, 0x52, 0x5f, 0xb9, 0xe5, 0x17, 0xb4, 0x39, 0x75, 0x82, 0x17, + 0x16, 0x71, 0xce, 0x81, 0xd6, 0x4e, 0xff, 0x09, 0xaa, 0x43, 0x6e, 0x88, 0x1d, 0xc7, 0xf4, 0xdf, + 0x1b, 0x2c, 0x38, 0x05, 0xd8, 0xa7, 0x38, 0xf3, 0x7c, 0x38, 0x29, 0xfa, 0x08, 0xd2, 0x43, 0xd3, + 0xe3, 0x7b, 0xb8, 0xaf, 0x2d, 0xe0, 0x70, 0x99, 0x9c, 0x10, 0xa1, 0x7d, 0xc8, 0xd1, 0xd3, 0x28, + 0xbf, 0x96, 0xed, 0x24, 0x68, 0x8e, 0xaa, 0x2d, 0x4a, 0xc1, 0xba, 0x36, 0x4e, 0x5e, 0xf9, 0x10, + 0x4a, 0x11, 0xf0, 0x8b, 0xbc, 0x04, 0x27, 0xbb, 0x65, 0xde, 0xa2, 0x49, 0x7f, 0x2a, 0x40, 0xb9, + 0x36, 0x32, 0x75, 0xd7, 0xd7, 0xf7, 0xc7, 0xbc, 0xa0, 0xb1, 0x13, 0xf3, 0x05, 0x87, 0xae, 0x51, + 0x8a, 0xe8, 0x51, 0x51, 0x4c, 0xf5, 0x94, 0x3e, 0x59, 0x7a, 0xe8, 0x56, 0x84, 0xec, 0x63, 0xe5, + 0x29, 0x7d, 0x5d, 0x4d, 0xbc, 
0xa6, 0xfb, 0x84, 0x1e, 0x94, 0xa5, 0x08, 0xbc, 0xdb, 0x6f, 0xca, + 0xaa, 0x98, 0x91, 0xfe, 0x51, 0x80, 0x1b, 0xf1, 0xa6, 0x45, 0xef, 0x41, 0x9e, 0xf6, 0x61, 0xbc, + 0x47, 0x5b, 0xf8, 0xd5, 0x03, 0xa1, 0x54, 0x0c, 0x35, 0xe7, 0xd0, 0xff, 0xa4, 0x43, 0xf3, 0x0b, + 0x78, 0x74, 0x9f, 0x02, 0x3e, 0x50, 0x31, 0x90, 0x02, 0xab, 0x3a, 0x59, 0xa4, 0xdf, 0xcb, 0x72, + 0x63, 0x4b, 0x57, 0xeb, 0xa3, 0xb9, 0xa2, 0x96, 0xf5, 0xc8, 0x78, 0xa6, 0x5d, 0xfb, 0xb9, 0x00, + 0xd7, 0x62, 0x7c, 0x0b, 0xbd, 0x02, 0x85, 0x53, 0xdb, 0xf5, 0x22, 0x61, 0x9d, 0x27, 0x63, 0x12, + 0xd7, 0xaf, 0xc3, 0x1a, 0x73, 0x3b, 0x8d, 0xf7, 0x9f, 0xfe, 0xa7, 0x80, 0x0c, 0xea, 0x7f, 0x82, + 0x36, 0xb7, 0xa6, 0x74, 0x92, 0x35, 0x65, 0x7e, 0x21, 0x6b, 0x92, 0x41, 0x9c, 0x77, 0xf6, 0x98, + 0x23, 0xd9, 0x3b, 0x31, 0x3a, 0x8f, 0x4a, 0x27, 0x7d, 0x07, 0x72, 0xcc, 0x4c, 0xa8, 0x0d, 0xeb, + 0x7e, 0xf3, 0x3d, 0x6b, 0xdd, 0x7b, 0xcb, 0x3f, 0x61, 0xa5, 0xd4, 0x64, 0x2f, 0x37, 0x89, 0x02, + 0x10, 0x82, 0xf4, 0x34, 0x62, 0x65, 0x32, 0xd8, 0xcb, 0x40, 0xca, 0x34, 0xa4, 0x03, 0x58, 0x9d, + 0xa1, 0x7d, 0x99, 0xb6, 0x7f, 0xf7, 0x2f, 0x44, 0xc8, 0xef, 0x33, 0xf1, 0xd0, 0x8f, 0x05, 0x58, + 0x9d, 0xf9, 0x06, 0x1c, 0x6d, 0x2f, 0xca, 0x33, 0x97, 0x2f, 0x15, 0x54, 0xae, 0xfc, 0x58, 0x54, + 0x7a, 0xf8, 0x83, 0x7f, 0xfa, 0x97, 0x9f, 0xa4, 0xde, 0x44, 0x6f, 0xec, 0x04, 0x57, 0x4b, 0xbe, + 0x47, 0x84, 0x7a, 0xe4, 0x7f, 0xc4, 0xbb, 0xb3, 0xbd, 0x13, 0xf9, 0x9e, 0x74, 0x67, 0xfb, 0x73, + 0xf4, 0x67, 0x02, 0xac, 0xcf, 0x7d, 0xb2, 0x8a, 0x16, 0xe4, 0xd1, 0xf8, 0xcb, 0x0b, 0x95, 0xb7, + 0x12, 0x62, 0xb3, 0x2f, 0x50, 0x63, 0x65, 0x64, 0x5f, 0x66, 0x46, 0xa4, 0xfc, 0x3c, 0x2a, 0x26, + 0xfa, 0x03, 0x01, 0xc4, 0xf9, 0x3b, 0x0b, 0x68, 0xd1, 0xcb, 0xd7, 0xf8, 0xbb, 0x0d, 0x95, 0x1b, + 0x97, 0x4e, 0x0c, 0xe5, 0xf1, 0xc4, 0xbb, 0xf0, 0xc5, 0xd9, 0x7e, 0x01, 0x95, 0xfd, 0xb9, 0x00, + 0xe2, 0xfc, 0x9d, 0x88, 0x45, 0xe2, 0x2c, 0xb8, 0x3b, 0x91, 0xc0, 0x96, 0x8f, 0xa8, 0x60, 0x1f, + 0x48, 0xc9, 0xf5, 0xf4, 0x51, 0xf4, 0x42, 0x03, 0x11, 0x72, 0xfe, 
0x0a, 0xc5, 0x22, 0x21, 0x17, + 0x5c, 0xb5, 0x48, 0x2e, 0xe4, 0x6e, 0x72, 0xed, 0xcd, 0x08, 0xf9, 0xc7, 0x02, 0xbd, 0xff, 0x33, + 0x7b, 0x27, 0x02, 0x55, 0x13, 0xc4, 0x44, 0xe4, 0x9b, 0xff, 0xca, 0x92, 0x0f, 0xb8, 0xa5, 0x0f, + 0xa8, 0x80, 0x0f, 0xd1, 0x4e, 0x62, 0x01, 0x77, 0xd8, 0x27, 0xdf, 0x3f, 0x11, 0x60, 0x7d, 0xee, + 0xc3, 0xe2, 0x45, 0x71, 0x11, 0xff, 0xfd, 0x71, 0x65, 0xf9, 0xcb, 0x6d, 0xe9, 0x1d, 0x2a, 0xd9, + 0x5b, 0xd2, 0x83, 0x2b, 0xec, 0x1b, 0x7c, 0x56, 0xfd, 0x91, 0xb0, 0x4d, 0xa5, 0x9a, 0xfb, 0xb4, + 0x7a, 0x91, 0x54, 0xf1, 0x5f, 0x60, 0x27, 0x94, 0x6a, 0xf7, 0xc1, 0x32, 0x7d, 0x05, 0x22, 0x11, + 0x7b, 0x0a, 0xdb, 0xe8, 0x39, 0xe4, 0xf9, 0xfd, 0x17, 0xf4, 0xda, 0x42, 0xdb, 0x25, 0xb5, 0xd8, + 0x9b, 0x54, 0x82, 0xd7, 0xd1, 0xbd, 0x25, 0x12, 0x50, 0x13, 0x91, 0x50, 0xfc, 0x91, 0x00, 0xc5, + 0xe0, 0x03, 0x7f, 0xf4, 0xb5, 0xc5, 0x99, 0x28, 0x7a, 0x8b, 0xa6, 0x72, 0xff, 0x4a, 0x3c, 0x9e, + 0xab, 0xe2, 0x64, 0x89, 0xb1, 0x11, 0xf3, 0x98, 0xcf, 0x00, 0xc2, 0xfb, 0x32, 0xe8, 0xfe, 0xb2, + 0xf4, 0x14, 0xd5, 0xc5, 0xa2, 0xc4, 0xc4, 0xe7, 0xde, 0x4e, 0xa4, 0x87, 0x2f, 0x04, 0x80, 0xf0, + 0x0e, 0xce, 0xa2, 0xc9, 0x2f, 0xdd, 0xd2, 0x59, 0x6a, 0x08, 0x9e, 0x19, 0xa5, 0x24, 0x8b, 0xff, + 0x88, 0xdd, 0x9c, 0x21, 0x62, 0x84, 0x77, 0x72, 0x16, 0x89, 0x71, 0xe9, 0xd6, 0x4e, 0x12, 0x31, + 0x76, 0x93, 0xe8, 0x81, 0x8b, 0xf1, 0xd7, 0x02, 0xbb, 0x2d, 0x38, 0x77, 0x03, 0x05, 0xbd, 0xbd, + 0xdc, 0xee, 0x31, 0xb5, 0xed, 0xe1, 0x0b, 0x50, 0x70, 0x9f, 0x49, 0x92, 0x71, 0x7c, 0x79, 0xa3, + 0xa9, 0x67, 0xf7, 0x8b, 0x0c, 0xac, 0xf3, 0x46, 0xc1, 0xbf, 0xb6, 0x83, 0x9e, 0xc3, 0xea, 0xcc, + 0x5d, 0xca, 0x45, 0xfd, 0x42, 0xdc, 0x85, 0xcb, 0x85, 0x9e, 0x75, 0x8f, 0x4a, 0x78, 0x5b, 0xba, + 0xb5, 0x58, 0xc2, 0xcf, 0xd1, 0x05, 0x40, 0x78, 0x33, 0x73, 0x91, 0x25, 0x2f, 0xdd, 0xdd, 0xac, + 0x2c, 0xbf, 0x4d, 0xe4, 0x4f, 0x8d, 0x96, 0x4e, 0xfd, 0x7b, 0x02, 0x94, 0xa3, 0x97, 0x97, 0xd0, + 0x1b, 0x8b, 0xad, 0x30, 0x77, 0x21, 0xb4, 0xb2, 0x9d, 0x04, 0x95, 0x5b, 0xaa, 0x42, 0x85, 0xd9, + 0x44, 
0x68, 0x27, 0x7a, 0x11, 0x37, 0x50, 0xfc, 0xcc, 0x25, 0xd0, 0x45, 0x8a, 0x8f, 0xbb, 0x29, + 0x7a, 0x95, 0xe2, 0xb7, 0x97, 0xad, 0x7e, 0xaf, 0xf8, 0x1b, 0x79, 0x3e, 0xd3, 0x71, 0x8e, 0xd2, + 0xbf, 0xf3, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf4, 0xa4, 0xcb, 0x85, 0x29, 0x3c, 0x00, 0x00, +} diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go new file mode 100644 index 00000000..4bfe6548 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go @@ -0,0 +1,1261 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: v1alpha1/proto/grafeas.proto + +/* +Package grafeas is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. +*/ +package grafeas + +import ( + "io" + "net/http" + + "github.com/golang/protobuf/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/utilities" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray + +func request_Grafeas_GetOccurrence_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetOccurrenceRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.GetOccurrence(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_ListOccurrences_0 = &utilities.DoubleArray{Encoding: map[string]int{"parent": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_Grafeas_ListOccurrences_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListOccurrencesRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["parent"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "parent") + } + + protoReq.Parent, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "parent", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_ListOccurrences_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListOccurrences(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_DeleteOccurrence_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DeleteOccurrenceRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := 
client.DeleteOccurrence(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_CreateOccurrence_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateOccurrenceRequest + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["parent"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "parent") + } + + protoReq.Parent, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "parent", err) + } + + msg, err := client.CreateOccurrence(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_UpdateOccurrence_0 = &utilities.DoubleArray{Encoding: map[string]int{"occurrence": 0, "name": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) + +func request_Grafeas_UpdateOccurrence_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq UpdateOccurrenceRequest + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + 
protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_UpdateOccurrence_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.UpdateOccurrence(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_GetOccurrenceNote_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetOccurrenceNoteRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.GetOccurrenceNote(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_CreateOperation_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateOperationRequest + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["parent"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter 
%s", "parent") + } + + protoReq.Parent, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "parent", err) + } + + msg, err := client.CreateOperation(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_UpdateOperation_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq UpdateOperationRequest + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.UpdateOperation(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_GetNote_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetNoteRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", 
err) + } + + msg, err := client.GetNote(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_ListNotes_0 = &utilities.DoubleArray{Encoding: map[string]int{"parent": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_Grafeas_ListNotes_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListNotesRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["parent"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "parent") + } + + protoReq.Parent, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "parent", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_ListNotes_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNotes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_Grafeas_DeleteNote_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DeleteNoteRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := 
client.DeleteNote(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_CreateNote_0 = &utilities.DoubleArray{Encoding: map[string]int{"note": 0, "parent": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) + +func request_Grafeas_CreateNote_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateNoteRequest + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["parent"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "parent") + } + + protoReq.Parent, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "parent", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_CreateNote_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateNote(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_UpdateNote_0 = &utilities.DoubleArray{Encoding: map[string]int{"note": 0, "name": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) + +func request_Grafeas_UpdateNote_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq UpdateNoteRequest + var metadata runtime.ServerMetadata + + if err := 
marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_UpdateNote_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.UpdateNote(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_Grafeas_ListNoteOccurrences_0 = &utilities.DoubleArray{Encoding: map[string]int{"name": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_Grafeas_ListNoteOccurrences_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListNoteOccurrencesRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_Grafeas_ListNoteOccurrences_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNoteOccurrences(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_GrafeasProjects_CreateProject_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasProjectsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateProjectRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.CreateProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_GrafeasProjects_GetProject_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasProjectsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetProjectRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.GetProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_GrafeasProjects_ListProjects_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_GrafeasProjects_ListProjects_0(ctx context.Context, marshaler 
runtime.Marshaler, client GrafeasProjectsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListProjectsRequest + var metadata runtime.ServerMetadata + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_GrafeasProjects_ListProjects_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListProjects(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_GrafeasProjects_DeleteProject_0(ctx context.Context, marshaler runtime.Marshaler, client GrafeasProjectsClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DeleteProjectRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.DeleteProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +// RegisterGrafeasHandlerFromEndpoint is same as RegisterGrafeasHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterGrafeasHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) 
+ if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterGrafeasHandler(ctx, mux, conn) +} + +// RegisterGrafeasHandler registers the http handlers for service Grafeas to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterGrafeasHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterGrafeasHandlerClient(ctx, mux, NewGrafeasClient(conn)) +} + +// RegisterGrafeasHandler registers the http handlers for service Grafeas to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "GrafeasClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "GrafeasClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "GrafeasClient" to call the correct interceptors. 
+func RegisterGrafeasHandlerClient(ctx context.Context, mux *runtime.ServeMux, client GrafeasClient) error { + + mux.Handle("GET", pattern_Grafeas_GetOccurrence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_GetOccurrence_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_GetOccurrence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_Grafeas_ListOccurrences_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_ListOccurrences_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_ListOccurrences_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_Grafeas_DeleteOccurrence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_DeleteOccurrence_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_DeleteOccurrence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_Grafeas_CreateOccurrence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_CreateOccurrence_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_CreateOccurrence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("PATCH", pattern_Grafeas_UpdateOccurrence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_UpdateOccurrence_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_UpdateOccurrence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_Grafeas_GetOccurrenceNote_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_GetOccurrenceNote_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_GetOccurrenceNote_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_Grafeas_CreateOperation_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_CreateOperation_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_CreateOperation_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("PATCH", pattern_Grafeas_UpdateOperation_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_UpdateOperation_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_UpdateOperation_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_Grafeas_GetNote_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_GetNote_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_GetNote_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_Grafeas_ListNotes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_ListNotes_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_ListNotes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_Grafeas_DeleteNote_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_DeleteNote_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_DeleteNote_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_Grafeas_CreateNote_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_CreateNote_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_CreateNote_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("PATCH", pattern_Grafeas_UpdateNote_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_UpdateNote_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_UpdateNote_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_Grafeas_ListNoteOccurrences_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Grafeas_ListNoteOccurrences_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Grafeas_ListNoteOccurrences_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + return nil +} + +var ( + pattern_Grafeas_GetOccurrence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "occurrences", "name"}, "")) + + pattern_Grafeas_ListOccurrences_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2, 2, 3}, []string{"v1alpha1", "projects", "parent", "occurrences"}, "")) + + pattern_Grafeas_DeleteOccurrence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "occurrences", "name"}, "")) + + pattern_Grafeas_CreateOccurrence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2, 2, 3}, []string{"v1alpha1", "projects", "parent", "occurrences"}, "")) + + pattern_Grafeas_UpdateOccurrence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "occurrences", "name"}, "")) + + pattern_Grafeas_GetOccurrenceNote_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3, 2, 4}, []string{"v1alpha1", "projects", "occurrences", "name", "notes"}, "")) + + pattern_Grafeas_CreateOperation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2, 2, 3}, []string{"v1alpha1", "projects", "parent", "operations"}, "")) + + pattern_Grafeas_UpdateOperation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "operations", "name"}, "")) + + pattern_Grafeas_GetNote_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "notes", "name"}, "")) + + pattern_Grafeas_ListNotes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2, 2, 3}, []string{"v1alpha1", "projects", "parent", "notes"}, "")) + + pattern_Grafeas_DeleteNote_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "notes", "name"}, "")) + + pattern_Grafeas_CreateNote_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2, 2, 3}, []string{"v1alpha1", "projects", "parent", "notes"}, "")) + + pattern_Grafeas_UpdateNote_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3}, []string{"v1alpha1", "projects", "notes", "name"}, "")) + + pattern_Grafeas_ListNoteOccurrences_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 2, 2, 1, 0, 4, 4, 5, 3, 2, 4}, []string{"v1alpha1", "projects", "notes", "name", "occurrences"}, "")) +) + +var ( + forward_Grafeas_GetOccurrence_0 = runtime.ForwardResponseMessage + + forward_Grafeas_ListOccurrences_0 = runtime.ForwardResponseMessage + + forward_Grafeas_DeleteOccurrence_0 = runtime.ForwardResponseMessage + + forward_Grafeas_CreateOccurrence_0 = runtime.ForwardResponseMessage + + forward_Grafeas_UpdateOccurrence_0 = runtime.ForwardResponseMessage + + forward_Grafeas_GetOccurrenceNote_0 = runtime.ForwardResponseMessage + + forward_Grafeas_CreateOperation_0 = runtime.ForwardResponseMessage + + forward_Grafeas_UpdateOperation_0 = runtime.ForwardResponseMessage + + forward_Grafeas_GetNote_0 = runtime.ForwardResponseMessage + + forward_Grafeas_ListNotes_0 = runtime.ForwardResponseMessage + + forward_Grafeas_DeleteNote_0 = runtime.ForwardResponseMessage + + forward_Grafeas_CreateNote_0 = runtime.ForwardResponseMessage + + forward_Grafeas_UpdateNote_0 = runtime.ForwardResponseMessage + + forward_Grafeas_ListNoteOccurrences_0 = runtime.ForwardResponseMessage +) + +// RegisterGrafeasProjectsHandlerFromEndpoint is same as RegisterGrafeasProjectsHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. 
+func RegisterGrafeasProjectsHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterGrafeasProjectsHandler(ctx, mux, conn) +} + +// RegisterGrafeasProjectsHandler registers the http handlers for service GrafeasProjects to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterGrafeasProjectsHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterGrafeasProjectsHandlerClient(ctx, mux, NewGrafeasProjectsClient(conn)) +} + +// RegisterGrafeasProjectsHandler registers the http handlers for service GrafeasProjects to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "GrafeasProjectsClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "GrafeasProjectsClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "GrafeasProjectsClient" to call the correct interceptors. 
+func RegisterGrafeasProjectsHandlerClient(ctx context.Context, mux *runtime.ServeMux, client GrafeasProjectsClient) error { + + mux.Handle("POST", pattern_GrafeasProjects_CreateProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_GrafeasProjects_CreateProject_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GrafeasProjects_CreateProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_GrafeasProjects_GetProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_GrafeasProjects_GetProject_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GrafeasProjects_GetProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_GrafeasProjects_ListProjects_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_GrafeasProjects_ListProjects_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GrafeasProjects_ListProjects_0(ctx, mux, outboundMarshaler, w, req, resp, 
mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_GrafeasProjects_DeleteProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_GrafeasProjects_DeleteProject_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GrafeasProjects_DeleteProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + return nil +} + +var ( + pattern_GrafeasProjects_CreateProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2}, []string{"v1alpha1", "projects", "name"}, "")) + + pattern_GrafeasProjects_GetProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2}, []string{"v1alpha1", "projects", "name"}, "")) + + pattern_GrafeasProjects_ListProjects_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1alpha1", "projects"}, "")) + + pattern_GrafeasProjects_DeleteProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2}, []string{"v1alpha1", "projects", "name"}, "")) +) + +var ( + forward_GrafeasProjects_CreateProject_0 = runtime.ForwardResponseMessage + + forward_GrafeasProjects_GetProject_0 = runtime.ForwardResponseMessage + + forward_GrafeasProjects_ListProjects_0 = runtime.ForwardResponseMessage + + forward_GrafeasProjects_DeleteProject_0 = runtime.ForwardResponseMessage +) diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto new file mode 100644 index 00000000..0ba5c78a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto @@ -0,0 +1,1454 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +import "google/protobuf/empty.proto"; +import "google/api/annotations.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; +import "google/longrunning/operations.proto"; + +package grafeas.v1alpha1.api; +option go_package = "grafeas"; + +// Request to insert a new Project. +message CreateProjectRequest { + // The name of the project of the form + // "projects/{project_id}" + string name = 1; +} + +// Request to get a Project. +message GetProjectRequest { + // The name of the project of the form + // "projects/{project_id}" + string name = 1; +} + +// Request to list projects. +message ListProjectsRequest { + // The filter expression. + string filter = 1; + + // Number of projects to return in the list. + int32 page_size = 2; + + // Token to provide to skip to a particular spot in the list. + string page_token = 3; +} + +// Request to delete a project +message DeleteProjectRequest { + // The name of the project of the form + // "projects/{project_id}" + string name = 1; +} + +// Request to get a Occurrenceg. +message GetOccurrenceRequest { + // The name of the occurrence of the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + string name = 1; +} + +// Request to list occurrences. +message ListOccurrencesRequest { + // This contains the project Id for example: projects/{project_id}. + string parent = 5; + + // The filter expression. + string filter = 2; + + // Number of occurrences to return in the list. + int32 page_size = 3; + + // Token to provide to skip to a particular spot in the list. + string page_token = 4; +} + +// Request to delete a occurrence +message DeleteOccurrenceRequest { + // The name of the occurrence in the form of + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + string name = 1; +} + +// Request to insert a new occurrence. 
+message CreateOccurrenceRequest { + // This field contains the project Id for example: "projects/{project_id}" + string parent = 3; + + // The occurrence to be inserted + Occurrence occurrence = 2; +} + +// Request to update an existing occurrence +message UpdateOccurrenceRequest { + // The name of the occurrence. + // Should be of the form "projects/{project_id}/occurrences/{OCCURRENCE_ID}". + string name = 1; + + // The updated occurrence. + Occurrence occurrence = 2; + + // The fields to update. + google.protobuf.FieldMask update_mask = 3; +} + +// Request to get a Note. +message GetNoteRequest { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + string name = 1; +} + +// Request to get the note to which this occurrence is attached. +message GetOccurrenceNoteRequest { + // The name of the occurrence in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + string name = 1; +} + +// Request to list notes. +message ListNotesRequest { + // This field contains the project ID for example: "projects/{project_id}". + string parent = 5; + + // The filter expression. + string filter = 2; + + // Number of notes to return in the list. + int32 page_size = 3; + + // Token to provide to skip to a particular spot in the list. + string page_token = 4; +} + +// Request to delete a note +message DeleteNoteRequest { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + string name = 1; +} + +// Request to insert a new note +message CreateNoteRequest { + // This field contains the project Id for example: + // "project/{project_id} + string parent = 4; + + // The ID to use for this note. + string note_id = 2; + + // The Note to be inserted + Note note = 3; +} + +// Request to update an existing note +message UpdateNoteRequest { + // The name of the note. + // Should be of the form "projects/{provider_id}/notes/{note_id}". + string name = 1; + + // The updated note. 
+ Note note = 2; + + // The fields to update. + google.protobuf.FieldMask update_mask = 3; +} + +// Request to list occurrences. +message ListNoteOccurrencesRequest { + // The name field will contain the note name for example: + // "provider/{provider_id}/notes/{note_id}" + string name = 1; + + // The filter expression. + string filter = 2; + + // Number of notes to return in the list. + int32 page_size = 3; + + // Token to provide to skip to a particular spot in the list. + string page_token = 4; +} + +// Response including listed projects +message ListProjectsResponse { + // The projects requested. + repeated Project projects = 1; + + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + string next_page_token = 2; +} + +// Response including listed occurrences for a note. +message ListNoteOccurrencesResponse { + // The occurrences attached to the specified note. + repeated Occurrence occurrences = 1; + + // Token to receive the next page of notes. + string next_page_token = 2; +} + +// Response including listed notes. +message ListNotesResponse { + // The occurrences requested + repeated Note notes = 1; + + // The next pagination token in the list response. It should be used as + // page_token for the following request. An empty value means no more result. + string next_page_token = 2; +} + +// Response including listed active occurrences. +message ListOccurrencesResponse { + // The occurrences requested. + repeated Occurrence occurrences = 1; + + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + string next_page_token = 2; +} + +// Response including listed operations. +message ListOperationsResponse { + // The next pagination token in the List response. It should be used as + // page_token for the following request. 
An empty value means no more results.
+ string nextPageToken = 1;
+ // The operations requested.
+ repeated google.longrunning.Operation operations = 2;
+}
+
+// Request for updating an existing operation
+message UpdateOperationRequest {
+ // The name of the Operation.
+ // Should be of the form "projects/{provider_id}/operations/{operation_id}".
+ string name = 1;
+ // The operation to create.
+ google.longrunning.Operation operation = 3;
+}
+
+// Request for creating an operation
+message CreateOperationRequest {
+ // The projectId that this operation should be created under.
+ string parent = 1;
+ // The ID to use for this operation.
+ string operation_id = 2;
+ // The operation to create.
+ google.longrunning.Operation operation = 3;
+}
+
+// Provides detailed description of a `Project`.
+message Project {
+ // The name of the project of the form
+ // "projects/{project_id}"
+ string name = 1;
+}
+
+// Metadata for all operations used and required for all operations
+// that are created by Container Analysis Providers
+message OperationMetadata {
+ // Output only. The time this operation was created.
+ google.protobuf.Timestamp create_time = 1;
+
+ // Output only. The time that this operation was marked completed or failed.
+ google.protobuf.Timestamp end_time = 2;
+}
+
+// Artifact describes a build product.
+message Artifact {
+ // Name of the artifact. This may be the path to a binary or jar file, or in
+ // the case of a container build, the name used to push the container image to
+ // Google Container Registry, as presented to `docker push`.
+ //
+ // This field is deprecated in favor of the plural `names` field; it continues
+ // to exist here to allow existing BuildProvenance serialized to json in
+ // google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to
+ // deserialize back into proto.
+ string name = 1;
+
+ // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a
+ // container.
+ string checksum = 2; + + // Artifact ID, if any; for container images, this will be a URL by digest + // like gcr.io/projectID/imagename@sha256:123456 + string id = 3; + + // Related artifact names. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. Note that a + // single Artifact ID can have multiple names, for example if two tags are + // applied to one image. + repeated string names = 4; +} + +// Note kind that represents a logical attestation "role" or "authority". For +// example, an organization might have one AttestationAuthority for "QA" and one +// for "build". This Note is intended to act strictly as a grouping mechanism +// for the attached Occurrences (Attestations). This grouping mechanism also +// provides a security boundary and provides a single point of lookup to find +// all attached Attestation Occurrences, even if they don't all live in the same +// project. +message AttestationAuthority { + AttestationAuthorityHint hint = 1; + + message AttestationAuthorityHint { + // The human readable name of this Attestation Authority, e.g. "qa". + string human_readable_name = 1; + } + message Attestation { + // The signature, generally over the `resource_url`, that verifies this + // attestation. The semantics of the signature veracity are ultimately + // determined by the verification engine. + oneof signature { + PgpSignedAttestation pgp_signed_attestation = 1; + } + } +} + +// Message encapsulating build provenance details. +message BuildDetails { + // The actual provenance + BuildProvenance provenance = 1; + + // Serialized JSON representation of the provenance, used in generating the + // `BuildSignature` in the corresponding Result. After verifying the + // signature, `provenance_bytes` can be unmarshalled and compared to the + // provenance to confirm that it is unchanged. 
A base64-encoded string
+ // representation of the provenance bytes is used for the signature in order
+ // to interoperate with openssl which expects this format for signature
+ // verification.
+ //
+ // The serialized form is captured both to avoid ambiguity in how the
+ // provenance is marshalled to json as well to prevent incompatibilities with
+ // future changes.
+ string provenance_bytes = 2;
+}
+
+// Provenance of a build. Contains all information needed to verify the full
+// details about the build from source to completion.
+message BuildProvenance {
+ // Unique identifier of the build.
+ string id = 1;
+
+ // ID of the project.
+ string project_id = 2;
+
+ // Commands requested by the build.
+ repeated Command commands = 5;
+
+ // Output of the build.
+ repeated Artifact built_artifacts = 6;
+
+ // Time at which the build was created.
+ google.protobuf.Timestamp create_time = 7;
+
+ // Time at which execution of the build was started.
+ google.protobuf.Timestamp start_time = 8;
+
+ // Time at which execution of the build was finished.
+ google.protobuf.Timestamp finish_time = 9;
+
+ // E-mail address of the user who initiated this build. Note that this was the
+ // user's e-mail address at the time the build was initiated; this address may
+ // not represent the same end-user for all time.
+ string creator = 11;
+
+ // Google Cloud Storage bucket where logs were written.
+ string logs_bucket = 13;
+
+ // Details of the Source input to the build.
+ Source source_provenance = 14;
+
+ // Trigger identifier if the build was triggered automatically; empty if not.
+ string trigger_id = 15;
+
+ // Special options applied to this build. This is a catch-all field where
+ // build providers can enter any desired additional details.
+ map<string, string> build_options = 16;
+
+ // Version string of the builder at the time this build was executed.
+ string builder_version = 17;
+}
+
+// Message encapsulating the signature of the verified build.
+message BuildSignature { + // Public key formats + enum KeyType { + // `KeyType` is not set. + KEY_TYPE_UNSPECIFIED = 0; + + // `PGP ASCII Armored` public key. + PGP_ASCII_ARMORED = 1; + + // `PKIX PEM` public key. + PKIX_PEM = 2; + } + + // Public key of the builder which can be used to verify that the related + // findings are valid and unchanged. If `key_type` is empty, this defaults + // to PEM encoded public keys. + // + // This field may be empty if `key_id` references an external key. + // + // For Cloud Container Builder based signatures, this is a PEM encoded public + // key. To verify the Cloud Container Builder signature, place the contents of + // this field into a file (public.pem). The signature field is base64-decoded + // into its binary representation in signature.bin, and the provenance bytes + // from `BuildDetails` are base64-decoded into a binary representation in + // signed.bin. OpenSSL can then verify the signature: + // `openssl sha256 -verify public.pem -signature signature.bin signed.bin` + string public_key = 1; + + // Signature of the related `BuildProvenance`, encoded in a base64 string. + string signature = 2; + + // An Id for the key used to sign. This could be either an Id for the key + // stored in `public_key` (such as the Id or fingerprint for a PGP key, or the + // CN for a cert), or a reference to an external key (such as a reference to a + // key in Cloud Key Management Service). + string key_id = 3; + + // The type of the key, either stored in `public_key` or referenced in + // `key_id` + KeyType key_type = 4; +} + +// Note holding the version of the provider's builder and the signature of +// the provenance message in linked BuildDetails. +message BuildType { + // Version of the builder which produced this Note. + string builder_version = 1; + + // Signature of the build in Occurrences pointing to the Note containing this + // `BuilderDetails`. 
+ BuildSignature signature = 2; +} + +// Command describes a step performed as part of the build pipeline. +message Command { + // Name of the command, as presented on the command line, or if the command is + // packaged as a Docker container, as presented to `docker pull`. + string name = 1; + + // Environment variables set before running this Command. + repeated string env = 2; + + // Command-line arguments used when executing this Command. + repeated string args = 3; + + // Working directory (relative to project source root) used when running + // this Command. + string dir = 4; + + // Optional unique identifier for this Command, used in wait_for to reference + // this Command as a dependency. + string id = 5; + + // The ID(s) of the Command(s) that this Command depends on. + repeated string wait_for = 6; +} + +// An artifact that can be deployed in some runtime. +message Deployable { + // The period during which some deployable was active in a runtime. + message Deployment { + // Types of platforms. + enum Platform { + // Unknown + PLATFORM_UNSPECIFIED = 0; + + // Google Container Engine + GKE = 1; + + // Google App Engine: Flexible Environment + FLEX = 2; + + // Custom user-defined platform + CUSTOM = 3; + } + + // Identity of the user that triggered this deployment. + string user_email = 1; + + // Beginning of the lifetime of this deployment. + google.protobuf.Timestamp deploy_time = 2; + + // End of the lifetime of this deployment. + google.protobuf.Timestamp undeploy_time = 3; + + // Configuration used to create this deployment. + string config = 8; + + // Address of the runtime element hosting this deployment. + string address = 5; + + // Output only. Resource URI for the artifact being deployed taken from the + // deployable field with the same name. + repeated string resource_uri = 6; + + // Platform hosting this deployment. + Platform platform = 7; + } + + // Resource URI for the artifact being deployed. 
+ repeated string resource_uri = 1; +} + +// DockerImage holds types defining base image notes +// and derived image occurrences. +message DockerImage { + // Layer holds metadata specific to a layer of a Docker image. + message Layer { + // Instructions from dockerfile + enum Directive { + // Default value for unsupported/missing directive + DIRECTIVE_UNSPECIFIED = 0; + + // https://docs.docker.com/reference/builder/#maintainer + MAINTAINER = 1; + + // https://docs.docker.com/reference/builder/#run + RUN = 2; + + // https://docs.docker.com/reference/builder/#cmd + CMD = 3; + + // https://docs.docker.com/reference/builder/#label + LABEL = 4; + + // https://docs.docker.com/reference/builder/#expose + EXPOSE = 5; + + // https://docs.docker.com/reference/builder/#env + ENV = 6; + + // https://docs.docker.com/reference/builder/#add + ADD = 7; + + // https://docs.docker.com/reference/builder/#copy + COPY = 8; + + // https://docs.docker.com/reference/builder/#entrypoint + ENTRYPOINT = 9; + + // https://docs.docker.com/reference/builder/#volume + VOLUME = 10; + + // https://docs.docker.com/reference/builder/#user + USER = 11; + + // https://docs.docker.com/reference/builder/#workdir + WORKDIR = 12; + + // https://docs.docker.com/reference/builder/#arg + ARG = 13; + + // https://docs.docker.com/reference/builder/#onbuild + ONBUILD = 14; + + // https://docs.docker.com/reference/builder/#stopsignal + STOPSIGNAL = 15; + + // https://docs.docker.com/reference/builder/#healthcheck + HEALTHCHECK = 16; + + // https://docs.docker.com/reference/builder/#shell + SHELL = 17; + } + + // The recovered Dockerfile directive used to construct this layer. + Directive directive = 1; + + // The recovered arguments to the Dockerfile directive. + string arguments = 2; + } + + // A set of properties that uniquely identify a given Docker image. + message Fingerprint { + // The layer-id of the final layer in the Docker image's v1 + // representation. 
+ // This field can be used as a filter in list requests. + string v1_name = 1; + + // The ordered list of v2 blobs that represent a given image. + repeated string v2_blob = 2; + + // Output only. The name of the image's v2 blobs computed via: + // [bottom] := v2_blob[bottom] + // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) + // Only the name of the final blob is kept. + // This field can be used as a filter in list requests. + string v2_name = 3; + } + + // Basis describes the base image portion (Note) of the DockerImage + // relationship. Linked occurrences are derived from this or an + // equivalent image via: + // FROM + // Or an equivalent reference, e.g. a tag of the resource_url. + message Basis { + // The resource_url for the resource representing the basis of + // associated occurrence images. + string resource_url = 1; + + // The fingerprint of the base image + Fingerprint fingerprint = 2; + } + + // Derived describes the derived image portion (Occurrence) of the + // DockerImage relationship. This image would be produced from a Dockerfile + // with FROM . + message Derived { + // The fingerprint of the derived image + Fingerprint fingerprint = 1; + + // Output only. The number of layers by which this image differs from + // the associated image basis. + uint32 distance = 2; + + // This contains layer-specific metadata, if populated it + // has length "distance" and is ordered with [distance] being the + // layer immediately following the base image and [1] + // being the final layer. + repeated Layer layer_info = 3; + + // Output only.This contains the base image url for the derived image + // Occurrence + string base_resource_url = 4; + } +} + +// A note that indicates a type of analysis a provider would perform. This note +// exists in a provider's project. A `Discovery` occurrence is created in a +// consumer's project at the start of analysis. The occurrence's operation will +// indicate the status of the analysis. 
Absence of an occurrence linked to this +// note for a resource indicates that analysis hasn't started. +message Discovery { + // Provides information about the scan status of a discovered resource. + message Discovered { + // Output only. An operation that indicates the status of the current scan. + google.longrunning.Operation operation = 1; + } + + // The kind of analysis that is handled by this discovery. + Note.Kind analysis_kind = 1; +} + +// Container message for hashes of byte content of files, used in Source +// messages to verify integrity of source input to the build. +message FileHashes { + // Collection of file hashes. + repeated Hash file_hash = 1; +} + +// Container message for hash values. +message Hash { + // Specifies the hash algorithm, if any. + enum HashType { + // No hash requested. + NONE = 0; + + // A sha256 hash. + SHA256 = 1; + } + + // The type of hash that was performed. + HashType type = 1; + + // The hash value. + bytes value = 2; +} + +// Provides a detailed description of a `Note`. +message Note { + // Metadata for any related URL information + message RelatedUrl { + // Specific URL to associate with the note + string url = 1; + + // Label to describe usage of the URL + string label = 2; + } + + // This must be 1:1 with members of our oneofs, it can be used for filtering + // Note and Occurrence on their kind. + enum Kind { + // Unknown + KIND_UNSPECIFIED = 0; + + // The note and occurrence represent a package vulnerability. + PACKAGE_VULNERABILITY = 2; + + // The note and occurrence assert build provenance. + BUILD_DETAILS = 3; + + // This represents an image basis relationship. + IMAGE_BASIS = 4; + + // This represents a package installed via a package manager. + PACKAGE_MANAGER = 5; + + // The note and occurrence track deployment events. + DEPLOYABLE = 6; + + // The note and occurrence track the initial discovery status of a resource. 
+ DISCOVERY = 7; + } + + // The name of the note in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + string name = 1; + + // A one sentence description of this `Note`. + string short_description = 3; + + // A detailed description of this `Note`. + string long_description = 4; + + // Output only. This explicitly denotes which kind of note is specified. This + // field can be used as a filter in list requests. + Kind kind = 9; + + // The type of note. + oneof note_type { + // A package vulnerability type of note. + VulnerabilityType vulnerability_type = 6; + + // Build provenance type for a verifiable build. + BuildType build_type = 8; + + // A note describing a base image. + DockerImage.Basis base_image = 13; + + // A note describing a package hosted by various package managers. + PackageManager.Package package = 14; + + // A note describing something that can be deployed. + Deployable deployable = 17; + + // A note describing a provider/analysis type. + Discovery discovery = 18; + } + + // URLs associated with this note + repeated RelatedUrl related_url = 7; + + // Time of expiration for this note, null if note does not expire. + google.protobuf.Timestamp expiration_time = 10; + + // Output only. The time this note was created. This field can be used as a + // filter in list requests. + google.protobuf.Timestamp create_time = 11; + + // Output only. The time this note was last updated. This field can be used as + // a filter in list requests. + google.protobuf.Timestamp update_time = 12; +} + +// `Occurrence` includes information about analysis occurrences for an image. +message Occurrence { + // Output only. The name of the `Occurrence` in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + string name = 1; + + // The unique URL of the image or the container for which the `Occurrence` + // applies. For example, https://gcr.io/project/image@sha256:foo This field + // can be used as a filter in list requests. 
+ string resource_url = 2; + + // An analysis note associated with this image, in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + // This field can be used as a filter in list requests. + string note_name = 3; + + // Output only. This explicitly denotes which of the `Occurrence` details are + // specified. This field can be used as a filter in list requests. + Note.Kind kind = 6; + + // Describes the details of the vulnerability `Note` found in this resource. + oneof details { + // Details of a security vulnerability note. + VulnerabilityType.VulnerabilityDetails vulnerability_details = 8; + + // Build details for a verifiable build. + BuildDetails build_details = 7; + + // Describes how this resource derives from the basis + // in the associated note. + DockerImage.Derived derived_image = 11; + + // Describes the installation of a package on the linked resource. + PackageManager.Installation installation = 12; + + // Describes the deployment of an artifact on a runtime. + Deployable.Deployment deployment = 14; + + // Describes the initial scan status for this resource. + Discovery.Discovered discovered = 15; + } + + // A description of actions that can be taken to remedy the `Note` + string remediation = 5; + + // Output only. The time this `Occurrence` was created. + google.protobuf.Timestamp create_time = 9; + + // Output only. The time this `Occurrence` was last updated. + google.protobuf.Timestamp update_time = 10; +} + +// PackageManager provides metadata about available / installed packages. +message PackageManager { + // This represents a particular channel of distribution for a given package. + // e.g. Debian's jessie-backports dpkg mirror + message Distribution { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. 
+ string cpe_uri = 1; + + // The CPU architecture for which packages in this distribution + // channel were built + Architecture architecture = 2; + + // The latest available version of this package in + // this distribution channel. + VulnerabilityType.Version latest_version = 3; + + // A freeform string denoting the maintainer of this package. + string maintainer = 4; + + // The distribution channel-specific homepage for this package. + string url = 6; + + // The distribution channel-specific description of this package. + string description = 7; + } + + // An occurrence of a particular package installation found within a + // system's filesystem. + // e.g. glibc was found in /var/lib/dpkg/status + message Location { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + string cpe_uri = 1; + + // The version installed at this location. + VulnerabilityType.Version version = 2; + + // The path from which we gathered that this package/version is installed. + string path = 3; + } + + // This represents a particular package that is distributed over + // various channels. + // e.g. glibc (aka libc6) is distributed by many, at various versions. + message Package { + // The name of the package. + string name = 1; + + // The various channels by which a package is distributed. + repeated Distribution distribution = 10; + } + + // This represents how a particular software package may be installed on + // a system. + message Installation { + // Output only. The name of the installed package. + string name = 1; + + // All of the places within the filesystem versions of this package + // have been found. + repeated Location location = 2; + } + + // Instruction set architectures supported by various package managers. 
+ enum Architecture { + // Unknown architecture + ARCHITECTURE_UNSPECIFIED = 0; + + // X86 architecture + X86 = 1; + + // X64 architecture + X64 = 2; + } +} + +// An attestation wrapper with a PGP-compatible signature. +// This message only supports ATTACHED signatures, where the payload that is +// signed is included alongside the signature itself in the same file. +message PgpSignedAttestation { + // The raw content of the signature, as output by gpg or equivalent. Since + // this message only supports attached signatures, the payload that was signed + // must be attached. While the signature format supported is dependent on the + // verification implementation, currently only ASCII-armored (`--armor` to + // gpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are + // supported. + // Concretely, `gpg --sign --armor --output=signature.gpg payload.json` will + // create the signature content expected in this field in `signature.gpg` for + // the `payload.json` attestation payload. + string signature = 1; + + // Type (e.g. schema) of the attestation payload that was signed. + enum ContentType { + // ContentType is not set. + CONTENT_TYPE_UNSPECIFIED = 0; + // Atomic format attestation signature. See + // https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md + // The payload extracted from `signature` is a JSON blob conforming to the + // linked schema. + SIMPLE_SIGNING_JSON = 1; + } + + // Type (e.g. schema) of the attestation payload that was signed. + // The verifier must ensure that the provided type is one that the verifier + // supports, and that the attestation payload is a valid instantiation of that + // type (e.g. by validating a JSON schema). + ContentType content_type = 3; + + // This field is used by verifiers to select the public key used to validate + // the signature. 
Note that the policy of the verifier ultimately determines
+ // which public keys verify a signature based on the context of the
+ // verification. There is no guarantee validation will succeed if the
+ // verifier has no key matching this ID, even if it has a key under a
+ // different ID that would verify the signature. Note that this ID should also
+ // be present in the signature content above, but that is not expected to be
+ // used by the verifier.
+ oneof key_id {
+ // The ID of the key, as output by `gpg --list-keys`. This should be 8
+ // hexadecimal digits, capitalized. e.g.
+ // $ gpg --list-keys pub
+ // 2048R/A663AEEA 2017-08-01 uid Fake Name
+ //
+ // In the above example, the `key_id` is "A663AEEA".
+ // Note that in practice this ID is the last 64 bits of the key fingerprint.
+ string pgp_key_id = 2;
+ }
+}
+
+// Source describes the location of the source used for the build.
+message Source {
+ // Source location information.
+ oneof source {
+ // If provided, get the source from this location in Google Cloud
+ // Storage.
+ StorageSource storage_source = 1;
+
+ // If provided, get source from this location in a Cloud Repo.
+ RepoSource repo_source = 2;
+ }
+
+ // If provided, the input binary artifacts for the build came from this
+ // location.
+ StorageSource artifact_storage_source = 4;
+
+ // Hash(es) of the build source, which can be used to verify that the original
+ // source integrity was maintained in the build.
+ //
+ // The keys to this map are file paths used as build source and the values
+ // contain the hash values for those files.
+ //
+ // If the build source came in a single package such as a gzipped tarfile
+ // (.tar.gz), the FileHash will be for the single path to that file.
+ map<string, FileHashes> file_hashes = 3;
+
+ // If provided, the source code used for the build came from this location.
+ SourceContext context = 7; + + // If provided, some of the source code used for the build may be found in + // these locations, in the case where the source repository had multiple + // remotes or submodules. This list will not include the context specified in + // the context field. + repeated SourceContext additional_contexts = 8; +} + +// RepoSource describes the location of the source in a Google Cloud Source +// Repository. +message RepoSource { + // ID of the project that owns the repo. + string project_id = 1; + + // Name of the repo. + string repo_name = 2; + + // A revision within the source repository must be specified in + // one of these ways. + oneof revision { + // Name of the branch to build. + string branch_name = 3; + + // Name of the tag to build. + string tag_name = 4; + + // Explicit commit SHA to build. + string commit_sha = 5; + } +} + +// StorageSource describes the location of the source in an archive file in +// Google Cloud Storage. +message StorageSource { + // Google Cloud Storage bucket containing source (see [Bucket Name + // Requirements] + // (https://cloud.google.com/storage/docs/bucket-naming#requirements)). + string bucket = 1; + + // Google Cloud Storage object containing source. + string object = 2; + + // Google Cloud Storage generation for the object. + int64 generation = 3; +} + +// VulnerabilityType provides metadata about a security vulnerability. +message VulnerabilityType { + // Version contains structured information about the version of the package. + // For a discussion of this in Debian/Ubuntu: + // http://serverfault.com/questions/604541/debian-packages-version-convention + // For a discussion of this in Redhat/Fedora/Centos: + // http://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/ + message Version { + // Whether this is an ordinary package version or a + // sentinel MIN/MAX version. + enum VersionKind { + // A standard package version, defined by the other fields. 
+ NORMAL = 0; + + // A special version representing negative infinity, + // other fields are ignored. + MINIMUM = 1; + + // A special version representing positive infinity, + // other fields are ignored. + MAXIMUM = 2; + } + + // Used to correct mistakes in the version numbering scheme. + int32 epoch = 1; + + // The main part of the version name. + string name = 2; + + // The iteration of the package build from the above version. + string revision = 3; + + // Distinguish between sentinel MIN/MAX versions and normal versions. + // If kind is not NORMAL, then the other fields are ignored. + VersionKind kind = 5; + } + + // Identifies all occurrences of this vulnerability in the package for a + // specific distro/location + // For example: glibc in cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2 + message Detail { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in + // which the vulnerability manifests. Examples include distro or storage + // location for vulnerable jar. + // This field can be used as a filter in list requests. + string cpe_uri = 1; + + // The name of the package where the vulnerability was found. + // This field can be used as a filter in list requests. + string package = 8; + + // The min version of the package in which the vulnerability exists. + Version min_affected_version = 6; + + // The max version of the package in which the vulnerability exists. + // This field can be used as a filter in list requests. + Version max_affected_version = 7; + + // The severity (eg: distro assigned severity) for this vulnerability. + string severity_name = 4; + + // A vendor-specific description of this note. + string description = 9; + + // The fix for this specific package version. 
+ VulnerabilityLocation fixed_location = 5; + + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + string package_type = 10; + } + + // Used by Occurrence to point to where the vulnerability exists and how + // to fix it. + message VulnerabilityDetails { + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + string type = 3; + + // Output only. The note provider assigned Severity of the vulnerability. + Severity severity = 4; + + // Output only. The CVSS score of this vulnerability. CVSS score is on a + // scale of 0-10 where 0 indicates low severity and 10 indicates high + // severity. + float cvss_score = 5; + + // The set of affected locations and their fixes (if available) within + // the associated resource. + repeated PackageIssue package_issue = 6; + } + + // This message wraps a location affected by a vulnerability and its + // associated fix (if one is available). + message PackageIssue { + // The location of the vulnerability. + VulnerabilityLocation affected_location = 1; + + // The location of the available fix for vulnerability. + VulnerabilityLocation fixed_location = 2; + + // The severity (eg: distro assigned severity) for this vulnerability. + string severity_name = 3; + } + + // The location of the vulnerability + message VulnerabilityLocation { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) + // format. Examples include distro or storage location for vulnerable jar. + // This field can be used as a filter in list requests. + string cpe_uri = 1; + + // The package being described. + string package = 2; + + // The version of the package being described. + // This field can be used as a filter in list requests. 
+ Version version = 4;
+ }
+
+ // Note provider-assigned severity/impact ranking
+ enum Severity {
+ // Unknown Impact
+ SEVERITY_UNSPECIFIED = 0;
+
+ // Minimal Impact
+ MINIMAL = 1;
+
+ // Low Impact
+ LOW = 2;
+
+ // Medium Impact
+ MEDIUM = 3;
+
+ // High Impact
+ HIGH = 4;
+
+ // Critical Impact
+ CRITICAL = 5;
+ }
+
+ // The CVSS score for this Vulnerability.
+ float cvss_score = 2;
+
+ // Note provider assigned impact of the vulnerability
+ Severity severity = 3;
+
+ // All information about the package to specifically identify this
+ // vulnerability. One entry per (version range and cpe_uri) the
+ // package vulnerability has manifested in.
+ repeated Detail details = 4;
+}
+
+// A SourceContext is a reference to a tree of files. A SourceContext together
+// with a path point to a unique revision of a single file or directory.
+message SourceContext {
+ // A SourceContext can refer any one of the following types of repositories.
+ oneof context {
+ // A SourceContext referring to a revision in a Google Cloud Source Repo.
+ CloudRepoSourceContext cloud_repo = 1;
+
+ // A SourceContext referring to a Gerrit project.
+ GerritSourceContext gerrit = 2;
+
+ // A SourceContext referring to any third party Git repo (e.g., GitHub).
+ GitSourceContext git = 3;
+ }
+
+ // Labels with user defined metadata.
+ map<string, string> labels = 4;
+}
+
+// An alias to a repo revision.
+message AliasContext {
+ // The type of an alias.
+ enum Kind {
+ // Unknown.
+ KIND_UNSPECIFIED = 0;
+
+ // Git tag.
+ FIXED = 1;
+
+ // Git branch.
+ MOVABLE = 2;
+
+ // Used to specify non-standard aliases. For example, if a Git repo has a
+ // ref named "refs/foo/bar".
+ OTHER = 4;
+ }
+
+ // The alias kind.
+ Kind kind = 1;
+
+ // The alias name.
+ string name = 2;
+}
+
+// A CloudRepoSourceContext denotes a particular revision in a Google Cloud
+// Source Repo.
+message CloudRepoSourceContext {
+ // The ID of the repo.
+ RepoId repo_id = 1; + + // A revision in a Cloud Repo can be identified by either its revision ID or + // its alias. + oneof revision { + // A revision ID. + string revision_id = 2; + + // An alias, which may be a branch or tag. + AliasContext alias_context = 3; + } +} + +// A SourceContext referring to a Gerrit project. +message GerritSourceContext { + // The URI of a running Gerrit instance. + string host_uri = 1; + + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. The "repo name" is + // the hostURI/project. + string gerrit_project = 2; + + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. + oneof revision { + // A revision (commit) ID. + string revision_id = 3; + + // An alias, which may be a branch or tag. + AliasContext alias_context = 4; + } +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g., GitHub). +message GitSourceContext { + // Git repository URL. + string url = 1; + + // Required. + // Git commit hash. + string revision_id = 2; +} + +// A unique identifier for a Cloud Repo. +message RepoId { + // A cloud repo can be identified by either its project ID and repository name + // combination, or its globally unique identifier. + oneof id { + // A combination of a project ID and a repo name. + ProjectRepoId project_repo_id = 1; + + // A server-assigned, globally unique identifier. + string uid = 2; + } +} + +// Selects a repo using a Google Cloud Platform project ID (e.g., +// winged-cargo-31) and a repo name within that project. +message ProjectRepoId { + // The ID of the project. + string project_id = 1; + + // The name of the repo. Leave empty for the default repo. + string repo_name = 2; +} + +// [Grafeas](grafeas.io) API. +// +// Retrieves the results of vulnerability scanning of cloud components such as +// container images. 
+// +// The vulnerability results are stored as a series of Occurrences. +// An `Occurrence` contains information about a specific vulnerability in a +// resource. An `Occurrence` references a `Note`. A `Note` contains details +// about the vulnerability and is stored in a stored in a separate project. +// Multiple `Occurrences` can reference the same `Note`. For example, an SSL +// vulnerability could affect multiple packages in an image. In this case, +// there would be one `Note` for the vulnerability and an `Occurrence` for +// each package with the vulnerability referencing that `Note`. +service Grafeas { + // Returns the requested `Occurrence`. + rpc GetOccurrence(GetOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/occurrences/*}" + }; + } + + // Lists active `Occurrences` for a given project matching the filters. + rpc ListOccurrences(ListOccurrencesRequest) + returns (ListOccurrencesResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*}/occurrences" + }; + } + + // Deletes the given `Occurrence` from the system. Use this when + // an `Occurrence` is no longer applicable for the given resource. + rpc DeleteOccurrence(DeleteOccurrenceRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/occurrences/*}" + }; + } + + // Creates a new `Occurrence`. Use this method to create `Occurrences` + // for a resource. + rpc CreateOccurrence(CreateOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*}/occurrences" + body: "occurrence" + }; + } + + // Updates an existing occurrence. + rpc UpdateOccurrence(UpdateOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + patch: "/v1alpha1/{name=projects/*/occurrences/*}" + body: "occurrence" + }; + } + + // Gets the `Note` attached to the given `Occurrence`. 
+ rpc GetOccurrenceNote(GetOccurrenceNoteRequest) returns (Note) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/occurrences/*}/notes" + }; + } + + // Creates a new `Operation`. + rpc CreateOperation(CreateOperationRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*}/operations" + body: "*" + }; + }; + + // Updates an existing operation returns an error if operation + // does not exist. The only valid operations are to update mark the done bit + // change the result. + rpc UpdateOperation(UpdateOperationRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + patch: "/v1alpha1/{name=projects/*/operations/*}" + body: "*" + }; + }; + + // Returns the requested `Note`. + rpc GetNote(GetNoteRequest) returns (Note) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/notes/*}" + }; + } + + // Lists all `Notes` for a given project. + rpc ListNotes(ListNotesRequest) returns (ListNotesResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*}/notes" + }; + } + + // Deletes the given `Note` from the system. + rpc DeleteNote(DeleteNoteRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/notes/*}" + }; + } + + // Creates a new `Note`. + rpc CreateNote(CreateNoteRequest) returns (Note) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*}/notes" + body: "note" + }; + } + + // Updates an existing `Note`. + rpc UpdateNote(UpdateNoteRequest) returns (Note) { + option (google.api.http) = { + patch: "/v1alpha1/{name=projects/*/notes/*}" + body: "note" + }; + } + + // Lists `Occurrences` referencing the specified `Note`. Use this method to + // get all occurrences referencing your `Note` across all your customer + // projects. 
+ rpc ListNoteOccurrences(ListNoteOccurrencesRequest) + returns (ListNoteOccurrencesResponse) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/notes/*}/occurrences" + }; + } +} + +// [GrafeasProjects](grafeas.io) API. +// +// Manages Grafeas `Projects`. `Projects` contain sets of other Grafeas +// entities such as `Notes`, `Occurrences` and `Operations`. +service GrafeasProjects { + // Creates a new `Project`. + rpc CreateProject(CreateProjectRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1alpha1/{name=projects/*}" + }; + } + + // Returns the requested `Project`. + rpc GetProject(GetProjectRequest) returns (Project) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*}" + }; + } + + // Lists `Projects` + rpc ListProjects(ListProjectsRequest) + returns (ListProjectsResponse) { + option (google.api.http) = { + get: "/v1alpha1/projects" + }; + } + + // Deletes the given `Project` from the system. + rpc DeleteProject(DeleteProjectRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*}" + }; + } +} \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json new file mode 100644 index 00000000..9368fb1a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json @@ -0,0 +1,1665 @@ +{ + "swagger": "2.0", + "info": { + "title": "v1alpha1/proto/grafeas.proto", + "version": "version not set" + }, + "schemes": [ + "http", + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/v1alpha1/projects": { + "get": { + "summary": "Lists `Projects`", + "operationId": "ListProjects", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiListProjectsResponse" + } + } + }, + "parameters": [ + { + "name": "filter", 
+ "description": "The filter expression.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "Number of projects to return in the list.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "GrafeasProjects" + ] + } + }, + "/v1alpha1/{name}": { + "get": { + "summary": "Returns the requested `Project`.", + "operationId": "GetProject", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiProject" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "GrafeasProjects" + ] + }, + "delete": { + "summary": "Deletes the given `Project` from the system.", + "operationId": "DeleteProject", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "GrafeasProjects" + ] + }, + "post": { + "summary": "Creates a new `Project`.", + "operationId": "CreateProject", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "GrafeasProjects" + ] + }, + "patch": { + "summary": "Updates an existing `Note`.", + "operationId": "UpdateNote", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiNote" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiNote" + } + } 
+ ], + "tags": [ + "Grafeas" + ] + } + }, + "/v1alpha1/{name}/notes": { + "get": { + "summary": "Gets the `Note` attached to the given `Occurrence`.", + "operationId": "GetOccurrenceNote", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiNote" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "Grafeas" + ] + } + }, + "/v1alpha1/{name}/occurrences": { + "get": { + "summary": "Lists `Occurrences` referencing the specified `Note`. Use this method to\nget all occurrences referencing your `Note` across all your customer\nprojects.", + "operationId": "ListNoteOccurrences", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiListNoteOccurrencesResponse" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "Number of notes to return in the list.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "Grafeas" + ] + } + }, + "/v1alpha1/{parent}/notes": { + "get": { + "summary": "Lists all `Notes` for a given project.", + "operationId": "ListNotes", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiListNotesResponse" + } + } + }, + "parameters": [ + { + "name": "parent", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "Number of notes to return in the 
list.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "Grafeas" + ] + }, + "post": { + "summary": "Creates a new `Note`.", + "operationId": "CreateNote", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiNote" + } + } + }, + "parameters": [ + { + "name": "parent", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiNote" + } + } + ], + "tags": [ + "Grafeas" + ] + } + }, + "/v1alpha1/{parent}/occurrences": { + "get": { + "summary": "Lists active `Occurrences` for a given project matching the filters.", + "operationId": "ListOccurrences", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiListOccurrencesResponse" + } + } + }, + "parameters": [ + { + "name": "parent", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "filter", + "description": "The filter expression.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "Number of occurrences to return in the list.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "description": "Token to provide to skip to a particular spot in the list.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "Grafeas" + ] + }, + "post": { + "summary": "Creates a new `Occurrence`. 
Use this method to create `Occurrences`\nfor a resource.", + "operationId": "CreateOccurrence", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiOccurrence" + } + } + }, + "parameters": [ + { + "name": "parent", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiOccurrence" + } + } + ], + "tags": [ + "Grafeas" + ] + } + }, + "/v1alpha1/{parent}/operations": { + "post": { + "summary": "Creates a new `Operation`.", + "operationId": "CreateOperation", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/longrunningOperation" + } + } + }, + "parameters": [ + { + "name": "parent", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiCreateOperationRequest" + } + } + ], + "tags": [ + "Grafeas" + ] + } + } + }, + "definitions": { + "BuildSignatureKeyType": { + "type": "string", + "enum": [ + "KEY_TYPE_UNSPECIFIED", + "PGP_ASCII_ARMORED", + "PKIX_PEM" + ], + "default": "KEY_TYPE_UNSPECIFIED", + "description": "- KEY_TYPE_UNSPECIFIED: `KeyType` is not set.\n - PGP_ASCII_ARMORED: `PGP ASCII Armored` public key.\n - PKIX_PEM: `PKIX PEM` public key.", + "title": "Public key formats" + }, + "DeployableDeployment": { + "type": "object", + "properties": { + "user_email": { + "type": "string", + "description": "Identity of the user that triggered this deployment." + }, + "deploy_time": { + "type": "string", + "format": "date-time", + "description": "Beginning of the lifetime of this deployment." + }, + "undeploy_time": { + "type": "string", + "format": "date-time", + "description": "End of the lifetime of this deployment." + }, + "config": { + "type": "string", + "description": "Configuration used to create this deployment." 
+ }, + "address": { + "type": "string", + "description": "Address of the runtime element hosting this deployment." + }, + "resource_uri": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Output only. Resource URI for the artifact being deployed taken from the\ndeployable field with the same name." + }, + "platform": { + "$ref": "#/definitions/DeploymentPlatform", + "description": "Platform hosting this deployment." + } + }, + "description": "The period during which some deployable was active in a runtime." + }, + "DeploymentPlatform": { + "type": "string", + "enum": [ + "PLATFORM_UNSPECIFIED", + "GKE", + "FLEX", + "CUSTOM" + ], + "default": "PLATFORM_UNSPECIFIED", + "description": "Types of platforms.\n\n - PLATFORM_UNSPECIFIED: Unknown\n - GKE: Google Container Engine\n - FLEX: Google App Engine: Flexible Environment\n - CUSTOM: Custom user-defined platform" + }, + "DiscoveryDiscovered": { + "type": "object", + "properties": { + "operation": { + "$ref": "#/definitions/longrunningOperation", + "description": "Output only. An operation that indicates the status of the current scan." + } + }, + "description": "Provides information about the scan status of a discovered resource." + }, + "DockerImageBasis": { + "type": "object", + "properties": { + "resource_url": { + "type": "string", + "description": "The resource_url for the resource representing the basis of\nassociated occurrence images." + }, + "fingerprint": { + "$ref": "#/definitions/DockerImageFingerprint", + "title": "The fingerprint of the base image" + } + }, + "description": "Basis describes the base image portion (Note) of the DockerImage\nrelationship. Linked occurrences are derived from this or an\nequivalent image via:\n FROM \u003cBasis.resource_url\u003e\nOr an equivalent reference, e.g. a tag of the resource_url." 
+ }, + "DockerImageDerived": { + "type": "object", + "properties": { + "fingerprint": { + "$ref": "#/definitions/DockerImageFingerprint", + "title": "The fingerprint of the derived image" + }, + "distance": { + "type": "integer", + "format": "int64", + "description": "Output only. The number of layers by which this image differs from\nthe associated image basis." + }, + "layer_info": { + "type": "array", + "items": { + "$ref": "#/definitions/DockerImageLayer" + }, + "description": "This contains layer-specific metadata, if populated it\nhas length \"distance\" and is ordered with [distance] being the\nlayer immediately following the base image and [1]\nbeing the final layer." + }, + "base_resource_url": { + "type": "string", + "title": "Output only.This contains the base image url for the derived image\nOccurrence" + } + }, + "description": "Derived describes the derived image portion (Occurrence) of the\nDockerImage relationship. This image would be produced from a Dockerfile\nwith FROM \u003cDockerImage.Basis in attached Note\u003e." + }, + "DockerImageFingerprint": { + "type": "object", + "properties": { + "v1_name": { + "type": "string", + "description": "The layer-id of the final layer in the Docker image's v1\nrepresentation.\nThis field can be used as a filter in list requests." + }, + "v2_blob": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The ordered list of v2 blobs that represent a given image." + }, + "v2_name": { + "type": "string", + "description": "Output only. The name of the image's v2 blobs computed via:\n [bottom] := v2_blob[bottom]\n [N] := sha256(v2_blob[N] + \" \" + v2_name[N+1])\nOnly the name of the final blob is kept.\nThis field can be used as a filter in list requests." + } + }, + "description": "A set of properties that uniquely identify a given Docker image." 
+ }, + "DockerImageLayer": { + "type": "object", + "properties": { + "directive": { + "$ref": "#/definitions/LayerDirective", + "description": "The recovered Dockerfile directive used to construct this layer." + }, + "arguments": { + "type": "string", + "description": "The recovered arguments to the Dockerfile directive." + } + }, + "description": "Layer holds metadata specific to a layer of a Docker image." + }, + "HashHashType": { + "type": "string", + "enum": [ + "NONE", + "SHA256" + ], + "default": "NONE", + "description": "Specifies the hash algorithm, if any.\n\n - NONE: No hash requested.\n - SHA256: A sha256 hash." + }, + "LayerDirective": { + "type": "string", + "enum": [ + "DIRECTIVE_UNSPECIFIED", + "MAINTAINER", + "RUN", + "CMD", + "LABEL", + "EXPOSE", + "ENV", + "ADD", + "COPY", + "ENTRYPOINT", + "VOLUME", + "USER", + "WORKDIR", + "ARG", + "ONBUILD", + "STOPSIGNAL", + "HEALTHCHECK", + "SHELL" + ], + "default": "DIRECTIVE_UNSPECIFIED", + "description": "- DIRECTIVE_UNSPECIFIED: Default value for unsupported/missing directive\n - MAINTAINER: https://docs.docker.com/reference/builder/#maintainer\n - RUN: https://docs.docker.com/reference/builder/#run\n - CMD: https://docs.docker.com/reference/builder/#cmd\n - LABEL: https://docs.docker.com/reference/builder/#label\n - EXPOSE: https://docs.docker.com/reference/builder/#expose\n - ENV: https://docs.docker.com/reference/builder/#env\n - ADD: https://docs.docker.com/reference/builder/#add\n - COPY: https://docs.docker.com/reference/builder/#copy\n - ENTRYPOINT: https://docs.docker.com/reference/builder/#entrypoint\n - VOLUME: https://docs.docker.com/reference/builder/#volume\n - USER: https://docs.docker.com/reference/builder/#user\n - WORKDIR: https://docs.docker.com/reference/builder/#workdir\n - ARG: https://docs.docker.com/reference/builder/#arg\n - ONBUILD: https://docs.docker.com/reference/builder/#onbuild\n - STOPSIGNAL: https://docs.docker.com/reference/builder/#stopsignal\n - HEALTHCHECK: 
https://docs.docker.com/reference/builder/#healthcheck\n - SHELL: https://docs.docker.com/reference/builder/#shell", + "title": "Instructions from dockerfile" + }, + "NoteRelatedUrl": { + "type": "object", + "properties": { + "url": { + "type": "string", + "title": "Specific URL to associate with the note" + }, + "label": { + "type": "string", + "title": "Label to describe usage of the URL" + } + }, + "title": "Metadata for any related URL information" + }, + "PackageManagerArchitecture": { + "type": "string", + "enum": [ + "ARCHITECTURE_UNSPECIFIED", + "X86", + "X64" + ], + "default": "ARCHITECTURE_UNSPECIFIED", + "description": "Instruction set architectures supported by various package managers.\n\n - ARCHITECTURE_UNSPECIFIED: Unknown architecture\n - X86: X86 architecture\n - X64: X64 architecture" + }, + "PackageManagerDistribution": { + "type": "object", + "properties": { + "cpe_uri": { + "type": "string", + "description": "The cpe_uri in [cpe format](https://cpe.mitre.org/specification/)\ndenoting the package manager version distributing a package." + }, + "architecture": { + "$ref": "#/definitions/PackageManagerArchitecture", + "title": "The CPU architecture for which packages in this distribution\nchannel were built" + }, + "latest_version": { + "$ref": "#/definitions/VulnerabilityTypeVersion", + "description": "The latest available version of this package in\nthis distribution channel." + }, + "maintainer": { + "type": "string", + "description": "A freeform string denoting the maintainer of this package." + }, + "url": { + "type": "string", + "description": "The distribution channel-specific homepage for this package." + }, + "description": { + "type": "string", + "description": "The distribution channel-specific description of this package." + } + }, + "title": "This represents a particular channel of distribution for a given package.\ne.g. 
Debian's jessie-backports dpkg mirror" + }, + "PackageManagerInstallation": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Output only. The name of the installed package." + }, + "location": { + "type": "array", + "items": { + "$ref": "#/definitions/apiPackageManagerLocation" + }, + "description": "All of the places within the filesystem versions of this package\nhave been found." + } + }, + "description": "This represents how a particular software package may be installed on\na system." + }, + "PackageManagerPackage": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the package." + }, + "distribution": { + "type": "array", + "items": { + "$ref": "#/definitions/PackageManagerDistribution" + }, + "description": "The various channels by which a package is distributed." + } + }, + "description": "This represents a particular package that is distributed over\nvarious channels.\ne.g. glibc (aka libc6) is distributed by many, at various versions." + }, + "VersionVersionKind": { + "type": "string", + "enum": [ + "NORMAL", + "MINIMUM", + "MAXIMUM" + ], + "default": "NORMAL", + "description": "Whether this is an ordinary package version or a\nsentinel MIN/MAX version.\n\n - NORMAL: A standard package version, defined by the other fields.\n - MINIMUM: A special version representing negative infinity,\nother fields are ignored.\n - MAXIMUM: A special version representing positive infinity,\nother fields are ignored." + }, + "VulnerabilityTypeDetail": { + "type": "object", + "properties": { + "cpe_uri": { + "type": "string", + "description": "The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in\nwhich the vulnerability manifests. Examples include distro or storage\nlocation for vulnerable jar.\nThis field can be used as a filter in list requests." 
+ }, + "package": { + "type": "string", + "description": "The name of the package where the vulnerability was found.\nThis field can be used as a filter in list requests." + }, + "min_affected_version": { + "$ref": "#/definitions/VulnerabilityTypeVersion", + "description": "The min version of the package in which the vulnerability exists." + }, + "max_affected_version": { + "$ref": "#/definitions/VulnerabilityTypeVersion", + "description": "The max version of the package in which the vulnerability exists.\nThis field can be used as a filter in list requests." + }, + "severity_name": { + "type": "string", + "description": "The severity (eg: distro assigned severity) for this vulnerability." + }, + "description": { + "type": "string", + "description": "A vendor-specific description of this note." + }, + "fixed_location": { + "$ref": "#/definitions/VulnerabilityTypeVulnerabilityLocation", + "description": "The fix for this specific package version." + }, + "package_type": { + "type": "string", + "title": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)" + } + }, + "title": "Identifies all occurrences of this vulnerability in the package for a\nspecific distro/location\nFor example: glibc in cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2" + }, + "VulnerabilityTypePackageIssue": { + "type": "object", + "properties": { + "affected_location": { + "$ref": "#/definitions/VulnerabilityTypeVulnerabilityLocation", + "description": "The location of the vulnerability." + }, + "fixed_location": { + "$ref": "#/definitions/VulnerabilityTypeVulnerabilityLocation", + "description": "The location of the available fix for vulnerability." + }, + "severity_name": { + "type": "string", + "description": "The severity (eg: distro assigned severity) for this vulnerability." + } + }, + "description": "This message wraps a location affected by a vulnerability and its\nassociated fix (if one is available)." 
+ }, + "VulnerabilityTypeSeverity": { + "type": "string", + "enum": [ + "SEVERITY_UNSPECIFIED", + "MINIMAL", + "LOW", + "MEDIUM", + "HIGH", + "CRITICAL" + ], + "default": "SEVERITY_UNSPECIFIED", + "description": "- SEVERITY_UNSPECIFIED: Unknown Impact\n - MINIMAL: Minimal Impact\n - LOW: Low Impact\n - MEDIUM: Medium Impact\n - HIGH: High Impact\n - CRITICAL: Critical Impact", + "title": "Note provider-assigned severity/impact ranking" + }, + "VulnerabilityTypeVersion": { + "type": "object", + "properties": { + "epoch": { + "type": "integer", + "format": "int32", + "description": "Used to correct mistakes in the version numbering scheme." + }, + "name": { + "type": "string", + "description": "The main part of the version name." + }, + "revision": { + "type": "string", + "description": "The iteration of the package build from the above version." + }, + "kind": { + "$ref": "#/definitions/VersionVersionKind", + "description": "Distinguish between sentinel MIN/MAX versions and normal versions.\nIf kind is not NORMAL, then the other fields are ignored." + } + }, + "title": "Version contains structured information about the version of the package.\nFor a discussion of this in Debian/Ubuntu:\nhttp://serverfault.com/questions/604541/debian-packages-version-convention\nFor a discussion of this in Redhat/Fedora/Centos:\nhttp://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/" + }, + "VulnerabilityTypeVulnerabilityDetails": { + "type": "object", + "properties": { + "type": { + "type": "string", + "title": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)" + }, + "severity": { + "$ref": "#/definitions/VulnerabilityTypeSeverity", + "description": "Output only. The note provider assigned Severity of the vulnerability." + }, + "cvss_score": { + "type": "number", + "format": "float", + "description": "Output only. The CVSS score of this vulnerability. 
CVSS score is on a\nscale of 0-10 where 0 indicates low severity and 10 indicates high\nseverity." + }, + "package_issue": { + "type": "array", + "items": { + "$ref": "#/definitions/VulnerabilityTypePackageIssue" + }, + "description": "The set of affected locations and their fixes (if available) within\nthe associated resource." + } + }, + "description": "Used by Occurrence to point to where the vulnerability exists and how\nto fix it." + }, + "VulnerabilityTypeVulnerabilityLocation": { + "type": "object", + "properties": { + "cpe_uri": { + "type": "string", + "description": "The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/)\nformat. Examples include distro or storage location for vulnerable jar.\nThis field can be used as a filter in list requests." + }, + "package": { + "type": "string", + "description": "The package being described." + }, + "version": { + "$ref": "#/definitions/VulnerabilityTypeVersion", + "description": "The version of the package being described.\nThis field can be used as a filter in list requests." + } + }, + "title": "The location of the vulnerability" + }, + "apiAliasContext": { + "type": "object", + "properties": { + "kind": { + "$ref": "#/definitions/apiAliasContextKind", + "description": "The alias kind." + }, + "name": { + "type": "string", + "description": "The alias name." + } + }, + "description": "An alias to a repo revision." + }, + "apiAliasContextKind": { + "type": "string", + "enum": [ + "KIND_UNSPECIFIED", + "FIXED", + "MOVABLE", + "OTHER" + ], + "default": "KIND_UNSPECIFIED", + "description": "The type of an alias.\n\n - KIND_UNSPECIFIED: Unknown.\n - FIXED: Git tag.\n - MOVABLE: Git branch.\n - OTHER: Used to specify non-standard aliases. For example, if a Git repo has a\nref named \"refs/foo/bar\"." + }, + "apiArtifact": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the artifact. 
This may be the path to a binary or jar file, or in\nthe case of a container build, the name used to push the container image to\nGoogle Container Registry, as presented to `docker push`.\n\nThis field is deprecated in favor of the plural `names` field; it continues\nto exist here to allow existing BuildProvenance serialized to json in\ngoogle.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to\ndeserialize back into proto." + }, + "checksum": { + "type": "string", + "description": "Hash or checksum value of a binary, or Docker Registry 2.0 digest of a\ncontainer." + }, + "id": { + "type": "string", + "title": "Artifact ID, if any; for container images, this will be a URL by digest\nlike gcr.io/projectID/imagename@sha256:123456" + }, + "names": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Related artifact names. This may be the path to a binary or jar file, or in\nthe case of a container build, the name used to push the container image to\nGoogle Container Registry, as presented to `docker push`. Note that a\nsingle Artifact ID can have multiple names, for example if two tags are\napplied to one image." + } + }, + "description": "Artifact describes a build product." + }, + "apiBuildDetails": { + "type": "object", + "properties": { + "provenance": { + "$ref": "#/definitions/apiBuildProvenance", + "title": "The actual provenance" + }, + "provenance_bytes": { + "type": "string", + "description": "Serialized JSON representation of the provenance, used in generating the\n`BuildSignature` in the corresponding Result. After verifying the\nsignature, `provenance_bytes` can be unmarshalled and compared to the\nprovenance to confirm that it is unchanged. 
A base64-encoded string\nrepresentation of the provenance bytes is used for the signature in order\nto interoperate with openssl which expects this format for signature\nverification.\n\nThe serialized form is captured both to avoid ambiguity in how the\nprovenance is marshalled to json as well to prevent incompatibilities with\nfuture changes." + } + }, + "description": "Message encapsulating build provenance details." + }, + "apiBuildProvenance": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier of the build." + }, + "project_id": { + "type": "string", + "description": "ID of the project." + }, + "commands": { + "type": "array", + "items": { + "$ref": "#/definitions/apiCommand" + }, + "description": "Commands requested by the build." + }, + "built_artifacts": { + "type": "array", + "items": { + "$ref": "#/definitions/apiArtifact" + }, + "description": "Output of the build." + }, + "create_time": { + "type": "string", + "format": "date-time", + "description": "Time at which the build was created." + }, + "start_time": { + "type": "string", + "format": "date-time", + "description": "Time at which execution of the build was started." + }, + "finish_time": { + "type": "string", + "format": "date-time", + "description": "Time at which execution of the build was finished." + }, + "creator": { + "type": "string", + "description": "E-mail address of the user who initiated this build. Note that this was the\nuser's e-mail address at the time the build was initiated; this address may\nnot represent the same end-user for all time." + }, + "logs_bucket": { + "type": "string", + "description": "Google Cloud Storage bucket where logs were written." + }, + "source_provenance": { + "$ref": "#/definitions/apiSource", + "description": "Details of the Source input to the build." + }, + "trigger_id": { + "type": "string", + "description": "Trigger identifier if the build was triggered automatically; empty if not." 
+ }, + "build_options": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Special options applied to this build. This is a catch-all field where\nbuild providers can enter any desired additional details." + }, + "builder_version": { + "type": "string", + "description": "Version string of the builder at the time this build was executed." + } + }, + "description": "Provenance of a build. Contains all information needed to verify the full\ndetails about the build from source to completion." + }, + "apiBuildSignature": { + "type": "object", + "properties": { + "public_key": { + "type": "string", + "description": "Public key of the builder which can be used to verify that the related\nfindings are valid and unchanged. If `key_type` is empty, this defaults\nto PEM encoded public keys.\n\nThis field may be empty if `key_id` references an external key.\n\nFor Cloud Container Builder based signatures, this is a PEM encoded public\nkey. To verify the Cloud Container Builder signature, place the contents of\nthis field into a file (public.pem). The signature field is base64-decoded\ninto its binary representation in signature.bin, and the provenance bytes\nfrom `BuildDetails` are base64-decoded into a binary representation in\nsigned.bin. OpenSSL can then verify the signature:\n`openssl sha256 -verify public.pem -signature signature.bin signed.bin`" + }, + "signature": { + "type": "string", + "description": "Signature of the related `BuildProvenance`, encoded in a base64 string." + }, + "key_id": { + "type": "string", + "description": "An Id for the key used to sign. This could be either an Id for the key\nstored in `public_key` (such as the Id or fingerprint for a PGP key, or the\nCN for a cert), or a reference to an external key (such as a reference to a\nkey in Cloud Key Management Service)." 
+ }, + "key_type": { + "$ref": "#/definitions/BuildSignatureKeyType", + "title": "The type of the key, either stored in `public_key` or referenced in\n`key_id`" + } + }, + "description": "Message encapsulating the signature of the verified build." + }, + "apiBuildType": { + "type": "object", + "properties": { + "builder_version": { + "type": "string", + "description": "Version of the builder which produced this Note." + }, + "signature": { + "$ref": "#/definitions/apiBuildSignature", + "description": "Signature of the build in Occurrences pointing to the Note containing this\n`BuilderDetails`." + } + }, + "description": "Note holding the version of the provider's builder and the signature of\nthe provenance message in linked BuildDetails." + }, + "apiCloudRepoSourceContext": { + "type": "object", + "properties": { + "repo_id": { + "$ref": "#/definitions/apiRepoId", + "description": "The ID of the repo." + }, + "revision_id": { + "type": "string", + "description": "A revision ID." + }, + "alias_context": { + "$ref": "#/definitions/apiAliasContext", + "description": "An alias, which may be a branch or tag." + } + }, + "description": "A CloudRepoSourceContext denotes a particular revision in a Google Cloud\nSource Repo." + }, + "apiCommand": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the command, as presented on the command line, or if the command is\npackaged as a Docker container, as presented to `docker pull`." + }, + "env": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Environment variables set before running this Command." + }, + "args": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Command-line arguments used when executing this Command." + }, + "dir": { + "type": "string", + "description": "Working directory (relative to project source root) used when running\nthis Command." 
+ }, + "id": { + "type": "string", + "description": "Optional unique identifier for this Command, used in wait_for to reference\nthis Command as a dependency." + }, + "wait_for": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The ID(s) of the Command(s) that this Command depends on." + } + }, + "description": "Command describes a step performed as part of the build pipeline." + }, + "apiCreateOperationRequest": { + "type": "object", + "properties": { + "parent": { + "type": "string", + "description": "The projectId that this operation should be created under." + }, + "operation_id": { + "type": "string", + "description": "The ID to use for this operation." + }, + "operation": { + "$ref": "#/definitions/longrunningOperation", + "description": "The operation to create." + } + }, + "title": "Request for creating an operation" + }, + "apiDeployable": { + "type": "object", + "properties": { + "resource_uri": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Resource URI for the artifact being deployed." + } + }, + "description": "An artifact that can be deployed in some runtime." + }, + "apiDiscovery": { + "type": "object", + "properties": { + "analysis_kind": { + "$ref": "#/definitions/apiNoteKind", + "description": "The kind of analysis that is handled by this discovery." + } + }, + "description": "A note that indicates a type of analysis a provider would perform. This note\nexists in a provider's project. A `Discovery` occurrence is created in a\nconsumer's project at the start of analysis. The occurrence's operation will\nindicate the status of the analysis. Absence of an occurrence linked to this\nnote for a resource indicates that analysis hasn't started." + }, + "apiFileHashes": { + "type": "object", + "properties": { + "file_hash": { + "type": "array", + "items": { + "$ref": "#/definitions/apiHash" + }, + "description": "Collection of file hashes." 
+ } + }, + "description": "Container message for hashes of byte content of files, used in Source\nmessages to verify integrity of source input to the build." + }, + "apiGerritSourceContext": { + "type": "object", + "properties": { + "host_uri": { + "type": "string", + "description": "The URI of a running Gerrit instance." + }, + "gerrit_project": { + "type": "string", + "description": "The full project name within the host. Projects may be nested, so\n\"project/subproject\" is a valid project name. The \"repo name\" is\nthe hostURI/project." + }, + "revision_id": { + "type": "string", + "description": "A revision (commit) ID." + }, + "alias_context": { + "$ref": "#/definitions/apiAliasContext", + "description": "An alias, which may be a branch or tag." + } + }, + "description": "A SourceContext referring to a Gerrit project." + }, + "apiGitSourceContext": { + "type": "object", + "properties": { + "url": { + "type": "string", + "description": "Git repository URL." + }, + "revision_id": { + "type": "string", + "description": "Required.\nGit commit hash." + } + }, + "description": "A GitSourceContext denotes a particular revision in a third party Git\nrepository (e.g., GitHub)." + }, + "apiHash": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/HashHashType", + "description": "The type of hash that was performed." + }, + "value": { + "type": "string", + "format": "byte", + "description": "The hash value." + } + }, + "description": "Container message for hash values." + }, + "apiListNoteOccurrencesResponse": { + "type": "object", + "properties": { + "occurrences": { + "type": "array", + "items": { + "$ref": "#/definitions/apiOccurrence" + }, + "description": "The occurrences attached to the specified note." + }, + "next_page_token": { + "type": "string", + "description": "Token to receive the next page of notes." + } + }, + "description": "Response including listed occurrences for a note." 
+ }, + "apiListNotesResponse": { + "type": "object", + "properties": { + "notes": { + "type": "array", + "items": { + "$ref": "#/definitions/apiNote" + }, + "title": "The occurrences requested" + }, + "next_page_token": { + "type": "string", + "description": "The next pagination token in the list response. It should be used as\npage_token for the following request. An empty value means no more result." + } + }, + "description": "Response including listed notes." + }, + "apiListOccurrencesResponse": { + "type": "object", + "properties": { + "occurrences": { + "type": "array", + "items": { + "$ref": "#/definitions/apiOccurrence" + }, + "description": "The occurrences requested." + }, + "next_page_token": { + "type": "string", + "description": "The next pagination token in the list response. It should be used as\n`page_token` for the following request. An empty value means no more\nresults." + } + }, + "description": "Response including listed active occurrences." + }, + "apiListProjectsResponse": { + "type": "object", + "properties": { + "projects": { + "type": "array", + "items": { + "$ref": "#/definitions/apiProject" + }, + "description": "The projects requested." + }, + "next_page_token": { + "type": "string", + "description": "The next pagination token in the list response. It should be used as\n`page_token` for the following request. An empty value means no more\nresults." + } + }, + "title": "Response including listed projects" + }, + "apiNote": { + "type": "object", + "properties": { + "name": { + "type": "string", + "title": "The name of the note in the form\n\"providers/{provider_id}/notes/{NOTE_ID}\"" + }, + "short_description": { + "type": "string", + "description": "A one sentence description of this `Note`." + }, + "long_description": { + "type": "string", + "description": "A detailed description of this `Note`." + }, + "kind": { + "$ref": "#/definitions/apiNoteKind", + "description": "Output only. This explicitly denotes which kind of note is specified. 
This\nfield can be used as a filter in list requests." + }, + "vulnerability_type": { + "$ref": "#/definitions/apiVulnerabilityType", + "description": "A package vulnerability type of note." + }, + "build_type": { + "$ref": "#/definitions/apiBuildType", + "description": "Build provenance type for a verifiable build." + }, + "base_image": { + "$ref": "#/definitions/DockerImageBasis", + "description": "A note describing a base image." + }, + "package": { + "$ref": "#/definitions/PackageManagerPackage", + "description": "A note describing a package hosted by various package managers." + }, + "deployable": { + "$ref": "#/definitions/apiDeployable", + "description": "A note describing something that can be deployed." + }, + "discovery": { + "$ref": "#/definitions/apiDiscovery", + "description": "A note describing a provider/analysis type." + }, + "related_url": { + "type": "array", + "items": { + "$ref": "#/definitions/NoteRelatedUrl" + }, + "title": "URLs associated with this note" + }, + "expiration_time": { + "type": "string", + "format": "date-time", + "description": "Time of expiration for this note, null if note does not expire." + }, + "create_time": { + "type": "string", + "format": "date-time", + "description": "Output only. The time this note was created. This field can be used as a\nfilter in list requests." + }, + "update_time": { + "type": "string", + "format": "date-time", + "description": "Output only. The time this note was last updated. This field can be used as\na filter in list requests." + } + }, + "description": "Provides a detailed description of a `Note`." 
+ }, + "apiNoteKind": { + "type": "string", + "enum": [ + "KIND_UNSPECIFIED", + "PACKAGE_VULNERABILITY", + "BUILD_DETAILS", + "IMAGE_BASIS", + "PACKAGE_MANAGER", + "DEPLOYABLE", + "DISCOVERY" + ], + "default": "KIND_UNSPECIFIED", + "description": "This must be 1:1 with members of our oneofs, it can be used for filtering\nNote and Occurrence on their kind.\n\n - KIND_UNSPECIFIED: Unknown\n - PACKAGE_VULNERABILITY: The note and occurrence represent a package vulnerability.\n - BUILD_DETAILS: The note and occurrence assert build provenance.\n - IMAGE_BASIS: This represents an image basis relationship.\n - PACKAGE_MANAGER: This represents a package installed via a package manager.\n - DEPLOYABLE: The note and occurrence track deployment events.\n - DISCOVERY: The note and occurrence track the initial discovery status of a resource." + }, + "apiOccurrence": { + "type": "object", + "properties": { + "name": { + "type": "string", + "title": "Output only. The name of the `Occurrence` in the form\n\"projects/{project_id}/occurrences/{OCCURRENCE_ID}\"" + }, + "resource_url": { + "type": "string", + "description": "The unique URL of the image or the container for which the `Occurrence`\napplies. For example, https://gcr.io/project/image@sha256:foo This field\ncan be used as a filter in list requests." + }, + "note_name": { + "type": "string", + "description": "An analysis note associated with this image, in the form\n\"providers/{provider_id}/notes/{NOTE_ID}\"\nThis field can be used as a filter in list requests." + }, + "kind": { + "$ref": "#/definitions/apiNoteKind", + "description": "Output only. This explicitly denotes which of the `Occurrence` details are\nspecified. This field can be used as a filter in list requests." + }, + "vulnerability_details": { + "$ref": "#/definitions/VulnerabilityTypeVulnerabilityDetails", + "description": "Details of a security vulnerability note." 
+ }, + "build_details": { + "$ref": "#/definitions/apiBuildDetails", + "description": "Build details for a verifiable build." + }, + "derived_image": { + "$ref": "#/definitions/DockerImageDerived", + "description": "Describes how this resource derives from the basis\nin the associated note." + }, + "installation": { + "$ref": "#/definitions/PackageManagerInstallation", + "description": "Describes the installation of a package on the linked resource." + }, + "deployment": { + "$ref": "#/definitions/DeployableDeployment", + "description": "Describes the deployment of an artifact on a runtime." + }, + "discovered": { + "$ref": "#/definitions/DiscoveryDiscovered", + "description": "Describes the initial scan status for this resource." + }, + "remediation": { + "type": "string", + "title": "A description of actions that can be taken to remedy the `Note`" + }, + "create_time": { + "type": "string", + "format": "date-time", + "description": "Output only. The time this `Occurrence` was created." + }, + "update_time": { + "type": "string", + "format": "date-time", + "description": "Output only. The time this `Occurrence` was last updated." + } + }, + "description": "`Occurrence` includes information about analysis occurrences for an image." + }, + "apiPackageManagerLocation": { + "type": "object", + "properties": { + "cpe_uri": { + "type": "string", + "description": "The cpe_uri in [cpe format](https://cpe.mitre.org/specification/)\ndenoting the package manager version distributing a package." + }, + "version": { + "$ref": "#/definitions/VulnerabilityTypeVersion", + "description": "The version installed at this location." + }, + "path": { + "type": "string", + "description": "The path from which we gathered that this package/version is installed." + } + }, + "title": "An occurrence of a particular package installation found within a\nsystem's filesystem.\ne.g. 
glibc was found in /var/lib/dpkg/status" + }, + "apiProject": { + "type": "object", + "properties": { + "name": { + "type": "string", + "title": "The name of the project of the form\n\"projects/{project_id}\"" + } + }, + "description": "Provides detailed description of a `Project`." + }, + "apiProjectRepoId": { + "type": "object", + "properties": { + "project_id": { + "type": "string", + "description": "The ID of the project." + }, + "repo_name": { + "type": "string", + "description": "The name of the repo. Leave empty for the default repo." + } + }, + "description": "Selects a repo using a Google Cloud Platform project ID (e.g.,\nwinged-cargo-31) and a repo name within that project." + }, + "apiRepoId": { + "type": "object", + "properties": { + "project_repo_id": { + "$ref": "#/definitions/apiProjectRepoId", + "description": "A combination of a project ID and a repo name." + }, + "uid": { + "type": "string", + "description": "A server-assigned, globally unique identifier." + } + }, + "description": "A unique identifier for a Cloud Repo." + }, + "apiRepoSource": { + "type": "object", + "properties": { + "project_id": { + "type": "string", + "description": "ID of the project that owns the repo." + }, + "repo_name": { + "type": "string", + "description": "Name of the repo." + }, + "branch_name": { + "type": "string", + "description": "Name of the branch to build." + }, + "tag_name": { + "type": "string", + "description": "Name of the tag to build." + }, + "commit_sha": { + "type": "string", + "description": "Explicit commit SHA to build." + } + }, + "description": "RepoSource describes the location of the source in a Google Cloud Source\nRepository." + }, + "apiSource": { + "type": "object", + "properties": { + "storage_source": { + "$ref": "#/definitions/apiStorageSource", + "description": "If provided, get the source from this location in in Google Cloud\nStorage." 
+ }, + "repo_source": { + "$ref": "#/definitions/apiRepoSource", + "description": "If provided, get source from this location in a Cloud Repo." + }, + "artifact_storage_source": { + "$ref": "#/definitions/apiStorageSource", + "description": "If provided, the input binary artifacts for the build came from this\nlocation." + }, + "file_hashes": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/apiFileHashes" + }, + "description": "Hash(es) of the build source, which can be used to verify that the original\nsource integrity was maintained in the build.\n\nThe keys to this map are file paths used as build source and the values\ncontain the hash values for those files.\n\nIf the build source came in a single package such as a gzipped tarfile\n(.tar.gz), the FileHash will be for the single path to that file." + }, + "context": { + "$ref": "#/definitions/apiSourceContext", + "description": "If provided, the source code used for the build came from this location." + }, + "additional_contexts": { + "type": "array", + "items": { + "$ref": "#/definitions/apiSourceContext" + }, + "description": "If provided, some of the source code used for the build may be found in\nthese locations, in the case where the source repository had multiple\nremotes or submodules. This list will not include the context specified in\nthe context field." + } + }, + "description": "Source describes the location of the source used for the build." + }, + "apiSourceContext": { + "type": "object", + "properties": { + "cloud_repo": { + "$ref": "#/definitions/apiCloudRepoSourceContext", + "description": "A SourceContext referring to a revision in a Google Cloud Source Repo." + }, + "gerrit": { + "$ref": "#/definitions/apiGerritSourceContext", + "description": "A SourceContext referring to a Gerrit project." + }, + "git": { + "$ref": "#/definitions/apiGitSourceContext", + "description": "A SourceContext referring to any third party Git repo (e.g., GitHub)." 
+ }, + "labels": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Labels with user defined metadata." + } + }, + "description": "A SourceContext is a reference to a tree of files. A SourceContext together\nwith a path point to a unique revision of a single file or directory." + }, + "apiStorageSource": { + "type": "object", + "properties": { + "bucket": { + "type": "string", + "description": "Google Cloud Storage bucket containing source (see [Bucket Name\nRequirements]\n(https://cloud.google.com/storage/docs/bucket-naming#requirements))." + }, + "object": { + "type": "string", + "description": "Google Cloud Storage object containing source." + }, + "generation": { + "type": "string", + "format": "int64", + "description": "Google Cloud Storage generation for the object." + } + }, + "description": "StorageSource describes the location of the source in an archive file in\nGoogle Cloud Storage." + }, + "apiUpdateOperationRequest": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the Operation.\nShould be of the form \"projects/{provider_id}/operations/{operation_id}\"." + }, + "operation": { + "$ref": "#/definitions/longrunningOperation", + "description": "The operation to create." + } + }, + "title": "Request for updating an existing operation" + }, + "apiVulnerabilityType": { + "type": "object", + "properties": { + "cvss_score": { + "type": "number", + "format": "float", + "description": "The CVSS score for this Vulnerability." + }, + "severity": { + "$ref": "#/definitions/VulnerabilityTypeSeverity", + "title": "Note provider assigned impact of the vulnerability" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/VulnerabilityTypeDetail" + }, + "description": "All information about the package to specifically identify this\nvulnerability. One entry per (version range and cpe_uri) the\npackage vulnerability has manifested in." 
+ } + }, + "description": "VulnerabilityType provides metadata about a security vulnerability." + }, + "longrunningOperation": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The server-assigned name, which is only unique within the same service that\noriginally returns it. If you use the default HTTP mapping, the\n`name` should have the format of `operations/some/unique/name`." + }, + "metadata": { + "$ref": "#/definitions/protobufAny", + "description": "Service-specific metadata associated with the operation. It typically\ncontains progress information and common metadata such as create time.\nSome services might not provide such metadata. Any method that returns a\nlong-running operation should document the metadata type, if any." + }, + "done": { + "type": "boolean", + "format": "boolean", + "description": "If the value is `false`, it means the operation is still in progress.\nIf true, the operation is completed, and either `error` or `response` is\navailable." + }, + "error": { + "$ref": "#/definitions/rpcStatus", + "description": "The error result of the operation in case of failure or cancellation." + }, + "response": { + "$ref": "#/definitions/protobufAny", + "description": "The normal response of the operation in case of success. If the original\nmethod returns no data on success, such as `Delete`, the response is\n`google.protobuf.Empty`. If the original method is standard\n`Get`/`Create`/`Update`, the response should be the resource. For other\nmethods, the response should have the type `XxxResponse`, where `Xxx`\nis the original method name. For example, if the original method name\nis `TakeSnapshot()`, the inferred response type is\n`TakeSnapshotResponse`." + } + }, + "description": "This resource represents a long-running operation that is the result of a\nnetwork API call." 
+ }, + "protobufAny": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name whose content describes the type of the\nserialized protocol buffer message.\n\nFor URLs which use the scheme `http`, `https`, or no scheme, the\nfollowing restrictions and interpretations apply:\n\n* If no scheme is provided, `https` is assumed.\n* The last segment of the URL's path must represent the fully\n qualified name of the type (as in `path/google.protobuf.Duration`).\n The name should be in a canonical form (e.g., leading \".\" is\n not accepted).\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := \u0026pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "protobufEmpty": { + "type": "object", + "description": "service Foo {\n rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);\n }\n\nThe JSON representation for `Empty` is empty JSON object `{}`.", + "title": "A generic empty message that you can re-use to avoid defining duplicated\nempty messages in your APIs. A typical example is to use it as the request\nor the response type of an API method. For instance:" + }, + "protobufFieldMask": { + "type": "object", + "properties": { + "paths": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The set of field mask paths." + } + }, + "description": "paths: \"f.a\"\n paths: \"f.b.d\"\n\nHere `f` represents a field in some root message, `a` and `b`\nfields in the message found in `f`, and `d` a field found in the\nmessage in `f.b`.\n\nField masks are used to specify a subset of fields that should be\nreturned by a get operation or modified by an update operation.\nField masks also have a custom JSON encoding (see below).\n\n# Field Masks in Projections\n\nWhen used in the context of a projection, a response message or\nsub-message is filtered by the API to only contain those fields as\nspecified in the mask. 
For example, if the mask in the previous\nexample is applied to a response message as follows:\n\n f {\n a : 22\n b {\n d : 1\n x : 2\n }\n y : 13\n }\n z: 8\n\nThe result will not contain specific values for fields x,y and z\n(their value will be set to the default, and omitted in proto text\noutput):\n\n\n f {\n a : 22\n b {\n d : 1\n }\n }\n\nA repeated field is not allowed except at the last position of a\npaths string.\n\nIf a FieldMask object is not present in a get operation, the\noperation applies to all fields (as if a FieldMask of all fields\nhad been specified).\n\nNote that a field mask does not necessarily apply to the\ntop-level response message. In case of a REST get operation, the\nfield mask applies directly to the response, but in case of a REST\nlist operation, the mask instead applies to each individual message\nin the returned resource list. In case of a REST custom method,\nother definitions may be used. Where the mask applies will be\nclearly documented together with its declaration in the API. In\nany case, the effect on the returned resource/resources is required\nbehavior for APIs.\n\n# Field Masks in Update Operations\n\nA field mask in update operations specifies which fields of the\ntargeted resource are going to be updated. The API is required\nto only change the values of the fields as specified in the mask\nand leave the others untouched. If a resource is passed in to\ndescribe the updated values, the API ignores the values of all\nfields not covered by the mask.\n\nIf a repeated field is specified for an update operation, the existing\nrepeated values in the target resource will be overwritten by the new values.\nNote that a repeated field is only allowed in the last position of a `paths`\nstring.\n\nIf a sub-message is specified in the last position of the field mask for an\nupdate operation, then the existing sub-message in the target resource is\noverwritten. 
Given the target message:\n\n f {\n b {\n d : 1\n x : 2\n }\n c : 1\n }\n\nAnd an update message:\n\n f {\n b {\n d : 10\n }\n }\n\nthen if the field mask is:\n\n paths: \"f.b\"\n\nthen the result will be:\n\n f {\n b {\n d : 10\n }\n c : 1\n }\n\nHowever, if the update mask was:\n\n paths: \"f.b.d\"\n\nthen the result would be:\n\n f {\n b {\n d : 10\n x : 2\n }\n c : 1\n }\n\nIn order to reset a field's value to the default, the field must\nbe in the mask and set to the default value in the provided resource.\nHence, in order to reset all fields of a resource, provide a default\ninstance of the resource and set all fields in the mask, or do\nnot provide a mask as described below.\n\nIf a field mask is not present on update, the operation applies to\nall fields (as if a field mask of all fields has been specified).\nNote that in the presence of schema evolution, this may mean that\nfields the client does not know and has therefore not filled into\nthe request will be reset to their default. If this is unwanted\nbehavior, a specific service may require a client to always specify\na field mask, producing an error if not.\n\nAs with get operations, the location of the resource which\ndescribes the updated values in the request message depends on the\noperation kind. In any case, the effect of the field mask is\nrequired to be honored by the API.\n\n## Considerations for HTTP REST\n\nThe HTTP kind of an update operation which uses a field mask must\nbe set to PATCH instead of PUT in order to satisfy HTTP semantics\n(PUT must only be used for full updates).\n\n# JSON Encoding of Field Masks\n\nIn JSON, a field mask is encoded as a single string where paths are\nseparated by a comma. 
Fields name in each path are converted\nto/from lower-camel naming conventions.\n\nAs an example, consider the following message declarations:\n\n message Profile {\n User user = 1;\n Photo photo = 2;\n }\n message User {\n string display_name = 1;\n string address = 2;\n }\n\nIn proto a field mask for `Profile` may look as such:\n\n mask {\n paths: \"user.display_name\"\n paths: \"photo\"\n }\n\nIn JSON, the same mask is represented as below:\n\n {\n mask: \"user.displayName,photo\"\n }\n\n# Field Masks and Oneof Fields\n\nField masks treat fields in oneofs just as regular fields. Consider the\nfollowing message:\n\n message SampleMessage {\n oneof test_oneof {\n string name = 4;\n SubMessage sub_message = 9;\n }\n }\n\nThe field mask can be:\n\n mask {\n paths: \"name\"\n }\n\nOr:\n\n mask {\n paths: \"sub_message\"\n }\n\nNote that oneof type names (\"test_oneof\" in this case) cannot be used in\npaths.\n\n## Field Mask Verification\n\nThe implementation of the all the API methods, which have any FieldMask type\nfield in the request, should verify the included field paths, and return\n`INVALID_ARGUMENT` error if any path is duplicated or unmappable.", + "title": "`FieldMask` represents a set of symbolic field paths, for example:" + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." + }, + "message": { + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." 
+ } + }, + "description": "- Simple to use and understand for most users\n- Flexible enough to meet unexpected needs\n\n# Overview\n\nThe `Status` message contains three pieces of data: error code, error message,\nand error details. The error code should be an enum value of\n[google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed. The\nerror message should be a developer-facing English message that helps\ndevelopers *understand* and *resolve* the error. If a localized user-facing\nerror message is needed, put the localized message in the error details or\nlocalize it in the client. The optional error details may contain arbitrary\ninformation about the error. There is a predefined set of error detail types\nin the package `google.rpc` that can be used for common error conditions.\n\n# Language mapping\n\nThe `Status` message is the logical representation of the error model, but it\nis not necessarily the actual wire format. When the `Status` message is\nexposed in different client libraries and different wire protocols, it can be\nmapped differently. For example, it will likely be mapped to some exceptions\nin Java, but more likely mapped to some error codes in C.\n\n# Other uses\n\nThe error model and the `Status` message can be used in a variety of\nenvironments, either with or without APIs, to provide a\nconsistent developer experience across different environments.\n\nExample uses of this error model include:\n\n- Partial errors. If a service needs to return partial errors to the client,\n it may embed the `Status` in the normal response to indicate the partial\n errors.\n\n- Workflow errors. A typical workflow has multiple steps. Each step may\n have a `Status` message for error reporting.\n\n- Batch operations. If a client uses batch request and batch response, the\n `Status` message should be used directly inside batch response, one for\n each error sub-response.\n\n- Asynchronous operations. 
If an API call embeds asynchronous operation\n results in its response, the status of those operations should be\n represented directly using the `Status` message.\n\n- Logging. If some API errors are stored in logs, the message `Status` could\n be used directly after any stripping needed for security/privacy reasons.", + "title": "The `Status` type defines a logical error model that is suitable for different\nprogramming environments, including REST APIs and RPC APIs. It is used by\n[gRPC](https://github.com/grpc). The error model is designed to be:" + } + } +} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE deleted file mode 100644 index bb673323..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -ISC License - -Copyright (c) 2012-2013 Dave Collins - -Permission to use, copy, modify, and distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypass.go deleted file mode 100644 index d42a0bc4..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright (c) 2015 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is not running on Google App Engine, compiled by GopherJS, and -// "-tags safe" is not added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. -// +build !js,!appengine,!safe,!disableunsafe - -package spew - -import ( - "reflect" - "unsafe" -) - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = false - - // ptrSize is the size of a pointer on the current arch. - ptrSize = unsafe.Sizeof((*byte)(nil)) -) - -var ( - // offsetPtr, offsetScalar, and offsetFlag are the offsets for the - // internal reflect.Value fields. These values are valid before golang - // commit ecccf07e7f9d which changed the format. 
The are also valid - // after commit 82f48826c6c7 which changed the format again to mirror - // the original format. Code in the init function updates these offsets - // as necessary. - offsetPtr = uintptr(ptrSize) - offsetScalar = uintptr(0) - offsetFlag = uintptr(ptrSize * 2) - - // flagKindWidth and flagKindShift indicate various bits that the - // reflect package uses internally to track kind information. - // - // flagRO indicates whether or not the value field of a reflect.Value is - // read-only. - // - // flagIndir indicates whether the value field of a reflect.Value is - // the actual data or a pointer to the data. - // - // These values are valid before golang commit 90a7c3c86944 which - // changed their positions. Code in the init function updates these - // flags as necessary. - flagKindWidth = uintptr(5) - flagKindShift = uintptr(flagKindWidth - 1) - flagRO = uintptr(1 << 0) - flagIndir = uintptr(1 << 1) -) - -func init() { - // Older versions of reflect.Value stored small integers directly in the - // ptr field (which is named val in the older versions). Versions - // between commits ecccf07e7f9d and 82f48826c6c7 added a new field named - // scalar for this purpose which unfortunately came before the flag - // field, so the offset of the flag field is different for those - // versions. - // - // This code constructs a new reflect.Value from a known small integer - // and checks if the size of the reflect.Value struct indicates it has - // the scalar field. When it does, the offsets are updated accordingly. - vv := reflect.ValueOf(0xf00) - if unsafe.Sizeof(vv) == (ptrSize * 4) { - offsetScalar = ptrSize * 2 - offsetFlag = ptrSize * 3 - } - - // Commit 90a7c3c86944 changed the flag positions such that the low - // order bits are the kind. This code extracts the kind from the flags - // field and ensures it's the correct type. When it's not, the flag - // order has been changed to the newer format, so the flags are updated - // accordingly. 
- upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag) - upfv := *(*uintptr)(upf) - flagKindMask := uintptr((1<>flagKindShift != uintptr(reflect.Int) { - flagKindShift = 0 - flagRO = 1 << 5 - flagIndir = 1 << 6 - - // Commit adf9b30e5594 modified the flags to separate the - // flagRO flag into two bits which specifies whether or not the - // field is embedded. This causes flagIndir to move over a bit - // and means that flagRO is the combination of either of the - // original flagRO bit and the new bit. - // - // This code detects the change by extracting what used to be - // the indirect bit to ensure it's set. When it's not, the flag - // order has been changed to the newer format, so the flags are - // updated accordingly. - if upfv&flagIndir == 0 { - flagRO = 3 << 5 - flagIndir = 1 << 7 - } - } -} - -// unsafeReflectValue converts the passed reflect.Value into a one that bypasses -// the typical safety restrictions preventing access to unaddressable and -// unexported data. It works by digging the raw pointer to the underlying -// value out of the protected value and generating a new unprotected (unsafe) -// reflect.Value to it. -// -// This allows us to check for implementations of the Stringer and error -// interfaces to be used for pretty printing ordinarily unaddressable and -// inaccessible values such as unexported struct fields. -func unsafeReflectValue(v reflect.Value) (rv reflect.Value) { - indirects := 1 - vt := v.Type() - upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr) - rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag)) - if rvf&flagIndir != 0 { - vt = reflect.PtrTo(v.Type()) - indirects++ - } else if offsetScalar != 0 { - // The value is in the scalar field when it's not one of the - // reference types. 
- switch vt.Kind() { - case reflect.Uintptr: - case reflect.Chan: - case reflect.Func: - case reflect.Map: - case reflect.Ptr: - case reflect.UnsafePointer: - default: - upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + - offsetScalar) - } - } - - pv := reflect.NewAt(vt, upv) - rv = pv - for i := 0; i < indirects; i++ { - rv = rv.Elem() - } - return rv -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go deleted file mode 100644 index e47a4e79..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) 2015 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is running on Google App Engine, compiled by GopherJS, or -// "-tags safe" is added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. 
-// +build js appengine safe disableunsafe - -package spew - -import "reflect" - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = true -) - -// unsafeReflectValue typically converts the passed reflect.Value into a one -// that bypasses the typical safety restrictions preventing access to -// unaddressable and unexported data. However, doing this relies on access to -// the unsafe package. This is a stub version which simply returns the passed -// reflect.Value when the unsafe package is not available. -func unsafeReflectValue(v reflect.Value) reflect.Value { - return v -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/common.go deleted file mode 100644 index 14f02dc1..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/common.go +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "reflect" - "sort" - "strconv" -) - -// Some constants in the form of bytes to avoid string overhead. 
This mirrors -// the technique used in the fmt package. -var ( - panicBytes = []byte("(PANIC=") - plusBytes = []byte("+") - iBytes = []byte("i") - trueBytes = []byte("true") - falseBytes = []byte("false") - interfaceBytes = []byte("(interface {})") - commaNewlineBytes = []byte(",\n") - newlineBytes = []byte("\n") - openBraceBytes = []byte("{") - openBraceNewlineBytes = []byte("{\n") - closeBraceBytes = []byte("}") - asteriskBytes = []byte("*") - colonBytes = []byte(":") - colonSpaceBytes = []byte(": ") - openParenBytes = []byte("(") - closeParenBytes = []byte(")") - spaceBytes = []byte(" ") - pointerChainBytes = []byte("->") - nilAngleBytes = []byte("") - maxNewlineBytes = []byte("\n") - maxShortBytes = []byte("") - circularBytes = []byte("") - circularShortBytes = []byte("") - invalidAngleBytes = []byte("") - openBracketBytes = []byte("[") - closeBracketBytes = []byte("]") - percentBytes = []byte("%") - precisionBytes = []byte(".") - openAngleBytes = []byte("<") - closeAngleBytes = []byte(">") - openMapBytes = []byte("map[") - closeMapBytes = []byte("]") - lenEqualsBytes = []byte("len=") - capEqualsBytes = []byte("cap=") -) - -// hexDigits is used to map a decimal value to a hex digit. -var hexDigits = "0123456789abcdef" - -// catchPanic handles any panics that might occur during the handleMethods -// calls. -func catchPanic(w io.Writer, v reflect.Value) { - if err := recover(); err != nil { - w.Write(panicBytes) - fmt.Fprintf(w, "%v", err) - w.Write(closeParenBytes) - } -} - -// handleMethods attempts to call the Error and String methods on the underlying -// type the passed reflect.Value represents and outputes the result to Writer w. -// -// It handles panics in any called methods by catching and displaying the error -// as the formatted value. -func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) { - // We need an interface to check if the type implements the error or - // Stringer interface. 
However, the reflect package won't give us an - // interface on certain things like unexported struct fields in order - // to enforce visibility rules. We use unsafe, when it's available, - // to bypass these restrictions since this package does not mutate the - // values. - if !v.CanInterface() { - if UnsafeDisabled { - return false - } - - v = unsafeReflectValue(v) - } - - // Choose whether or not to do error and Stringer interface lookups against - // the base type or a pointer to the base type depending on settings. - // Technically calling one of these methods with a pointer receiver can - // mutate the value, however, types which choose to satisify an error or - // Stringer interface with a pointer receiver should not be mutating their - // state inside these interface methods. - if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() { - v = unsafeReflectValue(v) - } - if v.CanAddr() { - v = v.Addr() - } - - // Is it an error or Stringer? - switch iface := v.Interface().(type) { - case error: - defer catchPanic(w, v) - if cs.ContinueOnMethod { - w.Write(openParenBytes) - w.Write([]byte(iface.Error())) - w.Write(closeParenBytes) - w.Write(spaceBytes) - return false - } - - w.Write([]byte(iface.Error())) - return true - - case fmt.Stringer: - defer catchPanic(w, v) - if cs.ContinueOnMethod { - w.Write(openParenBytes) - w.Write([]byte(iface.String())) - w.Write(closeParenBytes) - w.Write(spaceBytes) - return false - } - w.Write([]byte(iface.String())) - return true - } - return false -} - -// printBool outputs a boolean value as true or false to Writer w. -func printBool(w io.Writer, val bool) { - if val { - w.Write(trueBytes) - } else { - w.Write(falseBytes) - } -} - -// printInt outputs a signed integer value to Writer w. -func printInt(w io.Writer, val int64, base int) { - w.Write([]byte(strconv.FormatInt(val, base))) -} - -// printUint outputs an unsigned integer value to Writer w. 
-func printUint(w io.Writer, val uint64, base int) { - w.Write([]byte(strconv.FormatUint(val, base))) -} - -// printFloat outputs a floating point value using the specified precision, -// which is expected to be 32 or 64bit, to Writer w. -func printFloat(w io.Writer, val float64, precision int) { - w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision))) -} - -// printComplex outputs a complex value using the specified float precision -// for the real and imaginary parts to Writer w. -func printComplex(w io.Writer, c complex128, floatPrecision int) { - r := real(c) - w.Write(openParenBytes) - w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision))) - i := imag(c) - if i >= 0 { - w.Write(plusBytes) - } - w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision))) - w.Write(iBytes) - w.Write(closeParenBytes) -} - -// printHexPtr outputs a uintptr formatted as hexidecimal with a leading '0x' -// prefix to Writer w. -func printHexPtr(w io.Writer, p uintptr) { - // Null pointer. - num := uint64(p) - if num == 0 { - w.Write(nilAngleBytes) - return - } - - // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix - buf := make([]byte, 18) - - // It's simpler to construct the hex string right to left. - base := uint64(16) - i := len(buf) - 1 - for num >= base { - buf[i] = hexDigits[num%base] - num /= base - i-- - } - buf[i] = hexDigits[num] - - // Add '0x' prefix. - i-- - buf[i] = 'x' - i-- - buf[i] = '0' - - // Strip unused leading bytes. - buf = buf[i:] - w.Write(buf) -} - -// valuesSorter implements sort.Interface to allow a slice of reflect.Value -// elements to be sorted. -type valuesSorter struct { - values []reflect.Value - strings []string // either nil or same len and values - cs *ConfigState -} - -// newValuesSorter initializes a valuesSorter instance, which holds a set of -// surrogate keys on which the data should be sorted. It uses flags in -// ConfigState to decide if and how to populate those surrogate keys. 
-func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface { - vs := &valuesSorter{values: values, cs: cs} - if canSortSimply(vs.values[0].Kind()) { - return vs - } - if !cs.DisableMethods { - vs.strings = make([]string, len(values)) - for i := range vs.values { - b := bytes.Buffer{} - if !handleMethods(cs, &b, vs.values[i]) { - vs.strings = nil - break - } - vs.strings[i] = b.String() - } - } - if vs.strings == nil && cs.SpewKeys { - vs.strings = make([]string, len(values)) - for i := range vs.values { - vs.strings[i] = Sprintf("%#v", vs.values[i].Interface()) - } - } - return vs -} - -// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted -// directly, or whether it should be considered for sorting by surrogate keys -// (if the ConfigState allows it). -func canSortSimply(kind reflect.Kind) bool { - // This switch parallels valueSortLess, except for the default case. - switch kind { - case reflect.Bool: - return true - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return true - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return true - case reflect.Float32, reflect.Float64: - return true - case reflect.String: - return true - case reflect.Uintptr: - return true - case reflect.Array: - return true - } - return false -} - -// Len returns the number of values in the slice. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Len() int { - return len(s.values) -} - -// Swap swaps the values at the passed indices. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Swap(i, j int) { - s.values[i], s.values[j] = s.values[j], s.values[i] - if s.strings != nil { - s.strings[i], s.strings[j] = s.strings[j], s.strings[i] - } -} - -// valueSortLess returns whether the first value should sort before the second -// value. It is used by valueSorter.Less as part of the sort.Interface -// implementation. 
-func valueSortLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Bool: - return !a.Bool() && b.Bool() - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return a.Int() < b.Int() - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return a.Uint() < b.Uint() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.String: - return a.String() < b.String() - case reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Array: - // Compare the contents of both arrays. - l := a.Len() - for i := 0; i < l; i++ { - av := a.Index(i) - bv := b.Index(i) - if av.Interface() == bv.Interface() { - continue - } - return valueSortLess(av, bv) - } - } - return a.String() < b.String() -} - -// Less returns whether the value at index i should sort before the -// value at index j. It is part of the sort.Interface implementation. -func (s *valuesSorter) Less(i, j int) bool { - if s.strings == nil { - return valueSortLess(s.values[i], s.values[j]) - } - return s.strings[i] < s.strings[j] -} - -// sortValues is a sort function that handles both native types and any type that -// can be converted to error or Stringer. Other inputs are sorted according to -// their Value.String() value to ensure display stability. 
-func sortValues(values []reflect.Value, cs *ConfigState) { - if len(values) == 0 { - return - } - sort.Sort(newValuesSorter(values, cs)) -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/config.go deleted file mode 100644 index 55528272..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/config.go +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "os" -) - -// ConfigState houses the configuration options used by spew to format and -// display values. There is a global instance, Config, that is used to control -// all top-level Formatter and Dump functionality. Each ConfigState instance -// provides methods equivalent to the top-level functions. -// -// The zero value for ConfigState provides no indentation. You would typically -// want to set it to a space or a tab. -// -// Alternatively, you can use NewDefaultConfig to get a ConfigState instance -// with default settings. See the documentation of NewDefaultConfig for default -// values. 
-type ConfigState struct { - // Indent specifies the string to use for each indentation level. The - // global config instance that all top-level functions use set this to a - // single space by default. If you would like more indentation, you might - // set this to a tab with "\t" or perhaps two spaces with " ". - Indent string - - // MaxDepth controls the maximum number of levels to descend into nested - // data structures. The default, 0, means there is no limit. - // - // NOTE: Circular data structures are properly detected, so it is not - // necessary to set this value unless you specifically want to limit deeply - // nested data structures. - MaxDepth int - - // DisableMethods specifies whether or not error and Stringer interfaces are - // invoked for types that implement them. - DisableMethods bool - - // DisablePointerMethods specifies whether or not to check for and invoke - // error and Stringer interfaces on types which only accept a pointer - // receiver when the current type is not a pointer. - // - // NOTE: This might be an unsafe action since calling one of these methods - // with a pointer receiver could technically mutate the value, however, - // in practice, types which choose to satisify an error or Stringer - // interface with a pointer receiver should not be mutating their state - // inside these interface methods. As a result, this option relies on - // access to the unsafe package, so it will not have any effect when - // running in environments without access to the unsafe package such as - // Google App Engine or with the "safe" build tag specified. - DisablePointerMethods bool - - // ContinueOnMethod specifies whether or not recursion should continue once - // a custom error or Stringer interface is invoked. The default, false, - // means it will print the results of invoking the custom error or Stringer - // interface and return immediately instead of continuing to recurse into - // the internals of the data type. 
- // - // NOTE: This flag does not have any effect if method invocation is disabled - // via the DisableMethods or DisablePointerMethods options. - ContinueOnMethod bool - - // SortKeys specifies map keys should be sorted before being printed. Use - // this to have a more deterministic, diffable output. Note that only - // native types (bool, int, uint, floats, uintptr and string) and types - // that support the error or Stringer interfaces (if methods are - // enabled) are supported, with other types sorted according to the - // reflect.Value.String() output which guarantees display stability. - SortKeys bool - - // SpewKeys specifies that, as a last resort attempt, map keys should - // be spewed to strings and sorted by those strings. This is only - // considered if SortKeys is true. - SpewKeys bool -} - -// Config is the active configuration of the top-level functions. -// The configuration can be changed by modifying the contents of spew.Config. -var Config = ConfigState{Indent: " "} - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the formatted string as a value that satisfies error. See NewFormatter -// for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, c.convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. 
-// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, c.convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, c.convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a Formatter interface returned by c.NewFormatter. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, c.convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Print(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Print(a ...interface{}) (n int, err error) { - return fmt.Print(c.convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. 
See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, c.convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Println(a ...interface{}) (n int, err error) { - return fmt.Println(c.convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprint(a ...interface{}) string { - return fmt.Sprint(c.convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, c.convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a Formatter interface returned by c.NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. 
-// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintln(a ...interface{}) string { - return fmt.Sprintln(c.convertArgs(a)...) -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -c.Printf, c.Println, or c.Printf. -*/ -func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(c, v) -} - -// Fdump formats and displays the passed arguments to io.Writer w. It formats -// exactly the same as Dump. -func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) { - fdump(c, w, a...) -} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. 
It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by modifying the public members -of c. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func (c *ConfigState) Dump(a ...interface{}) { - fdump(c, os.Stdout, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func (c *ConfigState) Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(c, &buf, a...) - return buf.String() -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a spew Formatter interface using -// the ConfigState associated with s. -func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = newFormatter(c, arg) - } - return formatters -} - -// NewDefaultConfig returns a ConfigState with the following default settings. 
-// -// Indent: " " -// MaxDepth: 0 -// DisableMethods: false -// DisablePointerMethods: false -// ContinueOnMethod: false -// SortKeys: false -func NewDefaultConfig() *ConfigState { - return &ConfigState{Indent: " "} -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/doc.go deleted file mode 100644 index 5be0c406..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/doc.go +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -/* -Package spew implements a deep pretty printer for Go data structures to aid in -debugging. 
- -A quick overview of the additional features spew provides over the built-in -printing facilities for Go data types are as follows: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output (only when using - Dump style) - -There are two different approaches spew allows for dumping Go data structures: - - * Dump style which prints with newlines, customizable indentation, - and additional debug information such as types and all pointer addresses - used to indirect to the final value - * A custom Formatter interface that integrates cleanly with the standard fmt - package and replaces %v, %+v, %#v, and %#+v to provide inline printing - similar to the default %v while providing the additional functionality - outlined above and passing unsupported format verbs such as %x and %q - along to fmt - -Quick Start - -This section demonstrates how to quickly get started with spew. See the -sections below for further details on formatting and configuration options. - -To dump a variable with full newlines, indentation, type, and pointer -information use Dump, Fdump, or Sdump: - spew.Dump(myVar1, myVar2, ...) - spew.Fdump(someWriter, myVar1, myVar2, ...) - str := spew.Sdump(myVar1, myVar2, ...) 
- -Alternatively, if you would prefer to use format strings with a compacted inline -printing style, use the convenience wrappers Printf, Fprintf, etc with -%v (most compact), %+v (adds pointer addresses), %#v (adds types), or -%#+v (adds types and pointer addresses): - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -Configuration Options - -Configuration of spew is handled by fields in the ConfigState type. For -convenience, all of the top-level functions use a global state available -via the spew.Config global. - -It is also possible to create a ConfigState instance that provides methods -equivalent to the top-level functions. This allows concurrent configuration -options. See the ConfigState documentation for more details. - -The following configuration options are available: - * Indent - String to use for each indentation level for Dump functions. - It is a single space by default. A popular alternative is "\t". - - * MaxDepth - Maximum number of levels to descend into nested data structures. - There is no limit by default. - - * DisableMethods - Disables invocation of error and Stringer interface methods. - Method invocation is enabled by default. - - * DisablePointerMethods - Disables invocation of error and Stringer interface methods on types - which only accept pointer receivers from non-pointer variables. - Pointer method invocation is enabled by default. - - * ContinueOnMethod - Enables recursion into types after invoking error and Stringer interface - methods. Recursion after method invocation is disabled by default. - - * SortKeys - Specifies map keys should be sorted before being printed. Use - this to have a more deterministic, diffable output. 
Note that - only native types (bool, int, uint, floats, uintptr and string) - and types which implement error or Stringer interfaces are - supported with other types sorted according to the - reflect.Value.String() output which guarantees display - stability. Natural map order is used by default. - - * SpewKeys - Specifies that, as a last resort attempt, map keys should be - spewed to strings and sorted by those strings. This is only - considered if SortKeys is true. - -Dump Usage - -Simply call spew.Dump with a list of variables you want to dump: - - spew.Dump(myVar1, myVar2, ...) - -You may also call spew.Fdump if you would prefer to output to an arbitrary -io.Writer. For example, to dump to standard error: - - spew.Fdump(os.Stderr, myVar1, myVar2, ...) - -A third option is to call spew.Sdump to get the formatted output as a string: - - str := spew.Sdump(myVar1, myVar2, ...) - -Sample Dump Output - -See the Dump example for details on the setup of the types and variables being -shown here. - - (main.Foo) { - unexportedField: (*main.Bar)(0xf84002e210)({ - flag: (main.Flag) flagTwo, - data: (uintptr) - }), - ExportedField: (map[interface {}]interface {}) (len=1) { - (string) (len=3) "one": (bool) true - } - } - -Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C -command as shown. - ([]uint8) (len=32 cap=32) { - 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... | - 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0| - 00000020 31 32 |12| - } - -Custom Formatter - -Spew provides a custom formatter that implements the fmt.Formatter interface -so that it integrates cleanly with standard fmt package printing functions. The -formatter is useful for inline printing of smaller data types similar to the -standard %v format specifier. 
- -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Custom Formatter Usage - -The simplest way to make use of the spew custom formatter is to call one of the -convenience functions such as spew.Printf, spew.Println, or spew.Printf. The -functions have syntax you are most likely already familiar with: - - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Println(myVar, myVar2) - spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -See the Index for the full list convenience functions. - -Sample Formatter Output - -Double pointer to a uint8: - %v: <**>5 - %+v: <**>(0xf8400420d0->0xf8400420c8)5 - %#v: (**uint8)5 - %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5 - -Pointer to circular struct with a uint8 field and a pointer to itself: - %v: <*>{1 <*>} - %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)} - %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)} - %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)} - -See the Printf example for details on the setup of variables being shown -here. - -Errors - -Since it is possible for custom Stringer/error interfaces to panic, spew -detects them and handles them internally by printing the panic information -inline with the output. Since spew is intended to provide deep pretty printing -capabilities on structures, it intentionally does not return any errors. 
-*/ -package spew diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/dump.go deleted file mode 100644 index a0ff95e2..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/dump.go +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "encoding/hex" - "fmt" - "io" - "os" - "reflect" - "regexp" - "strconv" - "strings" -) - -var ( - // uint8Type is a reflect.Type representing a uint8. It is used to - // convert cgo types to uint8 slices for hexdumping. - uint8Type = reflect.TypeOf(uint8(0)) - - // cCharRE is a regular expression that matches a cgo char. - // It is used to detect character arrays to hexdump them. - cCharRE = regexp.MustCompile("^.*\\._Ctype_char$") - - // cUnsignedCharRE is a regular expression that matches a cgo unsigned - // char. It is used to detect unsigned character arrays to hexdump - // them. - cUnsignedCharRE = regexp.MustCompile("^.*\\._Ctype_unsignedchar$") - - // cUint8tCharRE is a regular expression that matches a cgo uint8_t. - // It is used to detect uint8_t arrays to hexdump them. 
- cUint8tCharRE = regexp.MustCompile("^.*\\._Ctype_uint8_t$") -) - -// dumpState contains information about the state of a dump operation. -type dumpState struct { - w io.Writer - depth int - pointers map[uintptr]int - ignoreNextType bool - ignoreNextIndent bool - cs *ConfigState -} - -// indent performs indentation according to the depth level and cs.Indent -// option. -func (d *dumpState) indent() { - if d.ignoreNextIndent { - d.ignoreNextIndent = false - return - } - d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth)) -} - -// unpackValue returns values inside of non-nil interfaces when possible. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. -func (d *dumpState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface && !v.IsNil() { - v = v.Elem() - } - return v -} - -// dumpPtr handles formatting of pointers by indirecting them as necessary. -func (d *dumpState) dumpPtr(v reflect.Value) { - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range d.pointers { - if depth >= d.depth { - delete(d.pointers, k) - } - } - - // Keep list of all dereferenced pointers to show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by dereferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. 
- nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := d.pointers[addr]; ok && pd < d.depth { - cycleFound = true - indirects-- - break - } - d.pointers[addr] = d.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type information. - d.w.Write(openParenBytes) - d.w.Write(bytes.Repeat(asteriskBytes, indirects)) - d.w.Write([]byte(ve.Type().String())) - d.w.Write(closeParenBytes) - - // Display pointer information. - if len(pointerChain) > 0 { - d.w.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - d.w.Write(pointerChainBytes) - } - printHexPtr(d.w, addr) - } - d.w.Write(closeParenBytes) - } - - // Display dereferenced value. - d.w.Write(openParenBytes) - switch { - case nilFound == true: - d.w.Write(nilAngleBytes) - - case cycleFound == true: - d.w.Write(circularBytes) - - default: - d.ignoreNextType = true - d.dump(ve) - } - d.w.Write(closeParenBytes) -} - -// dumpSlice handles formatting of arrays and slices. Byte (uint8 under -// reflection) arrays and slices are dumped in hexdump -C fashion. -func (d *dumpState) dumpSlice(v reflect.Value) { - // Determine whether this type should be hex dumped or not. Also, - // for types which should be hexdumped, try to use the underlying data - // first, then fall back to trying to convert them to a uint8 slice. - var buf []uint8 - doConvert := false - doHexDump := false - numEntries := v.Len() - if numEntries > 0 { - vt := v.Index(0).Type() - vts := vt.String() - switch { - // C types that need to be converted. 
- case cCharRE.MatchString(vts): - fallthrough - case cUnsignedCharRE.MatchString(vts): - fallthrough - case cUint8tCharRE.MatchString(vts): - doConvert = true - - // Try to use existing uint8 slices and fall back to converting - // and copying if that fails. - case vt.Kind() == reflect.Uint8: - // We need an addressable interface to convert the type - // to a byte slice. However, the reflect package won't - // give us an interface on certain things like - // unexported struct fields in order to enforce - // visibility rules. We use unsafe, when available, to - // bypass these restrictions since this package does not - // mutate the values. - vs := v - if !vs.CanInterface() || !vs.CanAddr() { - vs = unsafeReflectValue(vs) - } - if !UnsafeDisabled { - vs = vs.Slice(0, numEntries) - - // Use the existing uint8 slice if it can be - // type asserted. - iface := vs.Interface() - if slice, ok := iface.([]uint8); ok { - buf = slice - doHexDump = true - break - } - } - - // The underlying data needs to be converted if it can't - // be type asserted to a uint8 slice. - doConvert = true - } - - // Copy and convert the underlying type if needed. - if doConvert && vt.ConvertibleTo(uint8Type) { - // Convert and copy each element into a uint8 byte - // slice. - buf = make([]uint8, numEntries) - for i := 0; i < numEntries; i++ { - vv := v.Index(i) - buf[i] = uint8(vv.Convert(uint8Type).Uint()) - } - doHexDump = true - } - } - - // Hexdump the entire slice as needed. - if doHexDump { - indent := strings.Repeat(d.cs.Indent, d.depth) - str := indent + hex.Dump(buf) - str = strings.Replace(str, "\n", "\n"+indent, -1) - str = strings.TrimRight(str, d.cs.Indent) - d.w.Write([]byte(str)) - return - } - - // Recursively call dump for each item. - for i := 0; i < numEntries; i++ { - d.dump(d.unpackValue(v.Index(i))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } -} - -// dump is the main workhorse for dumping a value. 
It uses the passed reflect -// value to figure out what kind of object we are dealing with and formats it -// appropriately. It is a recursive function, however circular data structures -// are detected and handled properly. -func (d *dumpState) dump(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - d.w.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - d.indent() - d.dumpPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !d.ignoreNextType { - d.indent() - d.w.Write(openParenBytes) - d.w.Write([]byte(v.Type().String())) - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - d.ignoreNextType = false - - // Display length and capacity if the built-in len and cap functions - // work with the value's kind and the len/cap itself is non-zero. - valueLen, valueCap := 0, 0 - switch v.Kind() { - case reflect.Array, reflect.Slice, reflect.Chan: - valueLen, valueCap = v.Len(), v.Cap() - case reflect.Map, reflect.String: - valueLen = v.Len() - } - if valueLen != 0 || valueCap != 0 { - d.w.Write(openParenBytes) - if valueLen != 0 { - d.w.Write(lenEqualsBytes) - printInt(d.w, int64(valueLen), 10) - } - if valueCap != 0 { - if valueLen != 0 { - d.w.Write(spaceBytes) - } - d.w.Write(capEqualsBytes) - printInt(d.w, int64(valueCap), 10) - } - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - - // Call Stringer/error interfaces if they exist and the handle methods flag - // is enabled - if !d.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(d.cs, d.w, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. 
- - case reflect.Bool: - printBool(d.w, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(d.w, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(d.w, v.Uint(), 10) - - case reflect.Float32: - printFloat(d.w, v.Float(), 32) - - case reflect.Float64: - printFloat(d.w, v.Float(), 64) - - case reflect.Complex64: - printComplex(d.w, v.Complex(), 32) - - case reflect.Complex128: - printComplex(d.w, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - d.dumpSlice(v) - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.String: - d.w.Write([]byte(strconv.Quote(v.String()))) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - d.w.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. 
- - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - numEntries := v.Len() - keys := v.MapKeys() - if d.cs.SortKeys { - sortValues(keys, d.cs) - } - for i, key := range keys { - d.dump(d.unpackValue(key)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.MapIndex(key))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Struct: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - vt := v.Type() - numFields := v.NumField() - for i := 0; i < numFields; i++ { - d.indent() - vtf := vt.Field(i) - d.w.Write([]byte(vtf.Name)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.Field(i))) - if i < (numFields - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(d.w, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(d.w, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it in case any new - // types are added. - default: - if v.CanInterface() { - fmt.Fprintf(d.w, "%v", v.Interface()) - } else { - fmt.Fprintf(d.w, "%v", v.String()) - } - } -} - -// fdump is a helper function to consolidate the logic from the various public -// methods which take varying writers and config states. 
-func fdump(cs *ConfigState, w io.Writer, a ...interface{}) { - for _, arg := range a { - if arg == nil { - w.Write(interfaceBytes) - w.Write(spaceBytes) - w.Write(nilAngleBytes) - w.Write(newlineBytes) - continue - } - - d := dumpState{w: w, cs: cs} - d.pointers = make(map[uintptr]int) - d.dump(reflect.ValueOf(arg)) - d.w.Write(newlineBytes) - } -} - -// Fdump formats and displays the passed arguments to io.Writer w. It formats -// exactly the same as Dump. -func Fdump(w io.Writer, a ...interface{}) { - fdump(&Config, w, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(&Config, &buf, a...) - return buf.String() -} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by an exported package global, -spew.Config. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func Dump(a ...interface{}) { - fdump(&Config, os.Stdout, a...) 
-} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/format.go deleted file mode 100644 index ecf3b80e..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/format.go +++ /dev/null @@ -1,419 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "reflect" - "strconv" - "strings" -) - -// supportedFlags is a list of all the character flags supported by fmt package. -const supportedFlags = "0-+# " - -// formatState implements the fmt.Formatter interface and contains information -// about the state of a formatting operation. The NewFormatter function can -// be used to get a new Formatter which can be used directly as arguments -// in standard fmt package printing calls. -type formatState struct { - value interface{} - fs fmt.State - depth int - pointers map[uintptr]int - ignoreNextType bool - cs *ConfigState -} - -// buildDefaultFormat recreates the original format string without precision -// and width information to pass in to fmt.Sprintf in the case of an -// unrecognized type. 
Unless new types are added to the language, this -// function won't ever be called. -func (f *formatState) buildDefaultFormat() (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - buf.WriteRune('v') - - format = buf.String() - return format -} - -// constructOrigFormat recreates the original format string including precision -// and width information to pass along to the standard fmt package. This allows -// automatic deferral of all format strings this package doesn't support. -func (f *formatState) constructOrigFormat(verb rune) (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - if width, ok := f.fs.Width(); ok { - buf.WriteString(strconv.Itoa(width)) - } - - if precision, ok := f.fs.Precision(); ok { - buf.Write(precisionBytes) - buf.WriteString(strconv.Itoa(precision)) - } - - buf.WriteRune(verb) - - format = buf.String() - return format -} - -// unpackValue returns values inside of non-nil interfaces when possible and -// ensures that types for values which have been unpacked from an interface -// are displayed when the show types flag is also set. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. -func (f *formatState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface { - f.ignoreNextType = false - if !v.IsNil() { - v = v.Elem() - } - } - return v -} - -// formatPtr handles formatting of pointers by indirecting them as necessary. -func (f *formatState) formatPtr(v reflect.Value) { - // Display nil if top level pointer is nil. 
- showTypes := f.fs.Flag('#') - if v.IsNil() && (!showTypes || f.ignoreNextType) { - f.fs.Write(nilAngleBytes) - return - } - - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range f.pointers { - if depth >= f.depth { - delete(f.pointers, k) - } - } - - // Keep list of all dereferenced pointers to possibly show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by derferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. - nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := f.pointers[addr]; ok && pd < f.depth { - cycleFound = true - indirects-- - break - } - f.pointers[addr] = f.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type or indirection level depending on flags. - if showTypes && !f.ignoreNextType { - f.fs.Write(openParenBytes) - f.fs.Write(bytes.Repeat(asteriskBytes, indirects)) - f.fs.Write([]byte(ve.Type().String())) - f.fs.Write(closeParenBytes) - } else { - if nilFound || cycleFound { - indirects += strings.Count(ve.Type().String(), "*") - } - f.fs.Write(openAngleBytes) - f.fs.Write([]byte(strings.Repeat("*", indirects))) - f.fs.Write(closeAngleBytes) - } - - // Display pointer information depending on flags. - if f.fs.Flag('+') && (len(pointerChain) > 0) { - f.fs.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - f.fs.Write(pointerChainBytes) - } - printHexPtr(f.fs, addr) - } - f.fs.Write(closeParenBytes) - } - - // Display dereferenced value. 
- switch { - case nilFound == true: - f.fs.Write(nilAngleBytes) - - case cycleFound == true: - f.fs.Write(circularShortBytes) - - default: - f.ignoreNextType = true - f.format(ve) - } -} - -// format is the main workhorse for providing the Formatter interface. It -// uses the passed reflect value to figure out what kind of object we are -// dealing with and formats it appropriately. It is a recursive function, -// however circular data structures are detected and handled properly. -func (f *formatState) format(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - f.fs.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - f.formatPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !f.ignoreNextType && f.fs.Flag('#') { - f.fs.Write(openParenBytes) - f.fs.Write([]byte(v.Type().String())) - f.fs.Write(closeParenBytes) - } - f.ignoreNextType = false - - // Call Stringer/error interfaces if they exist and the handle methods - // flag is enabled. - if !f.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(f.cs, f.fs, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. 
- - case reflect.Bool: - printBool(f.fs, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(f.fs, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(f.fs, v.Uint(), 10) - - case reflect.Float32: - printFloat(f.fs, v.Float(), 32) - - case reflect.Float64: - printFloat(f.fs, v.Float(), 64) - - case reflect.Complex64: - printComplex(f.fs, v.Complex(), 32) - - case reflect.Complex128: - printComplex(f.fs, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - f.fs.Write(openBracketBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - numEntries := v.Len() - for i := 0; i < numEntries; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(v.Index(i))) - } - } - f.depth-- - f.fs.Write(closeBracketBytes) - - case reflect.String: - f.fs.Write([]byte(v.String())) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - f.fs.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. 
- - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - - f.fs.Write(openMapBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - keys := v.MapKeys() - if f.cs.SortKeys { - sortValues(keys, f.cs) - } - for i, key := range keys { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(key)) - f.fs.Write(colonBytes) - f.ignoreNextType = true - f.format(f.unpackValue(v.MapIndex(key))) - } - } - f.depth-- - f.fs.Write(closeMapBytes) - - case reflect.Struct: - numFields := v.NumField() - f.fs.Write(openBraceBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - vt := v.Type() - for i := 0; i < numFields; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - vtf := vt.Field(i) - if f.fs.Flag('+') || f.fs.Flag('#') { - f.fs.Write([]byte(vtf.Name)) - f.fs.Write(colonBytes) - } - f.format(f.unpackValue(v.Field(i))) - } - } - f.depth-- - f.fs.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(f.fs, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(f.fs, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it if any get added. - default: - format := f.buildDefaultFormat() - if v.CanInterface() { - fmt.Fprintf(f.fs, format, v.Interface()) - } else { - fmt.Fprintf(f.fs, format, v.String()) - } - } -} - -// Format satisfies the fmt.Formatter interface. See NewFormatter for usage -// details. -func (f *formatState) Format(fs fmt.State, verb rune) { - f.fs = fs - - // Use standard formatting for verbs that are not v. 
- if verb != 'v' { - format := f.constructOrigFormat(verb) - fmt.Fprintf(fs, format, f.value) - return - } - - if f.value == nil { - if fs.Flag('#') { - fs.Write(interfaceBytes) - } - fs.Write(nilAngleBytes) - return - } - - f.format(reflect.ValueOf(f.value)) -} - -// newFormatter is a helper function to consolidate the logic from the various -// public methods which take varying config states. -func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter { - fs := &formatState{value: v, cs: cs} - fs.pointers = make(map[uintptr]int) - return fs -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -Printf, Println, or Fprintf. 
-*/ -func NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(&Config, v) -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/spew.go b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/spew.go deleted file mode 100644 index d8233f54..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/spew/spew.go +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2013 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "fmt" - "io" -) - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the formatted string as a value that satisfies error. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. 
It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a default Formatter interface returned by NewFormatter. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b)) -func Print(a ...interface{}) (n int, err error) { - return fmt.Print(convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. 
It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b)) -func Println(a ...interface{}) (n int, err error) { - return fmt.Println(convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprint(a ...interface{}) string { - return fmt.Sprint(convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. 
See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintln(a ...interface{}) string { - return fmt.Sprintln(convertArgs(a)...) -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a default spew Formatter interface. -func convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = NewFormatter(arg) - } - return formatters -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE deleted file mode 100644 index c67dad61..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013, Patrick Mezard -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution. - The names of its contributors may not be used to endorse or promote -products derived from this software without specific prior written -permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/difflib/difflib.go deleted file mode 100644 index 64cc40fe..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/difflib/difflib.go +++ /dev/null @@ -1,758 +0,0 @@ -// Package difflib is a partial port of Python difflib module. -// -// It provides tools to compare sequences of strings and generate textual diffs. -// -// The following class and functions have been ported: -// -// - SequenceMatcher -// -// - unified_diff -// -// - context_diff -// -// Getting unified diffs was the main goal of the port. Keep in mind this code -// is mostly suitable to output text differences in a human friendly way, there -// are no guarantees generated diffs are consumable by patch(1). -package difflib - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" -) - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func calculateRatio(matches, length int) float64 { - if length > 0 { - return 2.0 * float64(matches) / float64(length) - } - return 1.0 -} - -type Match struct { - A int - B int - Size int -} - -type OpCode struct { - Tag byte - I1 int - I2 int - J1 int - J2 int -} - -// SequenceMatcher compares sequence of strings. 
The basic -// algorithm predates, and is a little fancier than, an algorithm -// published in the late 1980's by Ratcliff and Obershelp under the -// hyperbolic name "gestalt pattern matching". The basic idea is to find -// the longest contiguous matching subsequence that contains no "junk" -// elements (R-O doesn't address junk). The same idea is then applied -// recursively to the pieces of the sequences to the left and to the right -// of the matching subsequence. This does not yield minimal edit -// sequences, but does tend to yield matches that "look right" to people. -// -// SequenceMatcher tries to compute a "human-friendly diff" between two -// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -// longest *contiguous* & junk-free matching subsequence. That's what -// catches peoples' eyes. The Windows(tm) windiff has another interesting -// notion, pairing up elements that appear uniquely in each sequence. -// That, and the method here, appear to yield more intuitive difference -// reports than does diff. This method appears to be the least vulnerable -// to synching up on blocks of "junk lines", though (like blank lines in -// ordinary text files, or maybe "

" lines in HTML files). That may be -// because this is the only method of the 3 that has a *concept* of -// "junk" . -// -// Timing: Basic R-O is cubic time worst case and quadratic time expected -// case. SequenceMatcher is quadratic time for the worst case and has -// expected-case behavior dependent in a complicated way on how many -// elements the sequences have in common; best case time is linear. -type SequenceMatcher struct { - a []string - b []string - b2j map[string][]int - IsJunk func(string) bool - autoJunk bool - bJunk map[string]struct{} - matchingBlocks []Match - fullBCount map[string]int - bPopular map[string]struct{} - opCodes []OpCode -} - -func NewMatcher(a, b []string) *SequenceMatcher { - m := SequenceMatcher{autoJunk: true} - m.SetSeqs(a, b) - return &m -} - -func NewMatcherWithJunk(a, b []string, autoJunk bool, - isJunk func(string) bool) *SequenceMatcher { - - m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} - m.SetSeqs(a, b) - return &m -} - -// Set two sequences to be compared. -func (m *SequenceMatcher) SetSeqs(a, b []string) { - m.SetSeq1(a) - m.SetSeq2(b) -} - -// Set the first sequence to be compared. The second sequence to be compared is -// not changed. -// -// SequenceMatcher computes and caches detailed information about the second -// sequence, so if you want to compare one sequence S against many sequences, -// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other -// sequences. -// -// See also SetSeqs() and SetSeq2(). -func (m *SequenceMatcher) SetSeq1(a []string) { - if &a == &m.a { - return - } - m.a = a - m.matchingBlocks = nil - m.opCodes = nil -} - -// Set the second sequence to be compared. The first sequence to be compared is -// not changed. 
-func (m *SequenceMatcher) SetSeq2(b []string) { - if &b == &m.b { - return - } - m.b = b - m.matchingBlocks = nil - m.opCodes = nil - m.fullBCount = nil - m.chainB() -} - -func (m *SequenceMatcher) chainB() { - // Populate line -> index mapping - b2j := map[string][]int{} - for i, s := range m.b { - indices := b2j[s] - indices = append(indices, i) - b2j[s] = indices - } - - // Purge junk elements - m.bJunk = map[string]struct{}{} - if m.IsJunk != nil { - junk := m.bJunk - for s, _ := range b2j { - if m.IsJunk(s) { - junk[s] = struct{}{} - } - } - for s, _ := range junk { - delete(b2j, s) - } - } - - // Purge remaining popular elements - popular := map[string]struct{}{} - n := len(m.b) - if m.autoJunk && n >= 200 { - ntest := n/100 + 1 - for s, indices := range b2j { - if len(indices) > ntest { - popular[s] = struct{}{} - } - } - for s, _ := range popular { - delete(b2j, s) - } - } - m.bPopular = popular - m.b2j = b2j -} - -func (m *SequenceMatcher) isBJunk(s string) bool { - _, ok := m.bJunk[s] - return ok -} - -// Find longest matching block in a[alo:ahi] and b[blo:bhi]. -// -// If IsJunk is not defined: -// -// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where -// alo <= i <= i+k <= ahi -// blo <= j <= j+k <= bhi -// and for all (i',j',k') meeting those conditions, -// k >= k' -// i <= i' -// and if i == i', j <= j' -// -// In other words, of all maximal matching blocks, return one that -// starts earliest in a, and of all those maximal matching blocks that -// start earliest in a, return the one that starts earliest in b. -// -// If IsJunk is defined, first the longest matching block is -// determined as above, but with the additional restriction that no -// junk element appears in the block. Then that block is extended as -// far as possible by matching (only) junk elements on both sides. So -// the resulting block never matches on junk except as identical junk -// happens to be adjacent to an "interesting" match. 
-// -// If no blocks match, return (alo, blo, 0). -func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { - // CAUTION: stripping common prefix or suffix would be incorrect. - // E.g., - // ab - // acab - // Longest matching block is "ab", but if common prefix is - // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so - // strip, so ends up claiming that ab is changed to acab by - // inserting "ca" in the middle. That's minimal but unintuitive: - // "it's obvious" that someone inserted "ac" at the front. - // Windiff ends up at the same place as diff, but by pairing up - // the unique 'b's and then matching the first two 'a's. - besti, bestj, bestsize := alo, blo, 0 - - // find longest junk-free match - // during an iteration of the loop, j2len[j] = length of longest - // junk-free match ending with a[i-1] and b[j] - j2len := map[int]int{} - for i := alo; i != ahi; i++ { - // look at all instances of a[i] in b; note that because - // b2j has no junk keys, the loop is skipped if a[i] is junk - newj2len := map[int]int{} - for _, j := range m.b2j[m.a[i]] { - // a[i] matches b[j] - if j < blo { - continue - } - if j >= bhi { - break - } - k := j2len[j-1] + 1 - newj2len[j] = k - if k > bestsize { - besti, bestj, bestsize = i-k+1, j-k+1, k - } - } - j2len = newj2len - } - - // Extend the best by non-junk elements on each end. In particular, - // "popular" non-junk elements aren't in b2j, which greatly speeds - // the inner loop above, but also means "the best" match so far - // doesn't contain any junk *or* popular non-junk elements. 
- for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - !m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - // Now that we have a wholly interesting match (albeit possibly - // empty!), we may as well suck up the matching junk on each - // side of it too. Can't think of a good reason not to, and it - // saves post-processing the (possibly considerable) expense of - // figuring out what to do with it. In the case of an empty - // interesting match, this is clearly the right thing to do, - // because no other kind of match is possible in the regions. - for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - return Match{A: besti, B: bestj, Size: bestsize} -} - -// Return list of triples describing matching subsequences. -// -// Each triple is of the form (i, j, n), and means that -// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are -// adjacent triples in the list, and the second is not the last triple in the -// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe -// adjacent equal blocks. -// -// The last triple is a dummy, (len(a), len(b), 0), and is the only -// triple with n==0. 
-func (m *SequenceMatcher) GetMatchingBlocks() []Match { - if m.matchingBlocks != nil { - return m.matchingBlocks - } - - var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match - matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { - match := m.findLongestMatch(alo, ahi, blo, bhi) - i, j, k := match.A, match.B, match.Size - if match.Size > 0 { - if alo < i && blo < j { - matched = matchBlocks(alo, i, blo, j, matched) - } - matched = append(matched, match) - if i+k < ahi && j+k < bhi { - matched = matchBlocks(i+k, ahi, j+k, bhi, matched) - } - } - return matched - } - matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) - - // It's possible that we have adjacent equal blocks in the - // matching_blocks list now. - nonAdjacent := []Match{} - i1, j1, k1 := 0, 0, 0 - for _, b := range matched { - // Is this block adjacent to i1, j1, k1? - i2, j2, k2 := b.A, b.B, b.Size - if i1+k1 == i2 && j1+k1 == j2 { - // Yes, so collapse them -- this just increases the length of - // the first block by the length of the second, and the first - // block so lengthened remains the block to compare against. - k1 += k2 - } else { - // Not adjacent. Remember the first block (k1==0 means it's - // the dummy we started with), and make the second block the - // new block to compare against. - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - i1, j1, k1 = i2, j2, k2 - } - } - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - - nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) - m.matchingBlocks = nonAdjacent - return m.matchingBlocks -} - -// Return list of 5-tuples describing how to turn a into b. -// -// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -// tuple preceding it, and likewise for j1 == the previous j2. 
-// -// The tags are characters, with these meanings: -// -// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] -// -// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. -// -// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. -// -// 'e' (equal): a[i1:i2] == b[j1:j2] -func (m *SequenceMatcher) GetOpCodes() []OpCode { - if m.opCodes != nil { - return m.opCodes - } - i, j := 0, 0 - matching := m.GetMatchingBlocks() - opCodes := make([]OpCode, 0, len(matching)) - for _, m := range matching { - // invariant: we've pumped out correct diffs to change - // a[:i] into b[:j], and the next matching block is - // a[ai:ai+size] == b[bj:bj+size]. So we need to pump - // out a diff to change a[i:ai] into b[j:bj], pump out - // the matching block, and move (i,j) beyond the match - ai, bj, size := m.A, m.B, m.Size - tag := byte(0) - if i < ai && j < bj { - tag = 'r' - } else if i < ai { - tag = 'd' - } else if j < bj { - tag = 'i' - } - if tag > 0 { - opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) - } - i, j = ai+size, bj+size - // the list of matching blocks is terminated by a - // sentinel with size 0 - if size > 0 { - opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) - } - } - m.opCodes = opCodes - return m.opCodes -} - -// Isolate change clusters by eliminating ranges with no changes. -// -// Return a generator of groups with up to n lines of context. -// Each group is in the same format as returned by GetOpCodes(). -func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { - if n < 0 { - n = 3 - } - codes := m.GetOpCodes() - if len(codes) == 0 { - codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} - } - // Fixup leading and trailing groups if they show no changes. 
- if codes[0].Tag == 'e' { - c := codes[0] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} - } - if codes[len(codes)-1].Tag == 'e' { - c := codes[len(codes)-1] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} - } - nn := n + n - groups := [][]OpCode{} - group := []OpCode{} - for _, c := range codes { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - // End the current group and start a new one whenever - // there is a large range with no changes. - if c.Tag == 'e' && i2-i1 > nn { - group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), - j1, min(j2, j1+n)}) - groups = append(groups, group) - group = []OpCode{} - i1, j1 = max(i1, i2-n), max(j1, j2-n) - } - group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) - } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { - groups = append(groups, group) - } - return groups -} - -// Return a measure of the sequences' similarity (float in [0,1]). -// -// Where T is the total number of elements in both sequences, and -// M is the number of matches, this is 2.0*M / T. -// Note that this is 1 if the sequences are identical, and 0 if -// they have nothing in common. -// -// .Ratio() is expensive to compute if you haven't already computed -// .GetMatchingBlocks() or .GetOpCodes(), in which case you may -// want to try .QuickRatio() or .RealQuickRation() first to get an -// upper bound. -func (m *SequenceMatcher) Ratio() float64 { - matches := 0 - for _, m := range m.GetMatchingBlocks() { - matches += m.Size - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() relatively quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute. 
-func (m *SequenceMatcher) QuickRatio() float64 { - // viewing a and b as multisets, set matches to the cardinality - // of their intersection; this counts the number of matches - // without regard to order, so is clearly an upper bound - if m.fullBCount == nil { - m.fullBCount = map[string]int{} - for _, s := range m.b { - m.fullBCount[s] = m.fullBCount[s] + 1 - } - } - - // avail[x] is the number of times x appears in 'b' less the - // number of times we've seen it in 'a' so far ... kinda - avail := map[string]int{} - matches := 0 - for _, s := range m.a { - n, ok := avail[s] - if !ok { - n = m.fullBCount[s] - } - avail[s] = n - 1 - if n > 0 { - matches += 1 - } - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() very quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute than either .Ratio() or .QuickRatio(). -func (m *SequenceMatcher) RealQuickRatio() float64 { - la, lb := len(m.a), len(m.b) - return calculateRatio(min(la, lb), la+lb) -} - -// Convert range to the "ed" format -func formatRangeUnified(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 1 { - return fmt.Sprintf("%d", beginning) - } - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - return fmt.Sprintf("%d,%d", beginning, length) -} - -// Unified diff parameters -type UnifiedDiff struct { - A []string // First sequence lines - FromFile string // First file name - FromDate string // First file time - B []string // Second sequence lines - ToFile string // Second file name - ToDate string // Second file time - Eol string // Headers end of line, defaults to LF - Context int // Number of context lines -} - -// Compare two sequences of lines; generate the delta as a unified diff. 
-// -// Unified diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by 'n' which -// defaults to three. -// -// By default, the diff control lines (those with ---, +++, or @@) are -// created with a trailing newline. This is helpful so that inputs -// created from file.readlines() result in diffs that are suitable for -// file.writelines() since both the inputs and outputs have trailing -// newlines. -// -// For inputs that do not have trailing newlines, set the lineterm -// argument to "" so that the output will be uniformly newline free. -// -// The unidiff format normally has a header for filenames and modification -// times. Any or all of these may be specified using strings for -// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -// The modification times are normally expressed in the ISO 8601 format. -func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - w := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - return err - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - err := w("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = w("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err - } - } - first, last := g[0], g[len(g)-1] - range1 := formatRangeUnified(first.I1, last.I2) - range2 := formatRangeUnified(first.J1, last.J2) - if err := w("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { - return err - } - for _, c := range g { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - if c.Tag == 'e' { - 
for _, line := range diff.A[i1:i2] { - if err := w(" " + line); err != nil { - return err - } - } - continue - } - if c.Tag == 'r' || c.Tag == 'd' { - for _, line := range diff.A[i1:i2] { - if err := w("-" + line); err != nil { - return err - } - } - } - if c.Tag == 'r' || c.Tag == 'i' { - for _, line := range diff.B[j1:j2] { - if err := w("+" + line); err != nil { - return err - } - } - } - } - } - return nil -} - -// Like WriteUnifiedDiff but returns the diff a string. -func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteUnifiedDiff(w, diff) - return string(w.Bytes()), err -} - -// Convert range to the "ed" format. -func formatRangeContext(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - if length <= 1 { - return fmt.Sprintf("%d", beginning) - } - return fmt.Sprintf("%d,%d", beginning, beginning+length-1) -} - -type ContextDiff UnifiedDiff - -// Compare two sequences of lines; generate the delta as a context diff. -// -// Context diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by diff.Context -// which defaults to three. -// -// By default, the diff control lines (those with *** or ---) are -// created with a trailing newline. -// -// For inputs that do not have trailing newlines, set the diff.Eol -// argument to "" so that the output will be uniformly newline free. -// -// The context diff format normally has a header for filenames and -// modification times. Any or all of these may be specified using -// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. -// The modification times are normally expressed in the ISO 8601 format. -// If not specified, the strings default to blanks. 
-func WriteContextDiff(writer io.Writer, diff ContextDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - var diffErr error - w := func(format string, args ...interface{}) { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - if diffErr == nil && err != nil { - diffErr = err - } - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - prefix := map[byte]string{ - 'i': "+ ", - 'd': "- ", - 'r': "! ", - 'e': " ", - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - w("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) - w("--- %s%s%s", diff.ToFile, toDate, diff.Eol) - } - - first, last := g[0], g[len(g)-1] - w("***************" + diff.Eol) - - range1 := formatRangeContext(first.I1, last.I2) - w("*** %s ****%s", range1, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'd' { - for _, cc := range g { - if cc.Tag == 'i' { - continue - } - for _, line := range diff.A[cc.I1:cc.I2] { - w(prefix[cc.Tag] + line) - } - } - break - } - } - - range2 := formatRangeContext(first.J1, last.J2) - w("--- %s ----%s", range2, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'i' { - for _, cc := range g { - if cc.Tag == 'd' { - continue - } - for _, line := range diff.B[cc.J1:cc.J2] { - w(prefix[cc.Tag] + line) - } - } - break - } - } - } - return diffErr -} - -// Like WriteContextDiff but returns the diff a string. -func GetContextDiffString(diff ContextDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteContextDiff(w, diff) - return string(w.Bytes()), err -} - -// Split a string on "\n" while preserving them. The output can be used -// as input for UnifiedDiff and ContextDiff structures. 
-func SplitLines(s string) []string { - lines := strings.SplitAfter(s, "\n") - lines[len(lines)-1] += "\n" - return lines -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/.gitignore b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/.gitignore deleted file mode 100644 index 00268614..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md deleted file mode 100644 index 21999458..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md +++ /dev/null @@ -1,23 +0,0 @@ -objx - by Mat Ryer and Tyler Bunnell - -The MIT License (MIT) - -Copyright (c) 2014 Stretchr, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/README.md b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/README.md deleted file mode 100644 index 4aa18068..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# objx - - * Jump into the [API Documentation](http://godoc.org/github.com/stretchr/objx) diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/accessors.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/accessors.go deleted file mode 100644 index 721bcac7..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/accessors.go +++ /dev/null @@ -1,179 +0,0 @@ -package objx - -import ( - "fmt" - "regexp" - "strconv" - "strings" -) - -// arrayAccesRegexString is the regex used to extract the array number -// from the access path -const arrayAccesRegexString = `^(.+)\[([0-9]+)\]$` - -// arrayAccesRegex is the compiled arrayAccesRegexString -var arrayAccesRegex = regexp.MustCompile(arrayAccesRegexString) - -// Get gets the value using the specified selector and -// returns it inside a new Obj object. -// -// If it cannot find the value, Get will return a nil -// value inside an instance of Obj. -// -// Get can only operate directly on map[string]interface{} and []interface. 
-// -// Example -// -// To access the title of the third chapter of the second book, do: -// -// o.Get("books[1].chapters[2].title") -func (m Map) Get(selector string) *Value { - rawObj := access(m, selector, nil, false, false) - return &Value{data: rawObj} -} - -// Set sets the value using the specified selector and -// returns the object on which Set was called. -// -// Set can only operate directly on map[string]interface{} and []interface -// -// Example -// -// To set the title of the third chapter of the second book, do: -// -// o.Set("books[1].chapters[2].title","Time to Go") -func (m Map) Set(selector string, value interface{}) Map { - access(m, selector, value, true, false) - return m -} - -// access accesses the object using the selector and performs the -// appropriate action. -func access(current, selector, value interface{}, isSet, panics bool) interface{} { - - switch selector.(type) { - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: - - if array, ok := current.([]interface{}); ok { - index := intFromInterface(selector) - - if index >= len(array) { - if panics { - panic(fmt.Sprintf("objx: Index %d is out of range. Slice only contains %d items.", index, len(array))) - } - return nil - } - - return array[index] - } - - return nil - - case string: - - selStr := selector.(string) - selSegs := strings.SplitN(selStr, PathSeparator, 2) - thisSel := selSegs[0] - index := -1 - var err error - - // https://github.com/stretchr/objx/issues/12 - if strings.Contains(thisSel, "[") { - - arrayMatches := arrayAccesRegex.FindStringSubmatch(thisSel) - - if len(arrayMatches) > 0 { - - // Get the key into the map - thisSel = arrayMatches[1] - - // Get the index into the array at the key - index, err = strconv.Atoi(arrayMatches[2]) - - if err != nil { - // This should never happen. If it does, something has gone - // seriously wrong. Panic. - panic("objx: Array index is not an integer. 
Must use array[int].") - } - - } - } - - if curMap, ok := current.(Map); ok { - current = map[string]interface{}(curMap) - } - - // get the object in question - switch current.(type) { - case map[string]interface{}: - curMSI := current.(map[string]interface{}) - if len(selSegs) <= 1 && isSet { - curMSI[thisSel] = value - return nil - } else { - current = curMSI[thisSel] - } - default: - current = nil - } - - if current == nil && panics { - panic(fmt.Sprintf("objx: '%v' invalid on object.", selector)) - } - - // do we need to access the item of an array? - if index > -1 { - if array, ok := current.([]interface{}); ok { - if index < len(array) { - current = array[index] - } else { - if panics { - panic(fmt.Sprintf("objx: Index %d is out of range. Slice only contains %d items.", index, len(array))) - } - current = nil - } - } - } - - if len(selSegs) > 1 { - current = access(current, selSegs[1], value, isSet, panics) - } - - } - - return current - -} - -// intFromInterface converts an interface object to the largest -// representation of an unsigned integer using a type switch and -// assertions -func intFromInterface(selector interface{}) int { - var value int - switch selector.(type) { - case int: - value = selector.(int) - case int8: - value = int(selector.(int8)) - case int16: - value = int(selector.(int16)) - case int32: - value = int(selector.(int32)) - case int64: - value = int(selector.(int64)) - case uint: - value = int(selector.(uint)) - case uint8: - value = int(selector.(uint8)) - case uint16: - value = int(selector.(uint16)) - case uint32: - value = int(selector.(uint32)) - case uint64: - value = int(selector.(uint64)) - default: - panic("objx: array access argument is not an integer type (this should never happen)") - } - - return value -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/array-access.txt b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/array-access.txt deleted file mode 
100644 index 30602347..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/array-access.txt +++ /dev/null @@ -1,14 +0,0 @@ - case []{1}: - a := object.([]{1}) - if isSet { - a[index] = value.({1}) - } else { - if index >= len(a) { - if panics { - panic(fmt.Sprintf("objx: Index %d is out of range because the []{1} only contains %d items.", index, len(a))) - } - return nil - } else { - return a[index] - } - } diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/index.html b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/index.html deleted file mode 100644 index 379ffc3c..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/index.html +++ /dev/null @@ -1,86 +0,0 @@ - - - - Codegen - - - - - -

- Template -

-

- Use {x} as a placeholder for each argument. -

- - -

- Arguments (comma separated) -

-

- One block per line -

- - -

- Output -

- - - - - - - - diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/template.txt b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/template.txt deleted file mode 100644 index b396900b..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/template.txt +++ /dev/null @@ -1,286 +0,0 @@ -/* - {4} ({1} and []{1}) - -------------------------------------------------- -*/ - -// {4} gets the value as a {1}, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) {4}(optionalDefault ...{1}) {1} { - if s, ok := v.data.({1}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return {3} -} - -// Must{4} gets the value as a {1}. -// -// Panics if the object is not a {1}. -func (v *Value) Must{4}() {1} { - return v.data.({1}) -} - -// {4}Slice gets the value as a []{1}, returns the optionalDefault -// value or nil if the value is not a []{1}. -func (v *Value) {4}Slice(optionalDefault ...[]{1}) []{1} { - if s, ok := v.data.([]{1}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// Must{4}Slice gets the value as a []{1}. -// -// Panics if the object is not a []{1}. -func (v *Value) Must{4}Slice() []{1} { - return v.data.([]{1}) -} - -// Is{4} gets whether the object contained is a {1} or not. -func (v *Value) Is{4}() bool { - _, ok := v.data.({1}) - return ok -} - -// Is{4}Slice gets whether the object contained is a []{1} or not. -func (v *Value) Is{4}Slice() bool { - _, ok := v.data.([]{1}) - return ok -} - -// Each{4} calls the specified callback for each object -// in the []{1}. -// -// Panics if the object is the wrong type. 
-func (v *Value) Each{4}(callback func(int, {1}) bool) *Value { - - for index, val := range v.Must{4}Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// Where{4} uses the specified decider function to select items -// from the []{1}. The object contained in the result will contain -// only the selected items. -func (v *Value) Where{4}(decider func(int, {1}) bool) *Value { - - var selected []{1} - - v.Each{4}(func(index int, val {1}) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data:selected} - -} - -// Group{4} uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]{1}. -func (v *Value) Group{4}(grouper func(int, {1}) string) *Value { - - groups := make(map[string][]{1}) - - v.Each{4}(func(index int, val {1}) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]{1}, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data:groups} - -} - -// Replace{4} uses the specified function to replace each {1}s -// by iterating each item. The data in the returned result will be a -// []{1} containing the replaced items. -func (v *Value) Replace{4}(replacer func(int, {1}) {1}) *Value { - - arr := v.Must{4}Slice() - replaced := make([]{1}, len(arr)) - - v.Each{4}(func(index int, val {1}) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data:replaced} - -} - -// Collect{4} uses the specified collector function to collect a value -// for each of the {1}s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) Collect{4}(collector func(int, {1}) interface{}) *Value { - - arr := v.Must{4}Slice() - collected := make([]interface{}, len(arr)) - - v.Each{4}(func(index int, val {1}) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data:collected} -} - -// ************************************************************ -// TESTS -// ************************************************************ - -func Test{4}(t *testing.T) { - - val := {1}( {2} ) - m := map[string]interface{}{"value": val, "nothing": nil} - assert.Equal(t, val, New(m).Get("value").{4}()) - assert.Equal(t, val, New(m).Get("value").Must{4}()) - assert.Equal(t, {1}({3}), New(m).Get("nothing").{4}()) - assert.Equal(t, val, New(m).Get("nothing").{4}({2})) - - assert.Panics(t, func() { - New(m).Get("age").Must{4}() - }) - -} - -func Test{4}Slice(t *testing.T) { - - val := {1}( {2} ) - m := map[string]interface{}{"value": []{1}{ val }, "nothing": nil} - assert.Equal(t, val, New(m).Get("value").{4}Slice()[0]) - assert.Equal(t, val, New(m).Get("value").Must{4}Slice()[0]) - assert.Equal(t, []{1}(nil), New(m).Get("nothing").{4}Slice()) - assert.Equal(t, val, New(m).Get("nothing").{4}Slice( []{1}{ {1}({2}) } )[0]) - - assert.Panics(t, func() { - New(m).Get("nothing").Must{4}Slice() - }) - -} - -func TestIs{4}(t *testing.T) { - - var v *Value - - v = &Value{data: {1}({2})} - assert.True(t, v.Is{4}()) - - v = &Value{data: []{1}{ {1}({2}) }} - assert.True(t, v.Is{4}Slice()) - -} - -func TestEach{4}(t *testing.T) { - - v := &Value{data: []{1}{ {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}) }} - count := 0 - replacedVals := make([]{1}, 0) - assert.Equal(t, v, v.Each{4}(func(i int, val {1}) bool { - - count++ - replacedVals = append(replacedVals, val) - - // abort early - if i == 2 { - return false - } - - return true - - })) - - assert.Equal(t, count, 3) - assert.Equal(t, replacedVals[0], v.Must{4}Slice()[0]) - assert.Equal(t, replacedVals[1], v.Must{4}Slice()[1]) - 
assert.Equal(t, replacedVals[2], v.Must{4}Slice()[2]) - -} - -func TestWhere{4}(t *testing.T) { - - v := &Value{data: []{1}{ {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}) }} - - selected := v.Where{4}(func(i int, val {1}) bool { - return i%2==0 - }).Must{4}Slice() - - assert.Equal(t, 3, len(selected)) - -} - -func TestGroup{4}(t *testing.T) { - - v := &Value{data: []{1}{ {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}) }} - - grouped := v.Group{4}(func(i int, val {1}) string { - return fmt.Sprintf("%v", i%2==0) - }).data.(map[string][]{1}) - - assert.Equal(t, 2, len(grouped)) - assert.Equal(t, 3, len(grouped["true"])) - assert.Equal(t, 3, len(grouped["false"])) - -} - -func TestReplace{4}(t *testing.T) { - - v := &Value{data: []{1}{ {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}) }} - - rawArr := v.Must{4}Slice() - - replaced := v.Replace{4}(func(index int, val {1}) {1} { - if index < len(rawArr)-1 { - return rawArr[index+1] - } - return rawArr[0] - }) - - replacedArr := replaced.Must{4}Slice() - if assert.Equal(t, 6, len(replacedArr)) { - assert.Equal(t, replacedArr[0], rawArr[1]) - assert.Equal(t, replacedArr[1], rawArr[2]) - assert.Equal(t, replacedArr[2], rawArr[3]) - assert.Equal(t, replacedArr[3], rawArr[4]) - assert.Equal(t, replacedArr[4], rawArr[5]) - assert.Equal(t, replacedArr[5], rawArr[0]) - } - -} - -func TestCollect{4}(t *testing.T) { - - v := &Value{data: []{1}{ {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}), {1}({2}) }} - - collected := v.Collect{4}(func(index int, val {1}) interface{} { - return index - }) - - collectedArr := collected.MustInterSlice() - if assert.Equal(t, 6, len(collectedArr)) { - assert.Equal(t, collectedArr[0], 0) - assert.Equal(t, collectedArr[1], 1) - assert.Equal(t, collectedArr[2], 2) - assert.Equal(t, collectedArr[3], 3) - assert.Equal(t, collectedArr[4], 4) - assert.Equal(t, collectedArr[5], 5) - } - -} diff --git 
a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/types_list.txt b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/types_list.txt deleted file mode 100644 index 069d43d8..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/codegen/types_list.txt +++ /dev/null @@ -1,20 +0,0 @@ -Interface,interface{},"something",nil,Inter -Map,map[string]interface{},map[string]interface{}{"name":"Tyler"},nil,MSI -ObjxMap,(Map),New(1),New(nil),ObjxMap -Bool,bool,true,false,Bool -String,string,"hello","",Str -Int,int,1,0,Int -Int8,int8,1,0,Int8 -Int16,int16,1,0,Int16 -Int32,int32,1,0,Int32 -Int64,int64,1,0,Int64 -Uint,uint,1,0,Uint -Uint8,uint8,1,0,Uint8 -Uint16,uint16,1,0,Uint16 -Uint32,uint32,1,0,Uint32 -Uint64,uint64,1,0,Uint64 -Uintptr,uintptr,1,0,Uintptr -Float32,float32,1,0,Float32 -Float64,float64,1,0,Float64 -Complex64,complex64,1,0,Complex64 -Complex128,complex128,1,0,Complex128 diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/constants.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/constants.go deleted file mode 100644 index f9eb42a2..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/constants.go +++ /dev/null @@ -1,13 +0,0 @@ -package objx - -const ( - // PathSeparator is the character used to separate the elements - // of the keypath. - // - // For example, `location.address.city` - PathSeparator string = "." - - // SignatureSeparator is the character that is used to - // separate the Base64 string from the security signature. 
- SignatureSeparator = "_" -) diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/conversions.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/conversions.go deleted file mode 100644 index 9cdfa9f9..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/conversions.go +++ /dev/null @@ -1,117 +0,0 @@ -package objx - -import ( - "bytes" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "net/url" -) - -// JSON converts the contained object to a JSON string -// representation -func (m Map) JSON() (string, error) { - - result, err := json.Marshal(m) - - if err != nil { - err = errors.New("objx: JSON encode failed with: " + err.Error()) - } - - return string(result), err - -} - -// MustJSON converts the contained object to a JSON string -// representation and panics if there is an error -func (m Map) MustJSON() string { - result, err := m.JSON() - if err != nil { - panic(err.Error()) - } - return result -} - -// Base64 converts the contained object to a Base64 string -// representation of the JSON string representation -func (m Map) Base64() (string, error) { - - var buf bytes.Buffer - - jsonData, err := m.JSON() - if err != nil { - return "", err - } - - encoder := base64.NewEncoder(base64.StdEncoding, &buf) - encoder.Write([]byte(jsonData)) - encoder.Close() - - return buf.String(), nil - -} - -// MustBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and panics -// if there is an error -func (m Map) MustBase64() string { - result, err := m.Base64() - if err != nil { - panic(err.Error()) - } - return result -} - -// SignedBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and signs it -// using the provided key. 
-func (m Map) SignedBase64(key string) (string, error) { - - base64, err := m.Base64() - if err != nil { - return "", err - } - - sig := HashWithKey(base64, key) - - return base64 + SignatureSeparator + sig, nil - -} - -// MustSignedBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and signs it -// using the provided key and panics if there is an error -func (m Map) MustSignedBase64(key string) string { - result, err := m.SignedBase64(key) - if err != nil { - panic(err.Error()) - } - return result -} - -/* - URL Query - ------------------------------------------------ -*/ - -// URLValues creates a url.Values object from an Obj. This -// function requires that the wrapped object be a map[string]interface{} -func (m Map) URLValues() url.Values { - - vals := make(url.Values) - - for k, v := range m { - //TODO: can this be done without sprintf? - vals.Set(k, fmt.Sprintf("%v", v)) - } - - return vals -} - -// URLQuery gets an encoded URL query representing the given -// Obj. This function requires that the wrapped object be a -// map[string]interface{} -func (m Map) URLQuery() (string, error) { - return m.URLValues().Encode(), nil -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/doc.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/doc.go deleted file mode 100644 index 47bf85e4..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/doc.go +++ /dev/null @@ -1,72 +0,0 @@ -// objx - Go package for dealing with maps, slices, JSON and other data. -// -// Overview -// -// Objx provides the `objx.Map` type, which is a `map[string]interface{}` that exposes -// a powerful `Get` method (among others) that allows you to easily and quickly get -// access to data within the map, without having to worry too much about type assertions, -// missing data, default values etc. 
-// -// Pattern -// -// Objx uses a preditable pattern to make access data from within `map[string]interface{}'s -// easy. -// -// Call one of the `objx.` functions to create your `objx.Map` to get going: -// -// m, err := objx.FromJSON(json) -// -// NOTE: Any methods or functions with the `Must` prefix will panic if something goes wrong, -// the rest will be optimistic and try to figure things out without panicking. -// -// Use `Get` to access the value you're interested in. You can use dot and array -// notation too: -// -// m.Get("places[0].latlng") -// -// Once you have saught the `Value` you're interested in, you can use the `Is*` methods -// to determine its type. -// -// if m.Get("code").IsStr() { /* ... */ } -// -// Or you can just assume the type, and use one of the strong type methods to -// extract the real value: -// -// m.Get("code").Int() -// -// If there's no value there (or if it's the wrong type) then a default value -// will be returned, or you can be explicit about the default value. -// -// Get("code").Int(-1) -// -// If you're dealing with a slice of data as a value, Objx provides many useful -// methods for iterating, manipulating and selecting that data. You can find out more -// by exploring the index below. -// -// Reading data -// -// A simple example of how to use Objx: -// -// // use MustFromJSON to make an objx.Map from some JSON -// m := objx.MustFromJSON(`{"name": "Mat", "age": 30}`) -// -// // get the details -// name := m.Get("name").Str() -// age := m.Get("age").Int() -// -// // get their nickname (or use their name if they -// // don't have one) -// nickname := m.Get("nickname").Str(name) -// -// Ranging -// -// Since `objx.Map` is a `map[string]interface{}` you can treat it as such. For -// example, to `range` the data, do what you would expect: -// -// m := objx.MustFromJSON(json) -// for key, value := range m { -// -// /* ... do your magic ... 
*/ -// -// } -package objx diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/map.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/map.go deleted file mode 100644 index eb6ed8e2..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/map.go +++ /dev/null @@ -1,222 +0,0 @@ -package objx - -import ( - "encoding/base64" - "encoding/json" - "errors" - "io/ioutil" - "net/url" - "strings" -) - -// MSIConvertable is an interface that defines methods for converting your -// custom types to a map[string]interface{} representation. -type MSIConvertable interface { - // MSI gets a map[string]interface{} (msi) representing the - // object. - MSI() map[string]interface{} -} - -// Map provides extended functionality for working with -// untyped data, in particular map[string]interface (msi). -type Map map[string]interface{} - -// Value returns the internal value instance -func (m Map) Value() *Value { - return &Value{data: m} -} - -// Nil represents a nil Map. -var Nil Map = New(nil) - -// New creates a new Map containing the map[string]interface{} in the data argument. -// If the data argument is not a map[string]interface, New attempts to call the -// MSI() method on the MSIConvertable interface to create one. -func New(data interface{}) Map { - if _, ok := data.(map[string]interface{}); !ok { - if converter, ok := data.(MSIConvertable); ok { - data = converter.MSI() - } else { - return nil - } - } - return Map(data.(map[string]interface{})) -} - -// MSI creates a map[string]interface{} and puts it inside a new Map. -// -// The arguments follow a key, value pattern. -// -// Panics -// -// Panics if any key arugment is non-string or if there are an odd number of arguments. 
-// -// Example -// -// To easily create Maps: -// -// m := objx.MSI("name", "Mat", "age", 29, "subobj", objx.MSI("active", true)) -// -// // creates an Map equivalent to -// m := objx.New(map[string]interface{}{"name": "Mat", "age": 29, "subobj": map[string]interface{}{"active": true}}) -func MSI(keyAndValuePairs ...interface{}) Map { - - newMap := make(map[string]interface{}) - keyAndValuePairsLen := len(keyAndValuePairs) - - if keyAndValuePairsLen%2 != 0 { - panic("objx: MSI must have an even number of arguments following the 'key, value' pattern.") - } - - for i := 0; i < keyAndValuePairsLen; i = i + 2 { - - key := keyAndValuePairs[i] - value := keyAndValuePairs[i+1] - - // make sure the key is a string - keyString, keyStringOK := key.(string) - if !keyStringOK { - panic("objx: MSI must follow 'string, interface{}' pattern. " + keyString + " is not a valid key.") - } - - newMap[keyString] = value - - } - - return New(newMap) -} - -// ****** Conversion Constructors - -// MustFromJSON creates a new Map containing the data specified in the -// jsonString. -// -// Panics if the JSON is invalid. -func MustFromJSON(jsonString string) Map { - o, err := FromJSON(jsonString) - - if err != nil { - panic("objx: MustFromJSON failed with error: " + err.Error()) - } - - return o -} - -// FromJSON creates a new Map containing the data specified in the -// jsonString. -// -// Returns an error if the JSON is invalid. -func FromJSON(jsonString string) (Map, error) { - - var data interface{} - err := json.Unmarshal([]byte(jsonString), &data) - - if err != nil { - return Nil, err - } - - return New(data), nil - -} - -// FromBase64 creates a new Obj containing the data specified -// in the Base64 string. 
-// -// The string is an encoded JSON string returned by Base64 -func FromBase64(base64String string) (Map, error) { - - decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(base64String)) - - decoded, err := ioutil.ReadAll(decoder) - if err != nil { - return nil, err - } - - return FromJSON(string(decoded)) -} - -// MustFromBase64 creates a new Obj containing the data specified -// in the Base64 string and panics if there is an error. -// -// The string is an encoded JSON string returned by Base64 -func MustFromBase64(base64String string) Map { - - result, err := FromBase64(base64String) - - if err != nil { - panic("objx: MustFromBase64 failed with error: " + err.Error()) - } - - return result -} - -// FromSignedBase64 creates a new Obj containing the data specified -// in the Base64 string. -// -// The string is an encoded JSON string returned by SignedBase64 -func FromSignedBase64(base64String, key string) (Map, error) { - parts := strings.Split(base64String, SignatureSeparator) - if len(parts) != 2 { - return nil, errors.New("objx: Signed base64 string is malformed.") - } - - sig := HashWithKey(parts[0], key) - if parts[1] != sig { - return nil, errors.New("objx: Signature for base64 data does not match.") - } - - return FromBase64(parts[0]) -} - -// MustFromSignedBase64 creates a new Obj containing the data specified -// in the Base64 string and panics if there is an error. -// -// The string is an encoded JSON string returned by Base64 -func MustFromSignedBase64(base64String, key string) Map { - - result, err := FromSignedBase64(base64String, key) - - if err != nil { - panic("objx: MustFromSignedBase64 failed with error: " + err.Error()) - } - - return result -} - -// FromURLQuery generates a new Obj by parsing the specified -// query. -// -// For queries with multiple values, the first value is selected. 
-func FromURLQuery(query string) (Map, error) { - - vals, err := url.ParseQuery(query) - - if err != nil { - return nil, err - } - - m := make(map[string]interface{}) - for k, vals := range vals { - m[k] = vals[0] - } - - return New(m), nil -} - -// MustFromURLQuery generates a new Obj by parsing the specified -// query. -// -// For queries with multiple values, the first value is selected. -// -// Panics if it encounters an error -func MustFromURLQuery(query string) Map { - - o, err := FromURLQuery(query) - - if err != nil { - panic("objx: MustFromURLQuery failed with error: " + err.Error()) - } - - return o - -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/mutations.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/mutations.go deleted file mode 100644 index b35c8639..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/mutations.go +++ /dev/null @@ -1,81 +0,0 @@ -package objx - -// Exclude returns a new Map with the keys in the specified []string -// excluded. -func (d Map) Exclude(exclude []string) Map { - - excluded := make(Map) - for k, v := range d { - var shouldInclude bool = true - for _, toExclude := range exclude { - if k == toExclude { - shouldInclude = false - break - } - } - if shouldInclude { - excluded[k] = v - } - } - - return excluded -} - -// Copy creates a shallow copy of the Obj. -func (m Map) Copy() Map { - copied := make(map[string]interface{}) - for k, v := range m { - copied[k] = v - } - return New(copied) -} - -// Merge blends the specified map with a copy of this map and returns the result. -// -// Keys that appear in both will be selected from the specified map. -// This method requires that the wrapped object be a map[string]interface{} -func (m Map) Merge(merge Map) Map { - return m.Copy().MergeHere(merge) -} - -// Merge blends the specified map with this map and returns the current map. 
-// -// Keys that appear in both will be selected from the specified map. The original map -// will be modified. This method requires that -// the wrapped object be a map[string]interface{} -func (m Map) MergeHere(merge Map) Map { - - for k, v := range merge { - m[k] = v - } - - return m - -} - -// Transform builds a new Obj giving the transformer a chance -// to change the keys and values as it goes. This method requires that -// the wrapped object be a map[string]interface{} -func (m Map) Transform(transformer func(key string, value interface{}) (string, interface{})) Map { - newMap := make(map[string]interface{}) - for k, v := range m { - modifiedKey, modifiedVal := transformer(k, v) - newMap[modifiedKey] = modifiedVal - } - return New(newMap) -} - -// TransformKeys builds a new map using the specified key mapping. -// -// Unspecified keys will be unaltered. -// This method requires that the wrapped object be a map[string]interface{} -func (m Map) TransformKeys(mapping map[string]string) Map { - return m.Transform(func(key string, value interface{}) (string, interface{}) { - - if newKey, ok := mapping[key]; ok { - return newKey, value - } - - return key, value - }) -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/security.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/security.go deleted file mode 100644 index fdd6be9c..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/security.go +++ /dev/null @@ -1,14 +0,0 @@ -package objx - -import ( - "crypto/sha1" - "encoding/hex" -) - -// HashWithKey hashes the specified string using the security -// key. 
-func HashWithKey(data, key string) string { - hash := sha1.New() - hash.Write([]byte(data + ":" + key)) - return hex.EncodeToString(hash.Sum(nil)) -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/tests.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/tests.go deleted file mode 100644 index d9e0b479..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/tests.go +++ /dev/null @@ -1,17 +0,0 @@ -package objx - -// Has gets whether there is something at the specified selector -// or not. -// -// If m is nil, Has will always return false. -func (m Map) Has(selector string) bool { - if m == nil { - return false - } - return !m.Get(selector).IsNil() -} - -// IsNil gets whether the data is nil or not. -func (v *Value) IsNil() bool { - return v == nil || v.data == nil -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/type_specific_codegen.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/type_specific_codegen.go deleted file mode 100644 index f3ecb29b..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/type_specific_codegen.go +++ /dev/null @@ -1,2881 +0,0 @@ -package objx - -/* - Inter (interface{} and []interface{}) - -------------------------------------------------- -*/ - -// Inter gets the value as a interface{}, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Inter(optionalDefault ...interface{}) interface{} { - if s, ok := v.data.(interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInter gets the value as a interface{}. -// -// Panics if the object is not a interface{}. 
-func (v *Value) MustInter() interface{} { - return v.data.(interface{}) -} - -// InterSlice gets the value as a []interface{}, returns the optionalDefault -// value or nil if the value is not a []interface{}. -func (v *Value) InterSlice(optionalDefault ...[]interface{}) []interface{} { - if s, ok := v.data.([]interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInterSlice gets the value as a []interface{}. -// -// Panics if the object is not a []interface{}. -func (v *Value) MustInterSlice() []interface{} { - return v.data.([]interface{}) -} - -// IsInter gets whether the object contained is a interface{} or not. -func (v *Value) IsInter() bool { - _, ok := v.data.(interface{}) - return ok -} - -// IsInterSlice gets whether the object contained is a []interface{} or not. -func (v *Value) IsInterSlice() bool { - _, ok := v.data.([]interface{}) - return ok -} - -// EachInter calls the specified callback for each object -// in the []interface{}. -// -// Panics if the object is the wrong type. -func (v *Value) EachInter(callback func(int, interface{}) bool) *Value { - - for index, val := range v.MustInterSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInter uses the specified decider function to select items -// from the []interface{}. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInter(decider func(int, interface{}) bool) *Value { - - var selected []interface{} - - v.EachInter(func(index int, val interface{}) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInter uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]interface{}. 
-func (v *Value) GroupInter(grouper func(int, interface{}) string) *Value { - - groups := make(map[string][]interface{}) - - v.EachInter(func(index int, val interface{}) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]interface{}, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInter uses the specified function to replace each interface{}s -// by iterating each item. The data in the returned result will be a -// []interface{} containing the replaced items. -func (v *Value) ReplaceInter(replacer func(int, interface{}) interface{}) *Value { - - arr := v.MustInterSlice() - replaced := make([]interface{}, len(arr)) - - v.EachInter(func(index int, val interface{}) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInter uses the specified collector function to collect a value -// for each of the interface{}s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInter(collector func(int, interface{}) interface{}) *Value { - - arr := v.MustInterSlice() - collected := make([]interface{}, len(arr)) - - v.EachInter(func(index int, val interface{}) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - MSI (map[string]interface{} and []map[string]interface{}) - -------------------------------------------------- -*/ - -// MSI gets the value as a map[string]interface{}, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) MSI(optionalDefault ...map[string]interface{}) map[string]interface{} { - if s, ok := v.data.(map[string]interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustMSI gets the value as a map[string]interface{}. 
-// -// Panics if the object is not a map[string]interface{}. -func (v *Value) MustMSI() map[string]interface{} { - return v.data.(map[string]interface{}) -} - -// MSISlice gets the value as a []map[string]interface{}, returns the optionalDefault -// value or nil if the value is not a []map[string]interface{}. -func (v *Value) MSISlice(optionalDefault ...[]map[string]interface{}) []map[string]interface{} { - if s, ok := v.data.([]map[string]interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustMSISlice gets the value as a []map[string]interface{}. -// -// Panics if the object is not a []map[string]interface{}. -func (v *Value) MustMSISlice() []map[string]interface{} { - return v.data.([]map[string]interface{}) -} - -// IsMSI gets whether the object contained is a map[string]interface{} or not. -func (v *Value) IsMSI() bool { - _, ok := v.data.(map[string]interface{}) - return ok -} - -// IsMSISlice gets whether the object contained is a []map[string]interface{} or not. -func (v *Value) IsMSISlice() bool { - _, ok := v.data.([]map[string]interface{}) - return ok -} - -// EachMSI calls the specified callback for each object -// in the []map[string]interface{}. -// -// Panics if the object is the wrong type. -func (v *Value) EachMSI(callback func(int, map[string]interface{}) bool) *Value { - - for index, val := range v.MustMSISlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereMSI uses the specified decider function to select items -// from the []map[string]interface{}. The object contained in the result will contain -// only the selected items. 
-func (v *Value) WhereMSI(decider func(int, map[string]interface{}) bool) *Value { - - var selected []map[string]interface{} - - v.EachMSI(func(index int, val map[string]interface{}) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupMSI uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]map[string]interface{}. -func (v *Value) GroupMSI(grouper func(int, map[string]interface{}) string) *Value { - - groups := make(map[string][]map[string]interface{}) - - v.EachMSI(func(index int, val map[string]interface{}) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]map[string]interface{}, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceMSI uses the specified function to replace each map[string]interface{}s -// by iterating each item. The data in the returned result will be a -// []map[string]interface{} containing the replaced items. -func (v *Value) ReplaceMSI(replacer func(int, map[string]interface{}) map[string]interface{}) *Value { - - arr := v.MustMSISlice() - replaced := make([]map[string]interface{}, len(arr)) - - v.EachMSI(func(index int, val map[string]interface{}) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectMSI uses the specified collector function to collect a value -// for each of the map[string]interface{}s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectMSI(collector func(int, map[string]interface{}) interface{}) *Value { - - arr := v.MustMSISlice() - collected := make([]interface{}, len(arr)) - - v.EachMSI(func(index int, val map[string]interface{}) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - ObjxMap ((Map) and [](Map)) - -------------------------------------------------- -*/ - -// ObjxMap gets the value as a (Map), returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) ObjxMap(optionalDefault ...(Map)) Map { - if s, ok := v.data.((Map)); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return New(nil) -} - -// MustObjxMap gets the value as a (Map). -// -// Panics if the object is not a (Map). -func (v *Value) MustObjxMap() Map { - return v.data.((Map)) -} - -// ObjxMapSlice gets the value as a [](Map), returns the optionalDefault -// value or nil if the value is not a [](Map). -func (v *Value) ObjxMapSlice(optionalDefault ...[](Map)) [](Map) { - if s, ok := v.data.([](Map)); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustObjxMapSlice gets the value as a [](Map). -// -// Panics if the object is not a [](Map). -func (v *Value) MustObjxMapSlice() [](Map) { - return v.data.([](Map)) -} - -// IsObjxMap gets whether the object contained is a (Map) or not. -func (v *Value) IsObjxMap() bool { - _, ok := v.data.((Map)) - return ok -} - -// IsObjxMapSlice gets whether the object contained is a [](Map) or not. -func (v *Value) IsObjxMapSlice() bool { - _, ok := v.data.([](Map)) - return ok -} - -// EachObjxMap calls the specified callback for each object -// in the [](Map). -// -// Panics if the object is the wrong type. 
-func (v *Value) EachObjxMap(callback func(int, Map) bool) *Value { - - for index, val := range v.MustObjxMapSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereObjxMap uses the specified decider function to select items -// from the [](Map). The object contained in the result will contain -// only the selected items. -func (v *Value) WhereObjxMap(decider func(int, Map) bool) *Value { - - var selected [](Map) - - v.EachObjxMap(func(index int, val Map) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupObjxMap uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][](Map). -func (v *Value) GroupObjxMap(grouper func(int, Map) string) *Value { - - groups := make(map[string][](Map)) - - v.EachObjxMap(func(index int, val Map) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([](Map), 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceObjxMap uses the specified function to replace each (Map)s -// by iterating each item. The data in the returned result will be a -// [](Map) containing the replaced items. -func (v *Value) ReplaceObjxMap(replacer func(int, Map) Map) *Value { - - arr := v.MustObjxMapSlice() - replaced := make([](Map), len(arr)) - - v.EachObjxMap(func(index int, val Map) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectObjxMap uses the specified collector function to collect a value -// for each of the (Map)s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectObjxMap(collector func(int, Map) interface{}) *Value { - - arr := v.MustObjxMapSlice() - collected := make([]interface{}, len(arr)) - - v.EachObjxMap(func(index int, val Map) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Bool (bool and []bool) - -------------------------------------------------- -*/ - -// Bool gets the value as a bool, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Bool(optionalDefault ...bool) bool { - if s, ok := v.data.(bool); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return false -} - -// MustBool gets the value as a bool. -// -// Panics if the object is not a bool. -func (v *Value) MustBool() bool { - return v.data.(bool) -} - -// BoolSlice gets the value as a []bool, returns the optionalDefault -// value or nil if the value is not a []bool. -func (v *Value) BoolSlice(optionalDefault ...[]bool) []bool { - if s, ok := v.data.([]bool); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustBoolSlice gets the value as a []bool. -// -// Panics if the object is not a []bool. -func (v *Value) MustBoolSlice() []bool { - return v.data.([]bool) -} - -// IsBool gets whether the object contained is a bool or not. -func (v *Value) IsBool() bool { - _, ok := v.data.(bool) - return ok -} - -// IsBoolSlice gets whether the object contained is a []bool or not. -func (v *Value) IsBoolSlice() bool { - _, ok := v.data.([]bool) - return ok -} - -// EachBool calls the specified callback for each object -// in the []bool. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachBool(callback func(int, bool) bool) *Value { - - for index, val := range v.MustBoolSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereBool uses the specified decider function to select items -// from the []bool. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereBool(decider func(int, bool) bool) *Value { - - var selected []bool - - v.EachBool(func(index int, val bool) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupBool uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]bool. -func (v *Value) GroupBool(grouper func(int, bool) string) *Value { - - groups := make(map[string][]bool) - - v.EachBool(func(index int, val bool) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]bool, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceBool uses the specified function to replace each bools -// by iterating each item. The data in the returned result will be a -// []bool containing the replaced items. -func (v *Value) ReplaceBool(replacer func(int, bool) bool) *Value { - - arr := v.MustBoolSlice() - replaced := make([]bool, len(arr)) - - v.EachBool(func(index int, val bool) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectBool uses the specified collector function to collect a value -// for each of the bools in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectBool(collector func(int, bool) interface{}) *Value { - - arr := v.MustBoolSlice() - collected := make([]interface{}, len(arr)) - - v.EachBool(func(index int, val bool) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Str (string and []string) - -------------------------------------------------- -*/ - -// Str gets the value as a string, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Str(optionalDefault ...string) string { - if s, ok := v.data.(string); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return "" -} - -// MustStr gets the value as a string. -// -// Panics if the object is not a string. -func (v *Value) MustStr() string { - return v.data.(string) -} - -// StrSlice gets the value as a []string, returns the optionalDefault -// value or nil if the value is not a []string. -func (v *Value) StrSlice(optionalDefault ...[]string) []string { - if s, ok := v.data.([]string); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustStrSlice gets the value as a []string. -// -// Panics if the object is not a []string. -func (v *Value) MustStrSlice() []string { - return v.data.([]string) -} - -// IsStr gets whether the object contained is a string or not. -func (v *Value) IsStr() bool { - _, ok := v.data.(string) - return ok -} - -// IsStrSlice gets whether the object contained is a []string or not. -func (v *Value) IsStrSlice() bool { - _, ok := v.data.([]string) - return ok -} - -// EachStr calls the specified callback for each object -// in the []string. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachStr(callback func(int, string) bool) *Value { - - for index, val := range v.MustStrSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereStr uses the specified decider function to select items -// from the []string. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereStr(decider func(int, string) bool) *Value { - - var selected []string - - v.EachStr(func(index int, val string) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupStr uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]string. -func (v *Value) GroupStr(grouper func(int, string) string) *Value { - - groups := make(map[string][]string) - - v.EachStr(func(index int, val string) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]string, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceStr uses the specified function to replace each strings -// by iterating each item. The data in the returned result will be a -// []string containing the replaced items. -func (v *Value) ReplaceStr(replacer func(int, string) string) *Value { - - arr := v.MustStrSlice() - replaced := make([]string, len(arr)) - - v.EachStr(func(index int, val string) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectStr uses the specified collector function to collect a value -// for each of the strings in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectStr(collector func(int, string) interface{}) *Value { - - arr := v.MustStrSlice() - collected := make([]interface{}, len(arr)) - - v.EachStr(func(index int, val string) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Int (int and []int) - -------------------------------------------------- -*/ - -// Int gets the value as a int, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int(optionalDefault ...int) int { - if s, ok := v.data.(int); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt gets the value as a int. -// -// Panics if the object is not a int. -func (v *Value) MustInt() int { - return v.data.(int) -} - -// IntSlice gets the value as a []int, returns the optionalDefault -// value or nil if the value is not a []int. -func (v *Value) IntSlice(optionalDefault ...[]int) []int { - if s, ok := v.data.([]int); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustIntSlice gets the value as a []int. -// -// Panics if the object is not a []int. -func (v *Value) MustIntSlice() []int { - return v.data.([]int) -} - -// IsInt gets whether the object contained is a int or not. -func (v *Value) IsInt() bool { - _, ok := v.data.(int) - return ok -} - -// IsIntSlice gets whether the object contained is a []int or not. -func (v *Value) IsIntSlice() bool { - _, ok := v.data.([]int) - return ok -} - -// EachInt calls the specified callback for each object -// in the []int. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt(callback func(int, int) bool) *Value { - - for index, val := range v.MustIntSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInt uses the specified decider function to select items -// from the []int. 
The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt(decider func(int, int) bool) *Value { - - var selected []int - - v.EachInt(func(index int, val int) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInt uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int. -func (v *Value) GroupInt(grouper func(int, int) string) *Value { - - groups := make(map[string][]int) - - v.EachInt(func(index int, val int) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInt uses the specified function to replace each ints -// by iterating each item. The data in the returned result will be a -// []int containing the replaced items. -func (v *Value) ReplaceInt(replacer func(int, int) int) *Value { - - arr := v.MustIntSlice() - replaced := make([]int, len(arr)) - - v.EachInt(func(index int, val int) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInt uses the specified collector function to collect a value -// for each of the ints in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectInt(collector func(int, int) interface{}) *Value { - - arr := v.MustIntSlice() - collected := make([]interface{}, len(arr)) - - v.EachInt(func(index int, val int) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Int8 (int8 and []int8) - -------------------------------------------------- -*/ - -// Int8 gets the value as a int8, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int8(optionalDefault ...int8) int8 { - if s, ok := v.data.(int8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt8 gets the value as a int8. -// -// Panics if the object is not a int8. -func (v *Value) MustInt8() int8 { - return v.data.(int8) -} - -// Int8Slice gets the value as a []int8, returns the optionalDefault -// value or nil if the value is not a []int8. -func (v *Value) Int8Slice(optionalDefault ...[]int8) []int8 { - if s, ok := v.data.([]int8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt8Slice gets the value as a []int8. -// -// Panics if the object is not a []int8. -func (v *Value) MustInt8Slice() []int8 { - return v.data.([]int8) -} - -// IsInt8 gets whether the object contained is a int8 or not. -func (v *Value) IsInt8() bool { - _, ok := v.data.(int8) - return ok -} - -// IsInt8Slice gets whether the object contained is a []int8 or not. -func (v *Value) IsInt8Slice() bool { - _, ok := v.data.([]int8) - return ok -} - -// EachInt8 calls the specified callback for each object -// in the []int8. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachInt8(callback func(int, int8) bool) *Value { - - for index, val := range v.MustInt8Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInt8 uses the specified decider function to select items -// from the []int8. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt8(decider func(int, int8) bool) *Value { - - var selected []int8 - - v.EachInt8(func(index int, val int8) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInt8 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int8. -func (v *Value) GroupInt8(grouper func(int, int8) string) *Value { - - groups := make(map[string][]int8) - - v.EachInt8(func(index int, val int8) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int8, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInt8 uses the specified function to replace each int8s -// by iterating each item. The data in the returned result will be a -// []int8 containing the replaced items. -func (v *Value) ReplaceInt8(replacer func(int, int8) int8) *Value { - - arr := v.MustInt8Slice() - replaced := make([]int8, len(arr)) - - v.EachInt8(func(index int, val int8) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInt8 uses the specified collector function to collect a value -// for each of the int8s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectInt8(collector func(int, int8) interface{}) *Value { - - arr := v.MustInt8Slice() - collected := make([]interface{}, len(arr)) - - v.EachInt8(func(index int, val int8) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Int16 (int16 and []int16) - -------------------------------------------------- -*/ - -// Int16 gets the value as a int16, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int16(optionalDefault ...int16) int16 { - if s, ok := v.data.(int16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt16 gets the value as a int16. -// -// Panics if the object is not a int16. -func (v *Value) MustInt16() int16 { - return v.data.(int16) -} - -// Int16Slice gets the value as a []int16, returns the optionalDefault -// value or nil if the value is not a []int16. -func (v *Value) Int16Slice(optionalDefault ...[]int16) []int16 { - if s, ok := v.data.([]int16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt16Slice gets the value as a []int16. -// -// Panics if the object is not a []int16. -func (v *Value) MustInt16Slice() []int16 { - return v.data.([]int16) -} - -// IsInt16 gets whether the object contained is a int16 or not. -func (v *Value) IsInt16() bool { - _, ok := v.data.(int16) - return ok -} - -// IsInt16Slice gets whether the object contained is a []int16 or not. -func (v *Value) IsInt16Slice() bool { - _, ok := v.data.([]int16) - return ok -} - -// EachInt16 calls the specified callback for each object -// in the []int16. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachInt16(callback func(int, int16) bool) *Value { - - for index, val := range v.MustInt16Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInt16 uses the specified decider function to select items -// from the []int16. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt16(decider func(int, int16) bool) *Value { - - var selected []int16 - - v.EachInt16(func(index int, val int16) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInt16 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int16. -func (v *Value) GroupInt16(grouper func(int, int16) string) *Value { - - groups := make(map[string][]int16) - - v.EachInt16(func(index int, val int16) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int16, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInt16 uses the specified function to replace each int16s -// by iterating each item. The data in the returned result will be a -// []int16 containing the replaced items. -func (v *Value) ReplaceInt16(replacer func(int, int16) int16) *Value { - - arr := v.MustInt16Slice() - replaced := make([]int16, len(arr)) - - v.EachInt16(func(index int, val int16) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInt16 uses the specified collector function to collect a value -// for each of the int16s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectInt16(collector func(int, int16) interface{}) *Value { - - arr := v.MustInt16Slice() - collected := make([]interface{}, len(arr)) - - v.EachInt16(func(index int, val int16) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Int32 (int32 and []int32) - -------------------------------------------------- -*/ - -// Int32 gets the value as a int32, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int32(optionalDefault ...int32) int32 { - if s, ok := v.data.(int32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt32 gets the value as a int32. -// -// Panics if the object is not a int32. -func (v *Value) MustInt32() int32 { - return v.data.(int32) -} - -// Int32Slice gets the value as a []int32, returns the optionalDefault -// value or nil if the value is not a []int32. -func (v *Value) Int32Slice(optionalDefault ...[]int32) []int32 { - if s, ok := v.data.([]int32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt32Slice gets the value as a []int32. -// -// Panics if the object is not a []int32. -func (v *Value) MustInt32Slice() []int32 { - return v.data.([]int32) -} - -// IsInt32 gets whether the object contained is a int32 or not. -func (v *Value) IsInt32() bool { - _, ok := v.data.(int32) - return ok -} - -// IsInt32Slice gets whether the object contained is a []int32 or not. -func (v *Value) IsInt32Slice() bool { - _, ok := v.data.([]int32) - return ok -} - -// EachInt32 calls the specified callback for each object -// in the []int32. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachInt32(callback func(int, int32) bool) *Value { - - for index, val := range v.MustInt32Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInt32 uses the specified decider function to select items -// from the []int32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt32(decider func(int, int32) bool) *Value { - - var selected []int32 - - v.EachInt32(func(index int, val int32) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInt32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int32. -func (v *Value) GroupInt32(grouper func(int, int32) string) *Value { - - groups := make(map[string][]int32) - - v.EachInt32(func(index int, val int32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInt32 uses the specified function to replace each int32s -// by iterating each item. The data in the returned result will be a -// []int32 containing the replaced items. -func (v *Value) ReplaceInt32(replacer func(int, int32) int32) *Value { - - arr := v.MustInt32Slice() - replaced := make([]int32, len(arr)) - - v.EachInt32(func(index int, val int32) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInt32 uses the specified collector function to collect a value -// for each of the int32s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectInt32(collector func(int, int32) interface{}) *Value { - - arr := v.MustInt32Slice() - collected := make([]interface{}, len(arr)) - - v.EachInt32(func(index int, val int32) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Int64 (int64 and []int64) - -------------------------------------------------- -*/ - -// Int64 gets the value as a int64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int64(optionalDefault ...int64) int64 { - if s, ok := v.data.(int64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt64 gets the value as a int64. -// -// Panics if the object is not a int64. -func (v *Value) MustInt64() int64 { - return v.data.(int64) -} - -// Int64Slice gets the value as a []int64, returns the optionalDefault -// value or nil if the value is not a []int64. -func (v *Value) Int64Slice(optionalDefault ...[]int64) []int64 { - if s, ok := v.data.([]int64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt64Slice gets the value as a []int64. -// -// Panics if the object is not a []int64. -func (v *Value) MustInt64Slice() []int64 { - return v.data.([]int64) -} - -// IsInt64 gets whether the object contained is a int64 or not. -func (v *Value) IsInt64() bool { - _, ok := v.data.(int64) - return ok -} - -// IsInt64Slice gets whether the object contained is a []int64 or not. -func (v *Value) IsInt64Slice() bool { - _, ok := v.data.([]int64) - return ok -} - -// EachInt64 calls the specified callback for each object -// in the []int64. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachInt64(callback func(int, int64) bool) *Value { - - for index, val := range v.MustInt64Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereInt64 uses the specified decider function to select items -// from the []int64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt64(decider func(int, int64) bool) *Value { - - var selected []int64 - - v.EachInt64(func(index int, val int64) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupInt64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int64. -func (v *Value) GroupInt64(grouper func(int, int64) string) *Value { - - groups := make(map[string][]int64) - - v.EachInt64(func(index int, val int64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceInt64 uses the specified function to replace each int64s -// by iterating each item. The data in the returned result will be a -// []int64 containing the replaced items. -func (v *Value) ReplaceInt64(replacer func(int, int64) int64) *Value { - - arr := v.MustInt64Slice() - replaced := make([]int64, len(arr)) - - v.EachInt64(func(index int, val int64) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectInt64 uses the specified collector function to collect a value -// for each of the int64s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectInt64(collector func(int, int64) interface{}) *Value { - - arr := v.MustInt64Slice() - collected := make([]interface{}, len(arr)) - - v.EachInt64(func(index int, val int64) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uint (uint and []uint) - -------------------------------------------------- -*/ - -// Uint gets the value as a uint, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint(optionalDefault ...uint) uint { - if s, ok := v.data.(uint); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint gets the value as a uint. -// -// Panics if the object is not a uint. -func (v *Value) MustUint() uint { - return v.data.(uint) -} - -// UintSlice gets the value as a []uint, returns the optionalDefault -// value or nil if the value is not a []uint. -func (v *Value) UintSlice(optionalDefault ...[]uint) []uint { - if s, ok := v.data.([]uint); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUintSlice gets the value as a []uint. -// -// Panics if the object is not a []uint. -func (v *Value) MustUintSlice() []uint { - return v.data.([]uint) -} - -// IsUint gets whether the object contained is a uint or not. -func (v *Value) IsUint() bool { - _, ok := v.data.(uint) - return ok -} - -// IsUintSlice gets whether the object contained is a []uint or not. -func (v *Value) IsUintSlice() bool { - _, ok := v.data.([]uint) - return ok -} - -// EachUint calls the specified callback for each object -// in the []uint. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUint(callback func(int, uint) bool) *Value { - - for index, val := range v.MustUintSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUint uses the specified decider function to select items -// from the []uint. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint(decider func(int, uint) bool) *Value { - - var selected []uint - - v.EachUint(func(index int, val uint) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUint uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint. -func (v *Value) GroupUint(grouper func(int, uint) string) *Value { - - groups := make(map[string][]uint) - - v.EachUint(func(index int, val uint) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUint uses the specified function to replace each uints -// by iterating each item. The data in the returned result will be a -// []uint containing the replaced items. -func (v *Value) ReplaceUint(replacer func(int, uint) uint) *Value { - - arr := v.MustUintSlice() - replaced := make([]uint, len(arr)) - - v.EachUint(func(index int, val uint) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUint uses the specified collector function to collect a value -// for each of the uints in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUint(collector func(int, uint) interface{}) *Value { - - arr := v.MustUintSlice() - collected := make([]interface{}, len(arr)) - - v.EachUint(func(index int, val uint) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uint8 (uint8 and []uint8) - -------------------------------------------------- -*/ - -// Uint8 gets the value as a uint8, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint8(optionalDefault ...uint8) uint8 { - if s, ok := v.data.(uint8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint8 gets the value as a uint8. -// -// Panics if the object is not a uint8. -func (v *Value) MustUint8() uint8 { - return v.data.(uint8) -} - -// Uint8Slice gets the value as a []uint8, returns the optionalDefault -// value or nil if the value is not a []uint8. -func (v *Value) Uint8Slice(optionalDefault ...[]uint8) []uint8 { - if s, ok := v.data.([]uint8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint8Slice gets the value as a []uint8. -// -// Panics if the object is not a []uint8. -func (v *Value) MustUint8Slice() []uint8 { - return v.data.([]uint8) -} - -// IsUint8 gets whether the object contained is a uint8 or not. -func (v *Value) IsUint8() bool { - _, ok := v.data.(uint8) - return ok -} - -// IsUint8Slice gets whether the object contained is a []uint8 or not. -func (v *Value) IsUint8Slice() bool { - _, ok := v.data.([]uint8) - return ok -} - -// EachUint8 calls the specified callback for each object -// in the []uint8. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUint8(callback func(int, uint8) bool) *Value { - - for index, val := range v.MustUint8Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUint8 uses the specified decider function to select items -// from the []uint8. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint8(decider func(int, uint8) bool) *Value { - - var selected []uint8 - - v.EachUint8(func(index int, val uint8) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUint8 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint8. -func (v *Value) GroupUint8(grouper func(int, uint8) string) *Value { - - groups := make(map[string][]uint8) - - v.EachUint8(func(index int, val uint8) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint8, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUint8 uses the specified function to replace each uint8s -// by iterating each item. The data in the returned result will be a -// []uint8 containing the replaced items. -func (v *Value) ReplaceUint8(replacer func(int, uint8) uint8) *Value { - - arr := v.MustUint8Slice() - replaced := make([]uint8, len(arr)) - - v.EachUint8(func(index int, val uint8) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUint8 uses the specified collector function to collect a value -// for each of the uint8s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUint8(collector func(int, uint8) interface{}) *Value { - - arr := v.MustUint8Slice() - collected := make([]interface{}, len(arr)) - - v.EachUint8(func(index int, val uint8) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uint16 (uint16 and []uint16) - -------------------------------------------------- -*/ - -// Uint16 gets the value as a uint16, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint16(optionalDefault ...uint16) uint16 { - if s, ok := v.data.(uint16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint16 gets the value as a uint16. -// -// Panics if the object is not a uint16. -func (v *Value) MustUint16() uint16 { - return v.data.(uint16) -} - -// Uint16Slice gets the value as a []uint16, returns the optionalDefault -// value or nil if the value is not a []uint16. -func (v *Value) Uint16Slice(optionalDefault ...[]uint16) []uint16 { - if s, ok := v.data.([]uint16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint16Slice gets the value as a []uint16. -// -// Panics if the object is not a []uint16. -func (v *Value) MustUint16Slice() []uint16 { - return v.data.([]uint16) -} - -// IsUint16 gets whether the object contained is a uint16 or not. -func (v *Value) IsUint16() bool { - _, ok := v.data.(uint16) - return ok -} - -// IsUint16Slice gets whether the object contained is a []uint16 or not. -func (v *Value) IsUint16Slice() bool { - _, ok := v.data.([]uint16) - return ok -} - -// EachUint16 calls the specified callback for each object -// in the []uint16. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUint16(callback func(int, uint16) bool) *Value { - - for index, val := range v.MustUint16Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUint16 uses the specified decider function to select items -// from the []uint16. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint16(decider func(int, uint16) bool) *Value { - - var selected []uint16 - - v.EachUint16(func(index int, val uint16) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUint16 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint16. -func (v *Value) GroupUint16(grouper func(int, uint16) string) *Value { - - groups := make(map[string][]uint16) - - v.EachUint16(func(index int, val uint16) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint16, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUint16 uses the specified function to replace each uint16s -// by iterating each item. The data in the returned result will be a -// []uint16 containing the replaced items. -func (v *Value) ReplaceUint16(replacer func(int, uint16) uint16) *Value { - - arr := v.MustUint16Slice() - replaced := make([]uint16, len(arr)) - - v.EachUint16(func(index int, val uint16) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUint16 uses the specified collector function to collect a value -// for each of the uint16s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUint16(collector func(int, uint16) interface{}) *Value { - - arr := v.MustUint16Slice() - collected := make([]interface{}, len(arr)) - - v.EachUint16(func(index int, val uint16) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uint32 (uint32 and []uint32) - -------------------------------------------------- -*/ - -// Uint32 gets the value as a uint32, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint32(optionalDefault ...uint32) uint32 { - if s, ok := v.data.(uint32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint32 gets the value as a uint32. -// -// Panics if the object is not a uint32. -func (v *Value) MustUint32() uint32 { - return v.data.(uint32) -} - -// Uint32Slice gets the value as a []uint32, returns the optionalDefault -// value or nil if the value is not a []uint32. -func (v *Value) Uint32Slice(optionalDefault ...[]uint32) []uint32 { - if s, ok := v.data.([]uint32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint32Slice gets the value as a []uint32. -// -// Panics if the object is not a []uint32. -func (v *Value) MustUint32Slice() []uint32 { - return v.data.([]uint32) -} - -// IsUint32 gets whether the object contained is a uint32 or not. -func (v *Value) IsUint32() bool { - _, ok := v.data.(uint32) - return ok -} - -// IsUint32Slice gets whether the object contained is a []uint32 or not. -func (v *Value) IsUint32Slice() bool { - _, ok := v.data.([]uint32) - return ok -} - -// EachUint32 calls the specified callback for each object -// in the []uint32. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUint32(callback func(int, uint32) bool) *Value { - - for index, val := range v.MustUint32Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUint32 uses the specified decider function to select items -// from the []uint32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint32(decider func(int, uint32) bool) *Value { - - var selected []uint32 - - v.EachUint32(func(index int, val uint32) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUint32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint32. -func (v *Value) GroupUint32(grouper func(int, uint32) string) *Value { - - groups := make(map[string][]uint32) - - v.EachUint32(func(index int, val uint32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUint32 uses the specified function to replace each uint32s -// by iterating each item. The data in the returned result will be a -// []uint32 containing the replaced items. -func (v *Value) ReplaceUint32(replacer func(int, uint32) uint32) *Value { - - arr := v.MustUint32Slice() - replaced := make([]uint32, len(arr)) - - v.EachUint32(func(index int, val uint32) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUint32 uses the specified collector function to collect a value -// for each of the uint32s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUint32(collector func(int, uint32) interface{}) *Value { - - arr := v.MustUint32Slice() - collected := make([]interface{}, len(arr)) - - v.EachUint32(func(index int, val uint32) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uint64 (uint64 and []uint64) - -------------------------------------------------- -*/ - -// Uint64 gets the value as a uint64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint64(optionalDefault ...uint64) uint64 { - if s, ok := v.data.(uint64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint64 gets the value as a uint64. -// -// Panics if the object is not a uint64. -func (v *Value) MustUint64() uint64 { - return v.data.(uint64) -} - -// Uint64Slice gets the value as a []uint64, returns the optionalDefault -// value or nil if the value is not a []uint64. -func (v *Value) Uint64Slice(optionalDefault ...[]uint64) []uint64 { - if s, ok := v.data.([]uint64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint64Slice gets the value as a []uint64. -// -// Panics if the object is not a []uint64. -func (v *Value) MustUint64Slice() []uint64 { - return v.data.([]uint64) -} - -// IsUint64 gets whether the object contained is a uint64 or not. -func (v *Value) IsUint64() bool { - _, ok := v.data.(uint64) - return ok -} - -// IsUint64Slice gets whether the object contained is a []uint64 or not. -func (v *Value) IsUint64Slice() bool { - _, ok := v.data.([]uint64) - return ok -} - -// EachUint64 calls the specified callback for each object -// in the []uint64. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUint64(callback func(int, uint64) bool) *Value { - - for index, val := range v.MustUint64Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUint64 uses the specified decider function to select items -// from the []uint64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint64(decider func(int, uint64) bool) *Value { - - var selected []uint64 - - v.EachUint64(func(index int, val uint64) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUint64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint64. -func (v *Value) GroupUint64(grouper func(int, uint64) string) *Value { - - groups := make(map[string][]uint64) - - v.EachUint64(func(index int, val uint64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUint64 uses the specified function to replace each uint64s -// by iterating each item. The data in the returned result will be a -// []uint64 containing the replaced items. -func (v *Value) ReplaceUint64(replacer func(int, uint64) uint64) *Value { - - arr := v.MustUint64Slice() - replaced := make([]uint64, len(arr)) - - v.EachUint64(func(index int, val uint64) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUint64 uses the specified collector function to collect a value -// for each of the uint64s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUint64(collector func(int, uint64) interface{}) *Value { - - arr := v.MustUint64Slice() - collected := make([]interface{}, len(arr)) - - v.EachUint64(func(index int, val uint64) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Uintptr (uintptr and []uintptr) - -------------------------------------------------- -*/ - -// Uintptr gets the value as a uintptr, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uintptr(optionalDefault ...uintptr) uintptr { - if s, ok := v.data.(uintptr); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUintptr gets the value as a uintptr. -// -// Panics if the object is not a uintptr. -func (v *Value) MustUintptr() uintptr { - return v.data.(uintptr) -} - -// UintptrSlice gets the value as a []uintptr, returns the optionalDefault -// value or nil if the value is not a []uintptr. -func (v *Value) UintptrSlice(optionalDefault ...[]uintptr) []uintptr { - if s, ok := v.data.([]uintptr); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUintptrSlice gets the value as a []uintptr. -// -// Panics if the object is not a []uintptr. -func (v *Value) MustUintptrSlice() []uintptr { - return v.data.([]uintptr) -} - -// IsUintptr gets whether the object contained is a uintptr or not. -func (v *Value) IsUintptr() bool { - _, ok := v.data.(uintptr) - return ok -} - -// IsUintptrSlice gets whether the object contained is a []uintptr or not. -func (v *Value) IsUintptrSlice() bool { - _, ok := v.data.([]uintptr) - return ok -} - -// EachUintptr calls the specified callback for each object -// in the []uintptr. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachUintptr(callback func(int, uintptr) bool) *Value { - - for index, val := range v.MustUintptrSlice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereUintptr uses the specified decider function to select items -// from the []uintptr. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUintptr(decider func(int, uintptr) bool) *Value { - - var selected []uintptr - - v.EachUintptr(func(index int, val uintptr) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupUintptr uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uintptr. -func (v *Value) GroupUintptr(grouper func(int, uintptr) string) *Value { - - groups := make(map[string][]uintptr) - - v.EachUintptr(func(index int, val uintptr) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uintptr, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceUintptr uses the specified function to replace each uintptrs -// by iterating each item. The data in the returned result will be a -// []uintptr containing the replaced items. -func (v *Value) ReplaceUintptr(replacer func(int, uintptr) uintptr) *Value { - - arr := v.MustUintptrSlice() - replaced := make([]uintptr, len(arr)) - - v.EachUintptr(func(index int, val uintptr) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectUintptr uses the specified collector function to collect a value -// for each of the uintptrs in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectUintptr(collector func(int, uintptr) interface{}) *Value { - - arr := v.MustUintptrSlice() - collected := make([]interface{}, len(arr)) - - v.EachUintptr(func(index int, val uintptr) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Float32 (float32 and []float32) - -------------------------------------------------- -*/ - -// Float32 gets the value as a float32, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Float32(optionalDefault ...float32) float32 { - if s, ok := v.data.(float32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustFloat32 gets the value as a float32. -// -// Panics if the object is not a float32. -func (v *Value) MustFloat32() float32 { - return v.data.(float32) -} - -// Float32Slice gets the value as a []float32, returns the optionalDefault -// value or nil if the value is not a []float32. -func (v *Value) Float32Slice(optionalDefault ...[]float32) []float32 { - if s, ok := v.data.([]float32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustFloat32Slice gets the value as a []float32. -// -// Panics if the object is not a []float32. -func (v *Value) MustFloat32Slice() []float32 { - return v.data.([]float32) -} - -// IsFloat32 gets whether the object contained is a float32 or not. -func (v *Value) IsFloat32() bool { - _, ok := v.data.(float32) - return ok -} - -// IsFloat32Slice gets whether the object contained is a []float32 or not. -func (v *Value) IsFloat32Slice() bool { - _, ok := v.data.([]float32) - return ok -} - -// EachFloat32 calls the specified callback for each object -// in the []float32. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachFloat32(callback func(int, float32) bool) *Value { - - for index, val := range v.MustFloat32Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereFloat32 uses the specified decider function to select items -// from the []float32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereFloat32(decider func(int, float32) bool) *Value { - - var selected []float32 - - v.EachFloat32(func(index int, val float32) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupFloat32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]float32. -func (v *Value) GroupFloat32(grouper func(int, float32) string) *Value { - - groups := make(map[string][]float32) - - v.EachFloat32(func(index int, val float32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]float32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceFloat32 uses the specified function to replace each float32s -// by iterating each item. The data in the returned result will be a -// []float32 containing the replaced items. -func (v *Value) ReplaceFloat32(replacer func(int, float32) float32) *Value { - - arr := v.MustFloat32Slice() - replaced := make([]float32, len(arr)) - - v.EachFloat32(func(index int, val float32) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectFloat32 uses the specified collector function to collect a value -// for each of the float32s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectFloat32(collector func(int, float32) interface{}) *Value { - - arr := v.MustFloat32Slice() - collected := make([]interface{}, len(arr)) - - v.EachFloat32(func(index int, val float32) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Float64 (float64 and []float64) - -------------------------------------------------- -*/ - -// Float64 gets the value as a float64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Float64(optionalDefault ...float64) float64 { - if s, ok := v.data.(float64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustFloat64 gets the value as a float64. -// -// Panics if the object is not a float64. -func (v *Value) MustFloat64() float64 { - return v.data.(float64) -} - -// Float64Slice gets the value as a []float64, returns the optionalDefault -// value or nil if the value is not a []float64. -func (v *Value) Float64Slice(optionalDefault ...[]float64) []float64 { - if s, ok := v.data.([]float64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustFloat64Slice gets the value as a []float64. -// -// Panics if the object is not a []float64. -func (v *Value) MustFloat64Slice() []float64 { - return v.data.([]float64) -} - -// IsFloat64 gets whether the object contained is a float64 or not. -func (v *Value) IsFloat64() bool { - _, ok := v.data.(float64) - return ok -} - -// IsFloat64Slice gets whether the object contained is a []float64 or not. -func (v *Value) IsFloat64Slice() bool { - _, ok := v.data.([]float64) - return ok -} - -// EachFloat64 calls the specified callback for each object -// in the []float64. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachFloat64(callback func(int, float64) bool) *Value { - - for index, val := range v.MustFloat64Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereFloat64 uses the specified decider function to select items -// from the []float64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereFloat64(decider func(int, float64) bool) *Value { - - var selected []float64 - - v.EachFloat64(func(index int, val float64) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupFloat64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]float64. -func (v *Value) GroupFloat64(grouper func(int, float64) string) *Value { - - groups := make(map[string][]float64) - - v.EachFloat64(func(index int, val float64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]float64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceFloat64 uses the specified function to replace each float64s -// by iterating each item. The data in the returned result will be a -// []float64 containing the replaced items. -func (v *Value) ReplaceFloat64(replacer func(int, float64) float64) *Value { - - arr := v.MustFloat64Slice() - replaced := make([]float64, len(arr)) - - v.EachFloat64(func(index int, val float64) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectFloat64 uses the specified collector function to collect a value -// for each of the float64s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectFloat64(collector func(int, float64) interface{}) *Value { - - arr := v.MustFloat64Slice() - collected := make([]interface{}, len(arr)) - - v.EachFloat64(func(index int, val float64) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Complex64 (complex64 and []complex64) - -------------------------------------------------- -*/ - -// Complex64 gets the value as a complex64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Complex64(optionalDefault ...complex64) complex64 { - if s, ok := v.data.(complex64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustComplex64 gets the value as a complex64. -// -// Panics if the object is not a complex64. -func (v *Value) MustComplex64() complex64 { - return v.data.(complex64) -} - -// Complex64Slice gets the value as a []complex64, returns the optionalDefault -// value or nil if the value is not a []complex64. -func (v *Value) Complex64Slice(optionalDefault ...[]complex64) []complex64 { - if s, ok := v.data.([]complex64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustComplex64Slice gets the value as a []complex64. -// -// Panics if the object is not a []complex64. -func (v *Value) MustComplex64Slice() []complex64 { - return v.data.([]complex64) -} - -// IsComplex64 gets whether the object contained is a complex64 or not. -func (v *Value) IsComplex64() bool { - _, ok := v.data.(complex64) - return ok -} - -// IsComplex64Slice gets whether the object contained is a []complex64 or not. -func (v *Value) IsComplex64Slice() bool { - _, ok := v.data.([]complex64) - return ok -} - -// EachComplex64 calls the specified callback for each object -// in the []complex64. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachComplex64(callback func(int, complex64) bool) *Value { - - for index, val := range v.MustComplex64Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereComplex64 uses the specified decider function to select items -// from the []complex64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereComplex64(decider func(int, complex64) bool) *Value { - - var selected []complex64 - - v.EachComplex64(func(index int, val complex64) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupComplex64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]complex64. -func (v *Value) GroupComplex64(grouper func(int, complex64) string) *Value { - - groups := make(map[string][]complex64) - - v.EachComplex64(func(index int, val complex64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]complex64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceComplex64 uses the specified function to replace each complex64s -// by iterating each item. The data in the returned result will be a -// []complex64 containing the replaced items. -func (v *Value) ReplaceComplex64(replacer func(int, complex64) complex64) *Value { - - arr := v.MustComplex64Slice() - replaced := make([]complex64, len(arr)) - - v.EachComplex64(func(index int, val complex64) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectComplex64 uses the specified collector function to collect a value -// for each of the complex64s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectComplex64(collector func(int, complex64) interface{}) *Value { - - arr := v.MustComplex64Slice() - collected := make([]interface{}, len(arr)) - - v.EachComplex64(func(index int, val complex64) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} - -/* - Complex128 (complex128 and []complex128) - -------------------------------------------------- -*/ - -// Complex128 gets the value as a complex128, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Complex128(optionalDefault ...complex128) complex128 { - if s, ok := v.data.(complex128); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustComplex128 gets the value as a complex128. -// -// Panics if the object is not a complex128. -func (v *Value) MustComplex128() complex128 { - return v.data.(complex128) -} - -// Complex128Slice gets the value as a []complex128, returns the optionalDefault -// value or nil if the value is not a []complex128. -func (v *Value) Complex128Slice(optionalDefault ...[]complex128) []complex128 { - if s, ok := v.data.([]complex128); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustComplex128Slice gets the value as a []complex128. -// -// Panics if the object is not a []complex128. -func (v *Value) MustComplex128Slice() []complex128 { - return v.data.([]complex128) -} - -// IsComplex128 gets whether the object contained is a complex128 or not. -func (v *Value) IsComplex128() bool { - _, ok := v.data.(complex128) - return ok -} - -// IsComplex128Slice gets whether the object contained is a []complex128 or not. -func (v *Value) IsComplex128Slice() bool { - _, ok := v.data.([]complex128) - return ok -} - -// EachComplex128 calls the specified callback for each object -// in the []complex128. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachComplex128(callback func(int, complex128) bool) *Value { - - for index, val := range v.MustComplex128Slice() { - carryon := callback(index, val) - if carryon == false { - break - } - } - - return v - -} - -// WhereComplex128 uses the specified decider function to select items -// from the []complex128. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereComplex128(decider func(int, complex128) bool) *Value { - - var selected []complex128 - - v.EachComplex128(func(index int, val complex128) bool { - shouldSelect := decider(index, val) - if shouldSelect == false { - selected = append(selected, val) - } - return true - }) - - return &Value{data: selected} - -} - -// GroupComplex128 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]complex128. -func (v *Value) GroupComplex128(grouper func(int, complex128) string) *Value { - - groups := make(map[string][]complex128) - - v.EachComplex128(func(index int, val complex128) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]complex128, 0) - } - groups[group] = append(groups[group], val) - return true - }) - - return &Value{data: groups} - -} - -// ReplaceComplex128 uses the specified function to replace each complex128s -// by iterating each item. The data in the returned result will be a -// []complex128 containing the replaced items. -func (v *Value) ReplaceComplex128(replacer func(int, complex128) complex128) *Value { - - arr := v.MustComplex128Slice() - replaced := make([]complex128, len(arr)) - - v.EachComplex128(func(index int, val complex128) bool { - replaced[index] = replacer(index, val) - return true - }) - - return &Value{data: replaced} - -} - -// CollectComplex128 uses the specified collector function to collect a value -// for each of the complex128s in the slice. 
The data returned will be a -// []interface{}. -func (v *Value) CollectComplex128(collector func(int, complex128) interface{}) *Value { - - arr := v.MustComplex128Slice() - collected := make([]interface{}, len(arr)) - - v.EachComplex128(func(index int, val complex128) bool { - collected[index] = collector(index, val) - return true - }) - - return &Value{data: collected} -} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/value.go b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/value.go deleted file mode 100644 index 7aaef06b..00000000 --- a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/value.go +++ /dev/null @@ -1,13 +0,0 @@ -package objx - -// Value provides methods for extracting interface{} data in various -// types. -type Value struct { - // data contains the raw data being managed by this Value - data interface{} -} - -// Data returns the raw data contained by this Value -func (v *Value) Data() interface{} { - return v.data -}