diff --git a/glide.lock b/glide.lock index e1ff905e..1e5cc0f4 100644 --- a/glide.lock +++ b/glide.lock @@ -1,8 +1,8 @@ hash: d9cd9bf3ab1048a80f5ad90e05a7ea4c7614c30c561199b71147609de600a524 -updated: 2018-01-17T13:29:36.176987+01:00 +updated: 2018-05-24T13:39:22.013349+02:00 imports: - name: github.com/beorn7/perks - version: 4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9 + version: 3a771d992973f24aa725d07868b467d1ddfceafb subpackages: - quantile - name: github.com/cockroachdb/cmux @@ -12,13 +12,13 @@ imports: subpackages: - timeutil - name: github.com/davecgh/go-spew - version: 6d212800a42e8ab5c146b8ace3490ee17e5225f9 + version: 8991bc29aa16c548c550c7ff78260e27b9ab7c73 subpackages: - spew - name: github.com/fernet/fernet-go version: 1b2437bc582b3cfbb341ee5a29f8ef5b42912ff2 - name: github.com/golang/protobuf - version: 5a0f697c9ed9d68fef0116532c6e05cfeae00e55 + version: b4deda0973fb4c70b50d226b1af49f3da59f5265 subpackages: - jsonpb - proto @@ -30,32 +30,32 @@ imports: - ptypes/struct - ptypes/timestamp - name: github.com/grafeas/grafeas - version: 73210e9cadcba64b5b211a0ec64a9f4c2d4841b5 + version: cec245e0b6d978e8c6c371d5de1b8c30673b510a repo: https://github.com/Grafeas/Grafeas.git vcs: git subpackages: - samples/server/go-server/api/server/name - v1alpha1/proto - name: github.com/grpc-ecosystem/go-grpc-prometheus - version: 2500245aa6110c562d17020fb31a2c133d737799 + version: 39de4380c2e0353a115b80b1c730719c79bfb771 - name: github.com/grpc-ecosystem/grpc-gateway - version: 2a40dd79571b760642c30f62ada35c65ac2b779c + version: b502d2dcfc6bed7d2db69835a367cd2c6fb011d2 subpackages: - runtime - runtime/internal - utilities - name: github.com/guregu/null - version: 41961cea0328defc5f95c1c473f89ebf0d1813f6 + version: e81d6d8d57747b34d7c5fe0d20ebf57692f04ea9 subpackages: - zero - name: github.com/hashicorp/golang-lru - version: 0a025b7e63adc15a622f29b0b2c4c3848243bbf6 + version: 0fb14efe8c47ae851c0034ed7a448854d3d34cf3 subpackages: - simplelru - name: github.com/julienschmidt/httprouter version: 8c199fb6259ffc1af525cc3ad52ee60ba8359669 - name: github.com/lib/pq - version: 8837942c3e09574accbc5f150e2c5e057189cace + version: d34b9ff171c21ad295489235aec8b6626023cd04 subpackages: - oid - name: github.com/matttproud/golang_protobuf_extensions @@ -63,9 +63,9 @@ imports: subpackages: - pbutil - name: github.com/pborman/uuid - version: a97ce2ca70fa5a848076093f05e639a89ca34d06 + version: e790cca94e6cc75c7064b1332e63811d4aae1a53 - name: github.com/pmezard/go-difflib - version: d8ed2627bdf02c080bf22230dbb337003b7aba2d + version: 792786c7400a136282c1664665ae0a8db921c6c2 subpackages: - difflib - name: github.com/prometheus/client_golang @@ -73,33 +73,36 @@ imports: subpackages: - prometheus - name: github.com/prometheus/client_model - version: 6f3806018612930941127f2a7c6c453ba2c527d2 + version: 99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c subpackages: - go - name: github.com/prometheus/common - version: 13ba4ddd0caa9c28ca7b7bffe1dfa9ed8d5ef207 + version: 7600349dcfe1abd18d72d3a1770870d9800a7801 subpackages: - expfmt - internal/bitbucket.org/ww/goautoneg - model - name: github.com/prometheus/procfs - version: 65c1f6f8f0fc1e2185eb9863a3bc751496404259 + version: 8b1c2da0d56deffdbb9e48d4414b4e674bd8083e subpackages: + - internal/util + - nfs - xfs - name: github.com/remind101/migrate - version: d22d647232c20dbea6d2aa1dda7f2737cccce614 + version: 52c1edff7319858d56cbb1954ed32d520788a164 - name: github.com/sirupsen/logrus version: ba1b36c82c5e05c4f912a88eab0dcd91a171688f - name: github.com/stretchr/testify - version: 
69483b4bd14f5845b5a1e55bca19e954e827f1d0 + version: 12b6f73e6084dad08a7c6e575284b177ecafbc71 subpackages: - assert - name: github.com/tylerb/graceful version: 4654dfbb6ad53cb5e27f37d99b02e16c1872fbbb - name: golang.org/x/net - version: 59a0b19b5533c7977ddeb86b017bf507ed407b12 + version: 5f9ae10d9af5b1c89ae6904293b14b064d4ada23 subpackages: - context + - http/httpguts - http2 - http2/hpack - idna @@ -107,37 +110,49 @@ imports: - lex/httplex - trace - name: golang.org/x/sys - version: b90f89a1e7a9c1f6b918820b3daa7f08488c8594 + version: bb9c189858d91f42db229b04d45a4c3d23a7662a subpackages: - unix - name: golang.org/x/text - version: ccbd3f7822129ff389f8ca4858a9b9d4d910531c + version: 7922cc490dd5a7dbaa7fd5d6196b49db59ac042f subpackages: - secure/bidirule - transform - unicode/bidi - unicode/norm - name: google.golang.org/genproto - version: aa2eb687b4d3e17154372564ad8d6bf11c3cf21f + version: 7fd901a49ba6a7f87732eb344f6e3c5b19d1b200 subpackages: - googleapis/api/annotations + - googleapis/longrunning - googleapis/rpc/status + - protobuf/field_mask - name: google.golang.org/grpc - version: 8de2dff78c3b968a51c99ec526d934f686537437 + version: 41344da2231b913fa3d983840a57a6b1b7b631a1 subpackages: + - balancer + - balancer/base + - balancer/roundrobin + - channelz - codes + - connectivity - credentials - - grpclb/grpc_lb_v1 + - encoding + - encoding/proto + - grpclb/grpc_lb_v1/messages - grpclog - internal - keepalive - metadata - naming - peer + - resolver + - resolver/dns + - resolver/passthrough - stats - status - tap - transport - name: gopkg.in/yaml.v2 - version: cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b + version: 5420a8b6744d3b0345ab293f6fcba19c978f1183 testImports: [] diff --git a/grafeas/grafeas.go b/grafeas/grafeas.go index 889a9232..2f5bbd1d 100644 --- a/grafeas/grafeas.go +++ b/grafeas/grafeas.go @@ -61,7 +61,9 @@ func (g *Grafeas) Export(datastore database.Datastore) error { log.Println("CreateProject") _, err = pClient.CreateProject(context, &pb.CreateProjectRequest{ - Name: fmt.Sprintf("projects/%s", pID), + Project: &pb.Project{ + Name: fmt.Sprintf("projects/%s", pID), + }, }) if err != nil { // Failed to access API diff --git a/vendor/github.com/beorn7/perks/quantile/stream.go b/vendor/github.com/beorn7/perks/quantile/stream.go index f4cabd66..d7d14f8e 100644 --- a/vendor/github.com/beorn7/perks/quantile/stream.go +++ b/vendor/github.com/beorn7/perks/quantile/stream.go @@ -77,15 +77,20 @@ func NewHighBiased(epsilon float64) *Stream { // is guaranteed to be within (Quantile±Epsilon). // // See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error properties. -func NewTargeted(targets map[float64]float64) *Stream { +func NewTargeted(targetMap map[float64]float64) *Stream { + // Convert map to slice to avoid slow iterations on a map. + // ƒ is called on the hot path, so converting the map to a slice + // beforehand results in significant CPU savings. 
+ targets := targetMapToSlice(targetMap) + ƒ := func(s *stream, r float64) float64 { var m = math.MaxFloat64 var f float64 - for quantile, epsilon := range targets { - if quantile*s.n <= r { - f = (2 * epsilon * r) / quantile + for _, t := range targets { + if t.quantile*s.n <= r { + f = (2 * t.epsilon * r) / t.quantile } else { - f = (2 * epsilon * (s.n - r)) / (1 - quantile) + f = (2 * t.epsilon * (s.n - r)) / (1 - t.quantile) } if f < m { m = f @@ -96,6 +101,25 @@ func NewTargeted(targets map[float64]float64) *Stream { return newStream(ƒ) } +type target struct { + quantile float64 + epsilon float64 +} + +func targetMapToSlice(targetMap map[float64]float64) []target { + targets := make([]target, 0, len(targetMap)) + + for quantile, epsilon := range targetMap { + t := target{ + quantile: quantile, + epsilon: epsilon, + } + targets = append(targets, t) + } + + return targets +} + // Stream computes quantiles for a stream of float64s. It is not thread-safe by // design. Take care when using across multiple goroutines. type Stream struct { diff --git a/vendor/github.com/davecgh/go-spew/.travis.yml b/vendor/github.com/davecgh/go-spew/.travis.yml index 984e0736..1f4cbf54 100644 --- a/vendor/github.com/davecgh/go-spew/.travis.yml +++ b/vendor/github.com/davecgh/go-spew/.travis.yml @@ -1,14 +1,28 @@ language: go +go_import_path: github.com/davecgh/go-spew go: - - 1.5.4 - - 1.6.3 - - 1.7 + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - 1.10.x + - tip +sudo: false install: - - go get -v golang.org/x/tools/cmd/cover + - go get -v github.com/alecthomas/gometalinter + - gometalinter --install script: - - go test -v -tags=safe ./spew - - go test -v -tags=testcgo ./spew -covermode=count -coverprofile=profile.cov + - export PATH=$PATH:$HOME/gopath/bin + - export GORACE="halt_on_error=1" + - test -z "$(gometalinter --disable-all + --enable=gofmt + --enable=golint + --enable=vet + --enable=gosimple + --enable=unconvert + --deadline=4m ./spew | tee /dev/stderr)" + - go test -v -race -tags safe ./spew + - go test -v -race -tags testcgo ./spew -covermode=atomic -coverprofile=profile.cov after_success: - go get -v github.com/mattn/goveralls - - export PATH=$PATH:$HOME/gopath/bin - goveralls -coverprofile=profile.cov -service=travis-ci diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE index bb673323..bc52e96f 100644 --- a/vendor/github.com/davecgh/go-spew/LICENSE +++ b/vendor/github.com/davecgh/go-spew/LICENSE @@ -1,8 +1,8 @@ ISC License -Copyright (c) 2012-2013 Dave Collins +Copyright (c) 2012-2016 Dave Collins -Permission to use, copy, modify, and distribute this software for any +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
diff --git a/vendor/github.com/davecgh/go-spew/README.md b/vendor/github.com/davecgh/go-spew/README.md index 556170ae..f6ed02c3 100644 --- a/vendor/github.com/davecgh/go-spew/README.md +++ b/vendor/github.com/davecgh/go-spew/README.md @@ -1,10 +1,9 @@ go-spew ======= -[![Build Status](https://travis-ci.org/davecgh/go-spew.png?branch=master)] -(https://travis-ci.org/davecgh/go-spew) [![Coverage Status] -(https://coveralls.io/repos/davecgh/go-spew/badge.png?branch=master)] -(https://coveralls.io/r/davecgh/go-spew?branch=master) +[![Build Status](https://img.shields.io/travis/davecgh/go-spew.svg)](https://travis-ci.org/davecgh/go-spew) +[![ISC License](http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org) +[![Coverage Status](https://img.shields.io/coveralls/davecgh/go-spew.svg)](https://coveralls.io/r/davecgh/go-spew?branch=master) Go-spew implements a deep pretty printer for Go data structures to aid in debugging. A comprehensive suite of tests with 100% test coverage is provided @@ -19,8 +18,7 @@ post about it ## Documentation -[![GoDoc](https://godoc.org/github.com/davecgh/go-spew/spew?status.png)] -(http://godoc.org/github.com/davecgh/go-spew/spew) +[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)](http://godoc.org/github.com/davecgh/go-spew/spew) Full `go doc` style documentation for the project can be viewed online without installing this package by using the excellent GoDoc site here: @@ -160,6 +158,15 @@ options. See the ConfigState documentation for more details. App Engine or with the "safe" build tag specified. Pointer method invocation is enabled by default. +* DisablePointerAddresses + DisablePointerAddresses specifies whether to disable the printing of + pointer addresses. This is useful when diffing data structures in tests. + +* DisableCapacities + DisableCapacities specifies whether to disable the printing of capacities + for arrays, slices, maps and channels. This is useful when diffing data + structures in tests. + * ContinueOnMethod Enables recursion into types after invoking error and Stringer interface methods. Recursion after method invocation is disabled by default. @@ -191,4 +198,4 @@ using the unsafe package. ## License -Go-spew is licensed under the liberal ISC License. +Go-spew is licensed under the [copyfree](http://copyfree.org) ISC License. diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go index d42a0bc4..79299478 100644 --- a/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ b/vendor/github.com/davecgh/go-spew/spew/bypass.go @@ -1,4 +1,4 @@ -// Copyright (c) 2015 Dave Collins +// Copyright (c) 2015-2016 Dave Collins // // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above @@ -16,7 +16,9 @@ // when the code is not running on Google App Engine, compiled by GopherJS, and // "-tags safe" is not added to the go build command line. The "disableunsafe" // tag is deprecated and thus should not be used. -// +build !js,!appengine,!safe,!disableunsafe +// Go versions prior to 1.4 are disabled because they use a different layout +// for interfaces which make the implementation of unsafeReflectValue more complex. +// +build !js,!appengine,!safe,!disableunsafe,go1.4 package spew @@ -34,80 +36,49 @@ const ( ptrSize = unsafe.Sizeof((*byte)(nil)) ) -var ( - // offsetPtr, offsetScalar, and offsetFlag are the offsets for the - // internal reflect.Value fields. 
These values are valid before golang
- commit ecccf07e7f9d which changed the format. They are also valid
- after commit 82f48826c6c7 which changed the format again to mirror
- the original format. Code in the init function updates these offsets
- as necessary.
- offsetPtr = uintptr(ptrSize)
- offsetScalar = uintptr(0)
- offsetFlag = uintptr(ptrSize * 2)
+type flag uintptr
- // flagKindWidth and flagKindShift indicate various bits that the
- // reflect package uses internally to track kind information.
- //
- // flagRO indicates whether or not the value field of a reflect.Value is
- // read-only.
- //
- // flagIndir indicates whether the value field of a reflect.Value is
- // the actual data or a pointer to the data.
- //
- // These values are valid before golang commit 90a7c3c86944 which
- // changed their positions. Code in the init function updates these
- // flags as necessary.
- flagKindWidth = uintptr(5)
- flagKindShift = uintptr(flagKindWidth - 1)
- flagRO = uintptr(1 << 0)
- flagIndir = uintptr(1 << 1)
+var (
+ // flagRO indicates whether the value field of a reflect.Value
+ // is read-only.
+ flagRO flag
+
+ // flagAddr indicates whether the address of the reflect.Value's
+ // value may be taken.
+ flagAddr flag
 )
-func init() {
- // Older versions of reflect.Value stored small integers directly in the
- // ptr field (which is named val in the older versions). Versions
- // between commits ecccf07e7f9d and 82f48826c6c7 added a new field named
- // scalar for this purpose which unfortunately came before the flag
- // field, so the offset of the flag field is different for those
- // versions.
- //
- // This code constructs a new reflect.Value from a known small integer
- // and checks if the size of the reflect.Value struct indicates it has
- // the scalar field. When it does, the offsets are updated accordingly.
- vv := reflect.ValueOf(0xf00)
- if unsafe.Sizeof(vv) == (ptrSize * 4) {
- offsetScalar = ptrSize * 2
- offsetFlag = ptrSize * 3
- }
+// flagKindMask holds the bits that make up the kind
+// part of the flags field. In all the supported versions,
+// it is in the lower 5 bits.
+const flagKindMask = flag(0x1f)
- // Commit 90a7c3c86944 changed the flag positions such that the low
- // order bits are the kind. This code extracts the kind from the flags
- // field and ensures it's the correct type. When it's not, the flag
- // order has been changed to the newer format, so the flags are updated
- // accordingly.
- upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag)
- upfv := *(*uintptr)(upf)
- flagKindMask := uintptr((1<<flagKindWidth - 1) << flagKindShift)
- if (upfv&flagKindMask)>>flagKindShift != uintptr(reflect.Int) {
- flagKindShift = 0
- flagRO = 1 << 5
- flagIndir = 1 << 6
+// Different versions of Go have used different
+// bit layouts for the flags type. This table
+// records the known combinations.
+var okFlags = []struct {
+ ro, addr flag
+}{{
+ // From Go 1.4 to 1.5
+ ro: 1 << 5,
+ addr: 1 << 7,
+}, {
+ // Up to Go tip.
+ ro: 1<<5 | 1<<6,
+ addr: 1 << 8,
+}}
- // Commit adf9b30e5594 modified the flags to separate the
- // flagRO flag into two bits which specifies whether or not the
- // field is embedded. This causes flagIndir to move over a bit
- // and means that flagRO is the combination of either of the
- // original flagRO bit and the new bit.
- //
- // This code detects the change by extracting what used to be
- // the indirect bit to ensure it's set. When it's not, the flag
- // order has been changed to the newer format, so the flags are
- // updated accordingly.
- if upfv&flagIndir == 0 { - flagRO = 3 << 5 - flagIndir = 1 << 7 - } +var flagValOffset = func() uintptr { + field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") + if !ok { + panic("reflect.Value has no flag field") } + return field.Offset +}() + +// flagField returns a pointer to the flag field of a reflect.Value. +func flagField(v *reflect.Value) *flag { + return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset)) } // unsafeReflectValue converts the passed reflect.Value into a one that bypasses @@ -119,34 +90,56 @@ func init() { // This allows us to check for implementations of the Stringer and error // interfaces to be used for pretty printing ordinarily unaddressable and // inaccessible values such as unexported struct fields. -func unsafeReflectValue(v reflect.Value) (rv reflect.Value) { - indirects := 1 - vt := v.Type() - upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr) - rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag)) - if rvf&flagIndir != 0 { - vt = reflect.PtrTo(v.Type()) - indirects++ - } else if offsetScalar != 0 { - // The value is in the scalar field when it's not one of the - // reference types. - switch vt.Kind() { - case reflect.Uintptr: - case reflect.Chan: - case reflect.Func: - case reflect.Map: - case reflect.Ptr: - case reflect.UnsafePointer: - default: - upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + - offsetScalar) +func unsafeReflectValue(v reflect.Value) reflect.Value { + if !v.IsValid() || (v.CanInterface() && v.CanAddr()) { + return v + } + flagFieldPtr := flagField(&v) + *flagFieldPtr &^= flagRO + *flagFieldPtr |= flagAddr + return v +} + +// Sanity checks against future reflect package changes +// to the type or semantics of the Value.flag field. +func init() { + field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") + if !ok { + panic("reflect.Value has no flag field") + } + if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() { + panic("reflect.Value flag field has changed kind") + } + type t0 int + var t struct { + A t0 + // t0 will have flagEmbedRO set. + t0 + // a will have flagStickyRO set + a t0 + } + vA := reflect.ValueOf(t).FieldByName("A") + va := reflect.ValueOf(t).FieldByName("a") + vt0 := reflect.ValueOf(t).FieldByName("t0") + + // Infer flagRO from the difference between the flags + // for the (otherwise identical) fields in t. + flagPublic := *flagField(&vA) + flagWithRO := *flagField(&va) | *flagField(&vt0) + flagRO = flagPublic ^ flagWithRO + + // Infer flagAddr from the difference between a value + // taken from a pointer and not. + vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A") + flagNoPtr := *flagField(&vA) + flagPtr := *flagField(&vPtrA) + flagAddr = flagNoPtr ^ flagPtr + + // Check that the inferred flags tally with one of the known versions. 
+ for _, f := range okFlags { + if flagRO == f.ro && flagAddr == f.addr { + return } } - - pv := reflect.NewAt(vt, upv) - rv = pv - for i := 0; i < indirects; i++ { - rv = rv.Elem() - } - return rv + panic("reflect.Value read-only flag has changed semantics") } diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go index e47a4e79..205c28d6 100644 --- a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go +++ b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go @@ -1,4 +1,4 @@ -// Copyright (c) 2015 Dave Collins +// Copyright (c) 2015-2016 Dave Collins // // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above @@ -16,7 +16,7 @@ // when the code is running on Google App Engine, compiled by GopherJS, or // "-tags safe" is added to the go build command line. The "disableunsafe" // tag is deprecated and thus should not be used. -// +build js appengine safe disableunsafe +// +build js appengine safe disableunsafe !go1.4 package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go index 14f02dc1..1be8ce94 100644 --- a/vendor/github.com/davecgh/go-spew/spew/common.go +++ b/vendor/github.com/davecgh/go-spew/spew/common.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -180,7 +180,7 @@ func printComplex(w io.Writer, c complex128, floatPrecision int) { w.Write(closeParenBytes) } -// printHexPtr outputs a uintptr formatted as hexidecimal with a leading '0x' +// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x' // prefix to Writer w. func printHexPtr(w io.Writer, p uintptr) { // Null pointer. diff --git a/vendor/github.com/davecgh/go-spew/spew/common_test.go b/vendor/github.com/davecgh/go-spew/spew/common_test.go index 39b7525b..0f5ce47d 100644 --- a/vendor/github.com/davecgh/go-spew/spew/common_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/common_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go index 55528272..2e3d22f3 100644 --- a/vendor/github.com/davecgh/go-spew/spew/config.go +++ b/vendor/github.com/davecgh/go-spew/spew/config.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -67,6 +67,15 @@ type ConfigState struct { // Google App Engine or with the "safe" build tag specified. DisablePointerMethods bool + // DisablePointerAddresses specifies whether to disable the printing of + // pointer addresses. This is useful when diffing data structures in tests. + DisablePointerAddresses bool + + // DisableCapacities specifies whether to disable the printing of capacities + // for arrays, slices, maps and channels. This is useful when diffing + // data structures in tests. 
+ DisableCapacities bool + // ContinueOnMethod specifies whether or not recursion should continue once // a custom error or Stringer interface is invoked. The default, false, // means it will print the results of invoking the custom error or Stringer diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go index 5be0c406..aacaac6f 100644 --- a/vendor/github.com/davecgh/go-spew/spew/doc.go +++ b/vendor/github.com/davecgh/go-spew/spew/doc.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -91,6 +91,15 @@ The following configuration options are available: which only accept pointer receivers from non-pointer variables. Pointer method invocation is enabled by default. + * DisablePointerAddresses + DisablePointerAddresses specifies whether to disable the printing of + pointer addresses. This is useful when diffing data structures in tests. + + * DisableCapacities + DisableCapacities specifies whether to disable the printing of + capacities for arrays, slices, maps and channels. This is useful when + diffing data structures in tests. + * ContinueOnMethod Enables recursion into types after invoking error and Stringer interface methods. Recursion after method invocation is disabled by default. diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go index a0ff95e2..f78d89fc 100644 --- a/vendor/github.com/davecgh/go-spew/spew/dump.go +++ b/vendor/github.com/davecgh/go-spew/spew/dump.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -35,16 +35,16 @@ var ( // cCharRE is a regular expression that matches a cgo char. // It is used to detect character arrays to hexdump them. - cCharRE = regexp.MustCompile("^.*\\._Ctype_char$") + cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`) // cUnsignedCharRE is a regular expression that matches a cgo unsigned // char. It is used to detect unsigned character arrays to hexdump // them. - cUnsignedCharRE = regexp.MustCompile("^.*\\._Ctype_unsignedchar$") + cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`) // cUint8tCharRE is a regular expression that matches a cgo uint8_t. // It is used to detect uint8_t arrays to hexdump them. - cUint8tCharRE = regexp.MustCompile("^.*\\._Ctype_uint8_t$") + cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`) ) // dumpState contains information about the state of a dump operation. @@ -129,7 +129,7 @@ func (d *dumpState) dumpPtr(v reflect.Value) { d.w.Write(closeParenBytes) // Display pointer information. - if len(pointerChain) > 0 { + if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 { d.w.Write(openParenBytes) for i, addr := range pointerChain { if i > 0 { @@ -143,10 +143,10 @@ func (d *dumpState) dumpPtr(v reflect.Value) { // Display dereferenced value. 
d.w.Write(openParenBytes) switch { - case nilFound == true: + case nilFound: d.w.Write(nilAngleBytes) - case cycleFound == true: + case cycleFound: d.w.Write(circularBytes) default: @@ -282,13 +282,13 @@ func (d *dumpState) dump(v reflect.Value) { case reflect.Map, reflect.String: valueLen = v.Len() } - if valueLen != 0 || valueCap != 0 { + if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 { d.w.Write(openParenBytes) if valueLen != 0 { d.w.Write(lenEqualsBytes) printInt(d.w, int64(valueLen), 10) } - if valueCap != 0 { + if !d.cs.DisableCapacities && valueCap != 0 { if valueLen != 0 { d.w.Write(spaceBytes) } diff --git a/vendor/github.com/davecgh/go-spew/spew/dump_test.go b/vendor/github.com/davecgh/go-spew/spew/dump_test.go index 2b320401..4a31a2ee 100644 --- a/vendor/github.com/davecgh/go-spew/spew/dump_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/dump_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -70,7 +70,7 @@ import ( "github.com/davecgh/go-spew/spew" ) -// dumpTest is used to describe a test to be perfomed against the Dump method. +// dumpTest is used to describe a test to be performed against the Dump method. type dumpTest struct { in interface{} wants []string @@ -768,7 +768,7 @@ func addUintptrDumpTests() { func addUnsafePointerDumpTests() { // Null pointer. - v := unsafe.Pointer(uintptr(0)) + v := unsafe.Pointer(nil) nv := (*unsafe.Pointer)(nil) pv := &v vAddr := fmt.Sprintf("%p", pv) diff --git a/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go b/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go index ed3e3c31..108baa55 100644 --- a/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go @@ -1,4 +1,4 @@ -// Copyright (c) 2013 Dave Collins +// Copyright (c) 2013-2016 Dave Collins // // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above @@ -82,18 +82,20 @@ func addCgoDumpTests() { v5Len := fmt.Sprintf("%d", v5l) v5Cap := fmt.Sprintf("%d", v5c) v5t := "[6]testdata._Ctype_uint8_t" + v5t2 := "[6]testdata._Ctype_uchar" v5s := "(len=" + v5Len + " cap=" + v5Cap + ") " + "{\n 00000000 74 65 73 74 35 00 " + " |test5.|\n}" - addDumpTest(v5, "("+v5t+") "+v5s+"\n") + addDumpTest(v5, "("+v5t+") "+v5s+"\n", "("+v5t2+") "+v5s+"\n") // C typedefed unsigned char array. 
v6, v6l, v6c := testdata.GetCgoTypdefedUnsignedCharArray() v6Len := fmt.Sprintf("%d", v6l) v6Cap := fmt.Sprintf("%d", v6c) v6t := "[6]testdata._Ctype_custom_uchar_t" + v6t2 := "[6]testdata._Ctype_uchar" v6s := "(len=" + v6Len + " cap=" + v6Cap + ") " + "{\n 00000000 74 65 73 74 36 00 " + " |test6.|\n}" - addDumpTest(v6, "("+v6t+") "+v6s+"\n") + addDumpTest(v6, "("+v6t+") "+v6s+"\n", "("+v6t2+") "+v6s+"\n") } diff --git a/vendor/github.com/davecgh/go-spew/spew/example_test.go b/vendor/github.com/davecgh/go-spew/spew/example_test.go index de6c4e30..c6ec8c6d 100644 --- a/vendor/github.com/davecgh/go-spew/spew/example_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/example_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go index ecf3b80e..b04edb7d 100644 --- a/vendor/github.com/davecgh/go-spew/spew/format.go +++ b/vendor/github.com/davecgh/go-spew/spew/format.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -182,10 +182,10 @@ func (f *formatState) formatPtr(v reflect.Value) { // Display dereferenced value. switch { - case nilFound == true: + case nilFound: f.fs.Write(nilAngleBytes) - case cycleFound == true: + case cycleFound: f.fs.Write(circularShortBytes) default: diff --git a/vendor/github.com/davecgh/go-spew/spew/format_test.go b/vendor/github.com/davecgh/go-spew/spew/format_test.go index b664b3f1..87ee9651 100644 --- a/vendor/github.com/davecgh/go-spew/spew/format_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/format_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -75,7 +75,7 @@ import ( "github.com/davecgh/go-spew/spew" ) -// formatterTest is used to describe a test to be perfomed against NewFormatter. +// formatterTest is used to describe a test to be performed against NewFormatter. type formatterTest struct { format string in interface{} @@ -1083,7 +1083,7 @@ func addUintptrFormatterTests() { func addUnsafePointerFormatterTests() { // Null pointer. 
- v := unsafe.Pointer(uintptr(0)) + v := unsafe.Pointer(nil) nv := (*unsafe.Pointer)(nil) pv := &v vAddr := fmt.Sprintf("%p", pv) @@ -1536,14 +1536,14 @@ func TestPrintSortedKeys(t *testing.T) { t.Errorf("Sorted keys mismatch 3:\n %v %v", s, expected) } - s = cfg.Sprint(map[testStruct]int{testStruct{1}: 1, testStruct{3}: 3, testStruct{2}: 2}) + s = cfg.Sprint(map[testStruct]int{{1}: 1, {3}: 3, {2}: 2}) expected = "map[ts.1:1 ts.2:2 ts.3:3]" if s != expected { t.Errorf("Sorted keys mismatch 4:\n %v %v", s, expected) } if !spew.UnsafeDisabled { - s = cfg.Sprint(map[testStructP]int{testStructP{1}: 1, testStructP{3}: 3, testStructP{2}: 2}) + s = cfg.Sprint(map[testStructP]int{{1}: 1, {3}: 3, {2}: 2}) expected = "map[ts.1:1 ts.2:2 ts.3:3]" if s != expected { t.Errorf("Sorted keys mismatch 5:\n %v %v", s, expected) diff --git a/vendor/github.com/davecgh/go-spew/spew/internal_test.go b/vendor/github.com/davecgh/go-spew/spew/internal_test.go index 1069ee21..e312b4fa 100644 --- a/vendor/github.com/davecgh/go-spew/spew/internal_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/internal_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -36,10 +36,7 @@ type dummyFmtState struct { } func (dfs *dummyFmtState) Flag(f int) bool { - if f == int('+') { - return true - } - return false + return f == int('+') } func (dfs *dummyFmtState) Precision() (int, bool) { diff --git a/vendor/github.com/davecgh/go-spew/spew/internalunsafe_test.go b/vendor/github.com/davecgh/go-spew/spew/internalunsafe_test.go index 863b62cf..80dc2217 100644 --- a/vendor/github.com/davecgh/go-spew/spew/internalunsafe_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/internalunsafe_test.go @@ -1,4 +1,4 @@ -// Copyright (c) 2013-2015 Dave Collins +// Copyright (c) 2013-2016 Dave Collins // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above @@ -16,7 +16,7 @@ // when the code is not running on Google App Engine, compiled by GopherJS, and // "-tags safe" is not added to the go build command line. The "disableunsafe" // tag is deprecated and thus should not be used. -// +build !js,!appengine,!safe,!disableunsafe +// +build !js,!appengine,!safe,!disableunsafe,go1.4 /* This test file is part of the spew package rather than than the spew_test @@ -30,7 +30,6 @@ import ( "bytes" "reflect" "testing" - "unsafe" ) // changeKind uses unsafe to intentionally change the kind of a reflect.Value to @@ -38,13 +37,13 @@ import ( // fallback code which punts to the standard fmt library for new types that // might get added to the language. 
func changeKind(v *reflect.Value, readOnly bool) { - rvf := (*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + offsetFlag)) - *rvf = *rvf | ((1< + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above diff --git a/vendor/github.com/davecgh/go-spew/spew/spew_test.go b/vendor/github.com/davecgh/go-spew/spew/spew_test.go index dbbc0856..b70466c6 100644 --- a/vendor/github.com/davecgh/go-spew/spew/spew_test.go +++ b/vendor/github.com/davecgh/go-spew/spew/spew_test.go @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 Dave Collins + * Copyright (c) 2013-2016 Dave Collins * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above @@ -130,12 +130,19 @@ func initSpewTests() { scsNoPmethods := &spew.ConfigState{Indent: " ", DisablePointerMethods: true} scsMaxDepth := &spew.ConfigState{Indent: " ", MaxDepth: 1} scsContinue := &spew.ConfigState{Indent: " ", ContinueOnMethod: true} + scsNoPtrAddr := &spew.ConfigState{DisablePointerAddresses: true} + scsNoCap := &spew.ConfigState{DisableCapacities: true} // Variables for tests on types which implement Stringer interface with and // without a pointer receiver. ts := stringer("test") tps := pstringer("test") + type ptrTester struct { + s *struct{} + } + tptr := &ptrTester{s: &struct{}{}} + // depthTester is used to test max depth handling for structs, array, slices // and maps. type depthTester struct { @@ -192,6 +199,10 @@ func initSpewTests() { {scsContinue, fCSFprint, "", te, "(error: 10) 10"}, {scsContinue, fCSFdump, "", te, "(spew_test.customError) " + "(error: 10) 10\n"}, + {scsNoPtrAddr, fCSFprint, "", tptr, "<*>{<*>{}}"}, + {scsNoPtrAddr, fCSSdump, "", tptr, "(*spew_test.ptrTester)({\ns: (*struct {})({\n})\n})\n"}, + {scsNoCap, fCSSdump, "", make([]string, 0, 10), "([]string) {\n}\n"}, + {scsNoCap, fCSSdump, "", make([]string, 1, 10), "([]string) (len=1) {\n(string) \"\"\n}\n"}, } } diff --git a/vendor/github.com/golang/protobuf/.gitignore b/vendor/github.com/golang/protobuf/.gitignore index 8f5b596b..c7dd4058 100644 --- a/vendor/github.com/golang/protobuf/.gitignore +++ b/vendor/github.com/golang/protobuf/.gitignore @@ -12,5 +12,6 @@ core _obj _test _testmain.go -protoc-gen-go/testdata/multi/*.pb.go -_conformance/_conformance + +# Conformance test output and transient files. +conformance/failing_tests.txt diff --git a/vendor/github.com/golang/protobuf/.travis.yml b/vendor/github.com/golang/protobuf/.travis.yml new file mode 100644 index 00000000..455fa660 --- /dev/null +++ b/vendor/github.com/golang/protobuf/.travis.yml @@ -0,0 +1,30 @@ +sudo: false +language: go +go: +- 1.6.x +- 1.10.x +- 1.x + +install: + - go get -v -d -t github.com/golang/protobuf/... + - curl -L https://github.com/google/protobuf/releases/download/v3.5.1/protoc-3.5.1-linux-x86_64.zip -o /tmp/protoc.zip + - unzip /tmp/protoc.zip -d "$HOME"/protoc + - mkdir -p "$HOME"/src && ln -s "$HOME"/protoc "$HOME"/src/protobuf + +env: + - PATH=$HOME/protoc/bin:$PATH + +script: + - make all + - make regenerate + # TODO(tamird): When https://github.com/travis-ci/gimme/pull/130 is + # released, make this look for "1.x". 
+ - if [[ "$TRAVIS_GO_VERSION" == 1.10* ]]; then + if [[ "$(git status --porcelain 2>&1)" != "" ]]; then + git status >&2; + git diff -a >&2; + exit 1; + fi; + echo "git status is clean."; + fi; + - make test diff --git a/vendor/github.com/golang/protobuf/Make.protobuf b/vendor/github.com/golang/protobuf/Make.protobuf deleted file mode 100644 index 15071de1..00000000 --- a/vendor/github.com/golang/protobuf/Make.protobuf +++ /dev/null @@ -1,40 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Includable Makefile to add a rule for generating .pb.go files from .proto files -# (Google protocol buffer descriptions). -# Typical use if myproto.proto is a file in package mypackage in this directory: -# -# include $(GOROOT)/src/pkg/github.com/golang/protobuf/Make.protobuf - -%.pb.go: %.proto - protoc --go_out=. $< - diff --git a/vendor/github.com/golang/protobuf/Makefile b/vendor/github.com/golang/protobuf/Makefile index a1421d8b..2bc2621a 100644 --- a/vendor/github.com/golang/protobuf/Makefile +++ b/vendor/github.com/golang/protobuf/Makefile @@ -29,16 +29,14 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - all: install install: - go install ./proto ./jsonpb ./ptypes - go install ./protoc-gen-go + go install ./proto ./jsonpb ./ptypes ./protoc-gen-go test: - go test ./proto ./jsonpb ./ptypes - make -C protoc-gen-go/testdata test + go test ./... ./protoc-gen-go/testdata + make -C conformance test clean: go clean ./... @@ -47,9 +45,4 @@ nuke: go clean -i ./... 
regenerate: - make -C protoc-gen-go/descriptor regenerate - make -C protoc-gen-go/plugin regenerate - make -C protoc-gen-go/testdata regenerate - make -C proto/testdata regenerate - make -C jsonpb/jsonpb_test_proto regenerate - make -C _conformance regenerate + ./regenerate.sh diff --git a/vendor/github.com/golang/protobuf/README.md b/vendor/github.com/golang/protobuf/README.md index e560b732..01b29daf 100644 --- a/vendor/github.com/golang/protobuf/README.md +++ b/vendor/github.com/golang/protobuf/README.md @@ -1,10 +1,13 @@ # Go support for Protocol Buffers +[![Build Status](https://travis-ci.org/golang/protobuf.svg?branch=master)](https://travis-ci.org/golang/protobuf) +[![GoDoc](https://godoc.org/github.com/golang/protobuf?status.svg)](https://godoc.org/github.com/golang/protobuf) + Google's data interchange format. Copyright 2010 The Go Authors. https://github.com/golang/protobuf -This package and the code it generates requires at least Go 1.4. +This package and the code it generates requires at least Go 1.6. This software implements Go bindings for protocol buffers. For information about protocol buffers themselves, see @@ -53,13 +56,49 @@ parameter set to the directory you want to output the Go code to. The generated files will be suffixed .pb.go. See the Test code below for an example using such a file. +## Packages and input paths ## + +The protocol buffer language has a concept of "packages" which does not +correspond well to the Go notion of packages. In generated Go code, +each source `.proto` file is associated with a single Go package. The +name and import path for this package is specified with the `go_package` +proto option: + + option go_package = "github.com/golang/protobuf/ptypes/any"; + +The protocol buffer compiler will attempt to derive a package name and +import path if a `go_package` option is not present, but it is +best to always specify one explicitly. + +There is a one-to-one relationship between source `.proto` files and +generated `.pb.go` files, but any number of `.pb.go` files may be +contained in the same Go package. + +The output name of a generated file is produced by replacing the +`.proto` suffix with `.pb.go` (e.g., `foo.proto` produces `foo.pb.go`). +However, the output directory is selected in one of two ways. Let +us say we have `inputs/x.proto` with a `go_package` option of +`github.com/golang/protobuf/p`. The corresponding output file may +be: + +- Relative to the import path: + + protoc --go_out=. inputs/x.proto + # writes ./github.com/golang/protobuf/p/x.pb.go + + (This can work well with `--go_out=$GOPATH`.) + +- Relative to the input file: + + protoc --go_out=paths=source_relative:. inputs/x.proto + # generate ./inputs/x.pb.go + +## Generated code ## The package comment for the proto library contains text describing the interface provided in Go for protocol buffers. Here is an edited version. -========== - The proto package converts data structures to and from the wire format of protocol buffers. It works in concert with the Go source code generated for .proto files by the protocol compiler. 
@@ -109,10 +148,11 @@ When the .proto file specifies `syntax="proto3"`, there are some differences: Consider file test.proto, containing ```proto + syntax = "proto2"; package example; - + enum FOO { X = 17; }; - + message Test { required string label = 1; optional int32 type = 2 [default=77]; @@ -166,22 +206,25 @@ To create and play with a Test object from the example package, To pass extra parameters to the plugin, use a comma-separated parameter list separated from the output directory by a colon: - protoc --go_out=plugins=grpc,import_path=mypackage:. *.proto - -- `import_prefix=xxx` - a prefix that is added onto the beginning of - all imports. Useful for things like generating protos in a - subdirectory, or regenerating vendored protobufs in-place. -- `import_path=foo/bar` - used as the package if no input files - declare `go_package`. If it contains slashes, everything up to the - rightmost slash is ignored. +- `paths=(import | source_relative)` - specifies how the paths of + generated files are structured. See the "Packages and imports paths" + section above. The default is `import`. - `plugins=plugin1+plugin2` - specifies the list of sub-plugins to load. The only plugin in this repo is `grpc`. - `Mfoo/bar.proto=quux/shme` - declares that foo/bar.proto is associated with Go package quux/shme. This is subject to the import_prefix parameter. +The following parameters are deprecated and should not be used: + +- `import_prefix=xxx` - a prefix that is added onto the beginning of + all imports. +- `import_path=foo/bar` - used as the package if no input files + declare `go_package`. If it contains slashes, everything up to the + rightmost slash is ignored. + ## gRPC Support ## If a proto file specifies RPC services, protoc-gen-go can be instructed to diff --git a/vendor/github.com/golang/protobuf/_conformance/Makefile b/vendor/github.com/golang/protobuf/conformance/Makefile similarity index 76% rename from vendor/github.com/golang/protobuf/_conformance/Makefile rename to vendor/github.com/golang/protobuf/conformance/Makefile index 89800e2d..b99e4ed6 100644 --- a/vendor/github.com/golang/protobuf/_conformance/Makefile +++ b/vendor/github.com/golang/protobuf/conformance/Makefile @@ -29,5 +29,21 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -regenerate: - protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers,Mgoogle/protobuf/field_mask.proto=google.golang.org/genproto/protobuf:. 
conformance_proto/conformance.proto
+PROTOBUF_ROOT=$(HOME)/src/protobuf
+
+all:
+ @echo To run the tests in this directory, acquire the main protobuf
+ @echo distribution from:
+ @echo
+ @echo ' https://github.com/google/protobuf'
+ @echo
+ @echo Build the test runner with:
+ @echo
+ @echo ' cd conformance && make conformance-test-runner'
+ @echo
+ @echo And run the tests in this directory with:
+ @echo
+ @echo ' make test PROTOBUF_ROOT='
+
+test:
+ ./test.sh $(PROTOBUF_ROOT)
diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance.go b/vendor/github.com/golang/protobuf/conformance/conformance.go
similarity index 94%
rename from vendor/github.com/golang/protobuf/_conformance/conformance.go
rename to vendor/github.com/golang/protobuf/conformance/conformance.go
index c54212c8..3029312a 100644
--- a/vendor/github.com/golang/protobuf/_conformance/conformance.go
+++ b/vendor/github.com/golang/protobuf/conformance/conformance.go
@@ -39,7 +39,7 @@ import (
 "io"
 "os"
- pb "github.com/golang/protobuf/_conformance/conformance_proto"
+ pb "github.com/golang/protobuf/conformance/internal/conformance_proto"
 "github.com/golang/protobuf/jsonpb"
 "github.com/golang/protobuf/proto"
 )
@@ -101,13 +101,6 @@ func handle(req *pb.ConformanceRequest) *pb.ConformanceResponse {
 err = proto.Unmarshal(p.ProtobufPayload, &msg)
 case *pb.ConformanceRequest_JsonPayload:
 err = jsonpb.UnmarshalString(p.JsonPayload, &msg)
- if err != nil && err.Error() == "unmarshaling Any not supported yet" {
- return &pb.ConformanceResponse{
- Result: &pb.ConformanceResponse_Skipped{
- Skipped: err.Error(),
- },
- }
- }
 default:
 return &pb.ConformanceResponse{
 Result: &pb.ConformanceResponse_RuntimeError{
diff --git a/vendor/github.com/golang/protobuf/conformance/conformance.sh b/vendor/github.com/golang/protobuf/conformance/conformance.sh
new file mode 100755
index 00000000..8532f571
--- /dev/null
+++ b/vendor/github.com/golang/protobuf/conformance/conformance.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd $(dirname $0)
+exec go run conformance.go $*
diff --git a/vendor/github.com/golang/protobuf/conformance/failure_list_go.txt b/vendor/github.com/golang/protobuf/conformance/failure_list_go.txt
new file mode 100644
index 00000000..d3728089
--- /dev/null
+++ b/vendor/github.com/golang/protobuf/conformance/failure_list_go.txt
@@ -0,0 +1,61 @@
+# This is the list of conformance tests that are known to fail right now.
+# TODO: These should be fixed.
+ +DurationProtoInputTooLarge.JsonOutput +DurationProtoInputTooSmall.JsonOutput +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.AnyWithFieldMask.JsonOutput +JsonInput.AnyWithFieldMask.ProtobufOutput +JsonInput.DoubleFieldQuotedValue.JsonOutput +JsonInput.DoubleFieldQuotedValue.ProtobufOutput +JsonInput.DurationHas3FractionalDigits.Validator +JsonInput.DurationHas6FractionalDigits.Validator +JsonInput.DurationHas9FractionalDigits.Validator +JsonInput.DurationHasZeroFractionalDigit.Validator +JsonInput.DurationMaxValue.JsonOutput +JsonInput.DurationMaxValue.ProtobufOutput +JsonInput.DurationMinValue.JsonOutput +JsonInput.DurationMinValue.ProtobufOutput +JsonInput.EnumFieldUnknownValue.Validator +JsonInput.FieldMask.JsonOutput +JsonInput.FieldMask.ProtobufOutput +JsonInput.FieldNameInLowerCamelCase.Validator +JsonInput.FieldNameWithMixedCases.JsonOutput +JsonInput.FieldNameWithMixedCases.ProtobufOutput +JsonInput.FieldNameWithMixedCases.Validator +JsonInput.FieldNameWithNumbers.Validator +JsonInput.FloatFieldQuotedValue.JsonOutput +JsonInput.FloatFieldQuotedValue.ProtobufOutput +JsonInput.Int32FieldExponentialFormat.JsonOutput +JsonInput.Int32FieldExponentialFormat.ProtobufOutput +JsonInput.Int32FieldFloatTrailingZero.JsonOutput +JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput +JsonInput.Int32FieldMaxFloatValue.JsonOutput +JsonInput.Int32FieldMaxFloatValue.ProtobufOutput +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinFloatValue.ProtobufOutput +JsonInput.Int32FieldStringValue.JsonOutput +JsonInput.Int32FieldStringValue.ProtobufOutput +JsonInput.Int32FieldStringValueEscaped.JsonOutput +JsonInput.Int32FieldStringValueEscaped.ProtobufOutput +JsonInput.Int64FieldBeString.Validator +JsonInput.MapFieldValueIsNull +JsonInput.OneofFieldDuplicate +JsonInput.RepeatedFieldMessageElementIsNull +JsonInput.RepeatedFieldPrimitiveElementIsNull +JsonInput.StringFieldSurrogateInWrongOrder +JsonInput.StringFieldUnpairedHighSurrogate +JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.TimestampHas3FractionalDigits.Validator +JsonInput.TimestampHas6FractionalDigits.Validator +JsonInput.TimestampHas9FractionalDigits.Validator +JsonInput.TimestampHasZeroFractionalDigit.Validator +JsonInput.TimestampJsonInputTooSmall +JsonInput.TimestampZeroNormalized.Validator +JsonInput.Uint32FieldMaxFloatValue.JsonOutput +JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput +JsonInput.Uint64FieldBeString.Validator +TimestampProtoInputTooLarge.JsonOutput +TimestampProtoInputTooSmall.JsonOutput diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go b/vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.pb.go similarity index 55% rename from vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go rename to vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.pb.go index ec354ead..82d45412 100644 --- a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go +++ b/vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.pb.go @@ -1,29 +1,17 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: conformance_proto/conformance.proto +// source: conformance.proto -/* -Package conformance is a generated protocol buffer package. 
- -It is generated from these files: - conformance_proto/conformance.proto - -It has these top-level messages: - ConformanceRequest - ConformanceResponse - TestAllTypes - ForeignMessage -*/ package conformance import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/any" -import google_protobuf1 "github.com/golang/protobuf/ptypes/duration" -import google_protobuf2 "google.golang.org/genproto/protobuf" -import google_protobuf3 "github.com/golang/protobuf/ptypes/struct" -import google_protobuf4 "github.com/golang/protobuf/ptypes/timestamp" -import google_protobuf5 "github.com/golang/protobuf/ptypes/wrappers" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import field_mask "google.golang.org/genproto/protobuf/field_mask" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -58,7 +46,9 @@ var WireFormat_value = map[string]int32{ func (x WireFormat) String() string { return proto.EnumName(WireFormat_name, int32(x)) } -func (WireFormat) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (WireFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{0} +} type ForeignEnum int32 @@ -82,7 +72,9 @@ var ForeignEnum_value = map[string]int32{ func (x ForeignEnum) String() string { return proto.EnumName(ForeignEnum_name, int32(x)) } -func (ForeignEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (ForeignEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{1} +} type TestAllTypes_NestedEnum int32 @@ -109,7 +101,9 @@ var TestAllTypes_NestedEnum_value = map[string]int32{ func (x TestAllTypes_NestedEnum) String() string { return proto.EnumName(TestAllTypes_NestedEnum_name, int32(x)) } -func (TestAllTypes_NestedEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } +func (TestAllTypes_NestedEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{2, 0} +} // Represents a single test case's input. The testee should: // @@ -126,12 +120,34 @@ type ConformanceRequest struct { Payload isConformanceRequest_Payload `protobuf_oneof:"payload"` // Which format should the testee serialize its message to? 
RequestedOutputFormat WireFormat `protobuf:"varint,3,opt,name=requested_output_format,json=requestedOutputFormat,enum=conformance.WireFormat" json:"requested_output_format,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ConformanceRequest) Reset() { *m = ConformanceRequest{} } -func (m *ConformanceRequest) String() string { return proto.CompactTextString(m) } -func (*ConformanceRequest) ProtoMessage() {} -func (*ConformanceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (m *ConformanceRequest) Reset() { *m = ConformanceRequest{} } +func (m *ConformanceRequest) String() string { return proto.CompactTextString(m) } +func (*ConformanceRequest) ProtoMessage() {} +func (*ConformanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{0} +} +func (m *ConformanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConformanceRequest.Unmarshal(m, b) +} +func (m *ConformanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConformanceRequest.Marshal(b, m, deterministic) +} +func (dst *ConformanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConformanceRequest.Merge(dst, src) +} +func (m *ConformanceRequest) XXX_Size() int { + return xxx_messageInfo_ConformanceRequest.Size(m) +} +func (m *ConformanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ConformanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ConformanceRequest proto.InternalMessageInfo type isConformanceRequest_Payload interface { isConformanceRequest_Payload() @@ -227,11 +243,11 @@ func _ConformanceRequest_OneofSizer(msg proto.Message) (n int) { // payload switch x := m.Payload.(type) { case *ConformanceRequest_ProtobufPayload: - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.ProtobufPayload))) n += len(x.ProtobufPayload) case *ConformanceRequest_JsonPayload: - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.JsonPayload))) n += len(x.JsonPayload) case nil: @@ -250,13 +266,35 @@ type ConformanceResponse struct { // *ConformanceResponse_ProtobufPayload // *ConformanceResponse_JsonPayload // *ConformanceResponse_Skipped - Result isConformanceResponse_Result `protobuf_oneof:"result"` + Result isConformanceResponse_Result `protobuf_oneof:"result"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ConformanceResponse) Reset() { *m = ConformanceResponse{} } -func (m *ConformanceResponse) String() string { return proto.CompactTextString(m) } -func (*ConformanceResponse) ProtoMessage() {} -func (*ConformanceResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *ConformanceResponse) Reset() { *m = ConformanceResponse{} } +func (m *ConformanceResponse) String() string { return proto.CompactTextString(m) } +func (*ConformanceResponse) ProtoMessage() {} +func (*ConformanceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{1} +} +func (m *ConformanceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConformanceResponse.Unmarshal(m, b) +} +func (m *ConformanceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConformanceResponse.Marshal(b, m, deterministic) +} +func (dst 
*ConformanceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConformanceResponse.Merge(dst, src) +} +func (m *ConformanceResponse) XXX_Size() int { + return xxx_messageInfo_ConformanceResponse.Size(m) +} +func (m *ConformanceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ConformanceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ConformanceResponse proto.InternalMessageInfo type isConformanceResponse_Result interface { isConformanceResponse_Result() @@ -433,27 +471,27 @@ func _ConformanceResponse_OneofSizer(msg proto.Message) (n int) { // result switch x := m.Result.(type) { case *ConformanceResponse_ParseError: - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.ParseError))) n += len(x.ParseError) case *ConformanceResponse_SerializeError: - n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.SerializeError))) n += len(x.SerializeError) case *ConformanceResponse_RuntimeError: - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.RuntimeError))) n += len(x.RuntimeError) case *ConformanceResponse_ProtobufPayload: - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.ProtobufPayload))) n += len(x.ProtobufPayload) case *ConformanceResponse_JsonPayload: - n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.JsonPayload))) n += len(x.JsonPayload) case *ConformanceResponse_Skipped: - n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.Skipped))) n += len(x.Skipped) case nil: @@ -536,69 +574,79 @@ type TestAllTypes struct { // *TestAllTypes_OneofNestedMessage // *TestAllTypes_OneofString // *TestAllTypes_OneofBytes - // *TestAllTypes_OneofBool - // *TestAllTypes_OneofUint64 - // *TestAllTypes_OneofFloat - // *TestAllTypes_OneofDouble - // *TestAllTypes_OneofEnum OneofField isTestAllTypes_OneofField `protobuf_oneof:"oneof_field"` // Well-known types - OptionalBoolWrapper *google_protobuf5.BoolValue `protobuf:"bytes,201,opt,name=optional_bool_wrapper,json=optionalBoolWrapper" json:"optional_bool_wrapper,omitempty"` - OptionalInt32Wrapper *google_protobuf5.Int32Value `protobuf:"bytes,202,opt,name=optional_int32_wrapper,json=optionalInt32Wrapper" json:"optional_int32_wrapper,omitempty"` - OptionalInt64Wrapper *google_protobuf5.Int64Value `protobuf:"bytes,203,opt,name=optional_int64_wrapper,json=optionalInt64Wrapper" json:"optional_int64_wrapper,omitempty"` - OptionalUint32Wrapper *google_protobuf5.UInt32Value `protobuf:"bytes,204,opt,name=optional_uint32_wrapper,json=optionalUint32Wrapper" json:"optional_uint32_wrapper,omitempty"` - OptionalUint64Wrapper *google_protobuf5.UInt64Value `protobuf:"bytes,205,opt,name=optional_uint64_wrapper,json=optionalUint64Wrapper" json:"optional_uint64_wrapper,omitempty"` - OptionalFloatWrapper *google_protobuf5.FloatValue `protobuf:"bytes,206,opt,name=optional_float_wrapper,json=optionalFloatWrapper" json:"optional_float_wrapper,omitempty"` - OptionalDoubleWrapper *google_protobuf5.DoubleValue `protobuf:"bytes,207,opt,name=optional_double_wrapper,json=optionalDoubleWrapper" json:"optional_double_wrapper,omitempty"` - OptionalStringWrapper *google_protobuf5.StringValue `protobuf:"bytes,208,opt,name=optional_string_wrapper,json=optionalStringWrapper" json:"optional_string_wrapper,omitempty"` - OptionalBytesWrapper 
*google_protobuf5.BytesValue `protobuf:"bytes,209,opt,name=optional_bytes_wrapper,json=optionalBytesWrapper" json:"optional_bytes_wrapper,omitempty"` - RepeatedBoolWrapper []*google_protobuf5.BoolValue `protobuf:"bytes,211,rep,name=repeated_bool_wrapper,json=repeatedBoolWrapper" json:"repeated_bool_wrapper,omitempty"` - RepeatedInt32Wrapper []*google_protobuf5.Int32Value `protobuf:"bytes,212,rep,name=repeated_int32_wrapper,json=repeatedInt32Wrapper" json:"repeated_int32_wrapper,omitempty"` - RepeatedInt64Wrapper []*google_protobuf5.Int64Value `protobuf:"bytes,213,rep,name=repeated_int64_wrapper,json=repeatedInt64Wrapper" json:"repeated_int64_wrapper,omitempty"` - RepeatedUint32Wrapper []*google_protobuf5.UInt32Value `protobuf:"bytes,214,rep,name=repeated_uint32_wrapper,json=repeatedUint32Wrapper" json:"repeated_uint32_wrapper,omitempty"` - RepeatedUint64Wrapper []*google_protobuf5.UInt64Value `protobuf:"bytes,215,rep,name=repeated_uint64_wrapper,json=repeatedUint64Wrapper" json:"repeated_uint64_wrapper,omitempty"` - RepeatedFloatWrapper []*google_protobuf5.FloatValue `protobuf:"bytes,216,rep,name=repeated_float_wrapper,json=repeatedFloatWrapper" json:"repeated_float_wrapper,omitempty"` - RepeatedDoubleWrapper []*google_protobuf5.DoubleValue `protobuf:"bytes,217,rep,name=repeated_double_wrapper,json=repeatedDoubleWrapper" json:"repeated_double_wrapper,omitempty"` - RepeatedStringWrapper []*google_protobuf5.StringValue `protobuf:"bytes,218,rep,name=repeated_string_wrapper,json=repeatedStringWrapper" json:"repeated_string_wrapper,omitempty"` - RepeatedBytesWrapper []*google_protobuf5.BytesValue `protobuf:"bytes,219,rep,name=repeated_bytes_wrapper,json=repeatedBytesWrapper" json:"repeated_bytes_wrapper,omitempty"` - OptionalDuration *google_protobuf1.Duration `protobuf:"bytes,301,opt,name=optional_duration,json=optionalDuration" json:"optional_duration,omitempty"` - OptionalTimestamp *google_protobuf4.Timestamp `protobuf:"bytes,302,opt,name=optional_timestamp,json=optionalTimestamp" json:"optional_timestamp,omitempty"` - OptionalFieldMask *google_protobuf2.FieldMask `protobuf:"bytes,303,opt,name=optional_field_mask,json=optionalFieldMask" json:"optional_field_mask,omitempty"` - OptionalStruct *google_protobuf3.Struct `protobuf:"bytes,304,opt,name=optional_struct,json=optionalStruct" json:"optional_struct,omitempty"` - OptionalAny *google_protobuf.Any `protobuf:"bytes,305,opt,name=optional_any,json=optionalAny" json:"optional_any,omitempty"` - OptionalValue *google_protobuf3.Value `protobuf:"bytes,306,opt,name=optional_value,json=optionalValue" json:"optional_value,omitempty"` - RepeatedDuration []*google_protobuf1.Duration `protobuf:"bytes,311,rep,name=repeated_duration,json=repeatedDuration" json:"repeated_duration,omitempty"` - RepeatedTimestamp []*google_protobuf4.Timestamp `protobuf:"bytes,312,rep,name=repeated_timestamp,json=repeatedTimestamp" json:"repeated_timestamp,omitempty"` - RepeatedFieldmask []*google_protobuf2.FieldMask `protobuf:"bytes,313,rep,name=repeated_fieldmask,json=repeatedFieldmask" json:"repeated_fieldmask,omitempty"` - RepeatedStruct []*google_protobuf3.Struct `protobuf:"bytes,324,rep,name=repeated_struct,json=repeatedStruct" json:"repeated_struct,omitempty"` - RepeatedAny []*google_protobuf.Any `protobuf:"bytes,315,rep,name=repeated_any,json=repeatedAny" json:"repeated_any,omitempty"` - RepeatedValue []*google_protobuf3.Value `protobuf:"bytes,316,rep,name=repeated_value,json=repeatedValue" json:"repeated_value,omitempty"` + OptionalBoolWrapper *wrappers.BoolValue 
`protobuf:"bytes,201,opt,name=optional_bool_wrapper,json=optionalBoolWrapper" json:"optional_bool_wrapper,omitempty"` + OptionalInt32Wrapper *wrappers.Int32Value `protobuf:"bytes,202,opt,name=optional_int32_wrapper,json=optionalInt32Wrapper" json:"optional_int32_wrapper,omitempty"` + OptionalInt64Wrapper *wrappers.Int64Value `protobuf:"bytes,203,opt,name=optional_int64_wrapper,json=optionalInt64Wrapper" json:"optional_int64_wrapper,omitempty"` + OptionalUint32Wrapper *wrappers.UInt32Value `protobuf:"bytes,204,opt,name=optional_uint32_wrapper,json=optionalUint32Wrapper" json:"optional_uint32_wrapper,omitempty"` + OptionalUint64Wrapper *wrappers.UInt64Value `protobuf:"bytes,205,opt,name=optional_uint64_wrapper,json=optionalUint64Wrapper" json:"optional_uint64_wrapper,omitempty"` + OptionalFloatWrapper *wrappers.FloatValue `protobuf:"bytes,206,opt,name=optional_float_wrapper,json=optionalFloatWrapper" json:"optional_float_wrapper,omitempty"` + OptionalDoubleWrapper *wrappers.DoubleValue `protobuf:"bytes,207,opt,name=optional_double_wrapper,json=optionalDoubleWrapper" json:"optional_double_wrapper,omitempty"` + OptionalStringWrapper *wrappers.StringValue `protobuf:"bytes,208,opt,name=optional_string_wrapper,json=optionalStringWrapper" json:"optional_string_wrapper,omitempty"` + OptionalBytesWrapper *wrappers.BytesValue `protobuf:"bytes,209,opt,name=optional_bytes_wrapper,json=optionalBytesWrapper" json:"optional_bytes_wrapper,omitempty"` + RepeatedBoolWrapper []*wrappers.BoolValue `protobuf:"bytes,211,rep,name=repeated_bool_wrapper,json=repeatedBoolWrapper" json:"repeated_bool_wrapper,omitempty"` + RepeatedInt32Wrapper []*wrappers.Int32Value `protobuf:"bytes,212,rep,name=repeated_int32_wrapper,json=repeatedInt32Wrapper" json:"repeated_int32_wrapper,omitempty"` + RepeatedInt64Wrapper []*wrappers.Int64Value `protobuf:"bytes,213,rep,name=repeated_int64_wrapper,json=repeatedInt64Wrapper" json:"repeated_int64_wrapper,omitempty"` + RepeatedUint32Wrapper []*wrappers.UInt32Value `protobuf:"bytes,214,rep,name=repeated_uint32_wrapper,json=repeatedUint32Wrapper" json:"repeated_uint32_wrapper,omitempty"` + RepeatedUint64Wrapper []*wrappers.UInt64Value `protobuf:"bytes,215,rep,name=repeated_uint64_wrapper,json=repeatedUint64Wrapper" json:"repeated_uint64_wrapper,omitempty"` + RepeatedFloatWrapper []*wrappers.FloatValue `protobuf:"bytes,216,rep,name=repeated_float_wrapper,json=repeatedFloatWrapper" json:"repeated_float_wrapper,omitempty"` + RepeatedDoubleWrapper []*wrappers.DoubleValue `protobuf:"bytes,217,rep,name=repeated_double_wrapper,json=repeatedDoubleWrapper" json:"repeated_double_wrapper,omitempty"` + RepeatedStringWrapper []*wrappers.StringValue `protobuf:"bytes,218,rep,name=repeated_string_wrapper,json=repeatedStringWrapper" json:"repeated_string_wrapper,omitempty"` + RepeatedBytesWrapper []*wrappers.BytesValue `protobuf:"bytes,219,rep,name=repeated_bytes_wrapper,json=repeatedBytesWrapper" json:"repeated_bytes_wrapper,omitempty"` + OptionalDuration *duration.Duration `protobuf:"bytes,301,opt,name=optional_duration,json=optionalDuration" json:"optional_duration,omitempty"` + OptionalTimestamp *timestamp.Timestamp `protobuf:"bytes,302,opt,name=optional_timestamp,json=optionalTimestamp" json:"optional_timestamp,omitempty"` + OptionalFieldMask *field_mask.FieldMask `protobuf:"bytes,303,opt,name=optional_field_mask,json=optionalFieldMask" json:"optional_field_mask,omitempty"` + OptionalStruct *_struct.Struct `protobuf:"bytes,304,opt,name=optional_struct,json=optionalStruct" 
json:"optional_struct,omitempty"` + OptionalAny *any.Any `protobuf:"bytes,305,opt,name=optional_any,json=optionalAny" json:"optional_any,omitempty"` + OptionalValue *_struct.Value `protobuf:"bytes,306,opt,name=optional_value,json=optionalValue" json:"optional_value,omitempty"` + RepeatedDuration []*duration.Duration `protobuf:"bytes,311,rep,name=repeated_duration,json=repeatedDuration" json:"repeated_duration,omitempty"` + RepeatedTimestamp []*timestamp.Timestamp `protobuf:"bytes,312,rep,name=repeated_timestamp,json=repeatedTimestamp" json:"repeated_timestamp,omitempty"` + RepeatedFieldmask []*field_mask.FieldMask `protobuf:"bytes,313,rep,name=repeated_fieldmask,json=repeatedFieldmask" json:"repeated_fieldmask,omitempty"` + RepeatedStruct []*_struct.Struct `protobuf:"bytes,324,rep,name=repeated_struct,json=repeatedStruct" json:"repeated_struct,omitempty"` + RepeatedAny []*any.Any `protobuf:"bytes,315,rep,name=repeated_any,json=repeatedAny" json:"repeated_any,omitempty"` + RepeatedValue []*_struct.Value `protobuf:"bytes,316,rep,name=repeated_value,json=repeatedValue" json:"repeated_value,omitempty"` // Test field-name-to-JSON-name convention. - // (protobuf says names can be any valid C/C++ identifier.) - Fieldname1 int32 `protobuf:"varint,401,opt,name=fieldname1" json:"fieldname1,omitempty"` - FieldName2 int32 `protobuf:"varint,402,opt,name=field_name2,json=fieldName2" json:"field_name2,omitempty"` - XFieldName3 int32 `protobuf:"varint,403,opt,name=_field_name3,json=FieldName3" json:"_field_name3,omitempty"` - Field_Name4_ int32 `protobuf:"varint,404,opt,name=field__name4_,json=fieldName4" json:"field__name4_,omitempty"` - Field0Name5 int32 `protobuf:"varint,405,opt,name=field0name5" json:"field0name5,omitempty"` - Field_0Name6 int32 `protobuf:"varint,406,opt,name=field_0_name6,json=field0Name6" json:"field_0_name6,omitempty"` - FieldName7 int32 `protobuf:"varint,407,opt,name=fieldName7" json:"fieldName7,omitempty"` - FieldName8 int32 `protobuf:"varint,408,opt,name=FieldName8" json:"FieldName8,omitempty"` - Field_Name9 int32 `protobuf:"varint,409,opt,name=field_Name9,json=fieldName9" json:"field_Name9,omitempty"` - Field_Name10 int32 `protobuf:"varint,410,opt,name=Field_Name10,json=FieldName10" json:"Field_Name10,omitempty"` - FIELD_NAME11 int32 `protobuf:"varint,411,opt,name=FIELD_NAME11,json=FIELDNAME11" json:"FIELD_NAME11,omitempty"` - FIELDName12 int32 `protobuf:"varint,412,opt,name=FIELD_name12,json=FIELDName12" json:"FIELD_name12,omitempty"` - XFieldName13 int32 `protobuf:"varint,413,opt,name=__field_name13,json=FieldName13" json:"__field_name13,omitempty"` - X_FieldName14 int32 `protobuf:"varint,414,opt,name=__Field_name14,json=FieldName14" json:"__Field_name14,omitempty"` - Field_Name15 int32 `protobuf:"varint,415,opt,name=field__name15,json=fieldName15" json:"field__name15,omitempty"` - Field__Name16 int32 `protobuf:"varint,416,opt,name=field__Name16,json=fieldName16" json:"field__Name16,omitempty"` - FieldName17__ int32 `protobuf:"varint,417,opt,name=field_name17__,json=fieldName17" json:"field_name17__,omitempty"` - FieldName18__ int32 `protobuf:"varint,418,opt,name=Field_name18__,json=FieldName18" json:"Field_name18__,omitempty"` + Fieldname1 int32 `protobuf:"varint,401,opt,name=fieldname1" json:"fieldname1,omitempty"` + FieldName2 int32 `protobuf:"varint,402,opt,name=field_name2,json=fieldName2" json:"field_name2,omitempty"` + XFieldName3 int32 `protobuf:"varint,403,opt,name=_field_name3,json=FieldName3" json:"_field_name3,omitempty"` + Field_Name4_ int32 
`protobuf:"varint,404,opt,name=field__name4_,json=fieldName4" json:"field__name4_,omitempty"` + Field0Name5 int32 `protobuf:"varint,405,opt,name=field0name5" json:"field0name5,omitempty"` + Field_0Name6 int32 `protobuf:"varint,406,opt,name=field_0_name6,json=field0Name6" json:"field_0_name6,omitempty"` + FieldName7 int32 `protobuf:"varint,407,opt,name=fieldName7" json:"fieldName7,omitempty"` + FieldName8 int32 `protobuf:"varint,408,opt,name=FieldName8" json:"FieldName8,omitempty"` + Field_Name9 int32 `protobuf:"varint,409,opt,name=field_Name9,json=fieldName9" json:"field_Name9,omitempty"` + Field_Name10 int32 `protobuf:"varint,410,opt,name=Field_Name10,json=FieldName10" json:"Field_Name10,omitempty"` + FIELD_NAME11 int32 `protobuf:"varint,411,opt,name=FIELD_NAME11,json=FIELDNAME11" json:"FIELD_NAME11,omitempty"` + FIELDName12 int32 `protobuf:"varint,412,opt,name=FIELD_name12,json=FIELDName12" json:"FIELD_name12,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *TestAllTypes) Reset() { *m = TestAllTypes{} } -func (m *TestAllTypes) String() string { return proto.CompactTextString(m) } -func (*TestAllTypes) ProtoMessage() {} -func (*TestAllTypes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *TestAllTypes) Reset() { *m = TestAllTypes{} } +func (m *TestAllTypes) String() string { return proto.CompactTextString(m) } +func (*TestAllTypes) ProtoMessage() {} +func (*TestAllTypes) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{2} +} +func (m *TestAllTypes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestAllTypes.Unmarshal(m, b) +} +func (m *TestAllTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestAllTypes.Marshal(b, m, deterministic) +} +func (dst *TestAllTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestAllTypes.Merge(dst, src) +} +func (m *TestAllTypes) XXX_Size() int { + return xxx_messageInfo_TestAllTypes.Size(m) +} +func (m *TestAllTypes) XXX_DiscardUnknown() { + xxx_messageInfo_TestAllTypes.DiscardUnknown(m) +} + +var xxx_messageInfo_TestAllTypes proto.InternalMessageInfo type isTestAllTypes_OneofField interface { isTestAllTypes_OneofField() @@ -616,31 +664,11 @@ type TestAllTypes_OneofString struct { type TestAllTypes_OneofBytes struct { OneofBytes []byte `protobuf:"bytes,114,opt,name=oneof_bytes,json=oneofBytes,proto3,oneof"` } -type TestAllTypes_OneofBool struct { - OneofBool bool `protobuf:"varint,115,opt,name=oneof_bool,json=oneofBool,oneof"` -} -type TestAllTypes_OneofUint64 struct { - OneofUint64 uint64 `protobuf:"varint,116,opt,name=oneof_uint64,json=oneofUint64,oneof"` -} -type TestAllTypes_OneofFloat struct { - OneofFloat float32 `protobuf:"fixed32,117,opt,name=oneof_float,json=oneofFloat,oneof"` -} -type TestAllTypes_OneofDouble struct { - OneofDouble float64 `protobuf:"fixed64,118,opt,name=oneof_double,json=oneofDouble,oneof"` -} -type TestAllTypes_OneofEnum struct { - OneofEnum TestAllTypes_NestedEnum `protobuf:"varint,119,opt,name=oneof_enum,json=oneofEnum,enum=conformance.TestAllTypes_NestedEnum,oneof"` -} func (*TestAllTypes_OneofUint32) isTestAllTypes_OneofField() {} func (*TestAllTypes_OneofNestedMessage) isTestAllTypes_OneofField() {} func (*TestAllTypes_OneofString) isTestAllTypes_OneofField() {} func (*TestAllTypes_OneofBytes) isTestAllTypes_OneofField() {} -func (*TestAllTypes_OneofBool) isTestAllTypes_OneofField() {} -func 
(*TestAllTypes_OneofUint64) isTestAllTypes_OneofField() {} -func (*TestAllTypes_OneofFloat) isTestAllTypes_OneofField() {} -func (*TestAllTypes_OneofDouble) isTestAllTypes_OneofField() {} -func (*TestAllTypes_OneofEnum) isTestAllTypes_OneofField() {} func (m *TestAllTypes) GetOneofField() isTestAllTypes_OneofField { if m != nil { @@ -1111,245 +1139,210 @@ func (m *TestAllTypes) GetOneofBytes() []byte { return nil } -func (m *TestAllTypes) GetOneofBool() bool { - if x, ok := m.GetOneofField().(*TestAllTypes_OneofBool); ok { - return x.OneofBool - } - return false -} - -func (m *TestAllTypes) GetOneofUint64() uint64 { - if x, ok := m.GetOneofField().(*TestAllTypes_OneofUint64); ok { - return x.OneofUint64 - } - return 0 -} - -func (m *TestAllTypes) GetOneofFloat() float32 { - if x, ok := m.GetOneofField().(*TestAllTypes_OneofFloat); ok { - return x.OneofFloat - } - return 0 -} - -func (m *TestAllTypes) GetOneofDouble() float64 { - if x, ok := m.GetOneofField().(*TestAllTypes_OneofDouble); ok { - return x.OneofDouble - } - return 0 -} - -func (m *TestAllTypes) GetOneofEnum() TestAllTypes_NestedEnum { - if x, ok := m.GetOneofField().(*TestAllTypes_OneofEnum); ok { - return x.OneofEnum - } - return TestAllTypes_FOO -} - -func (m *TestAllTypes) GetOptionalBoolWrapper() *google_protobuf5.BoolValue { +func (m *TestAllTypes) GetOptionalBoolWrapper() *wrappers.BoolValue { if m != nil { return m.OptionalBoolWrapper } return nil } -func (m *TestAllTypes) GetOptionalInt32Wrapper() *google_protobuf5.Int32Value { +func (m *TestAllTypes) GetOptionalInt32Wrapper() *wrappers.Int32Value { if m != nil { return m.OptionalInt32Wrapper } return nil } -func (m *TestAllTypes) GetOptionalInt64Wrapper() *google_protobuf5.Int64Value { +func (m *TestAllTypes) GetOptionalInt64Wrapper() *wrappers.Int64Value { if m != nil { return m.OptionalInt64Wrapper } return nil } -func (m *TestAllTypes) GetOptionalUint32Wrapper() *google_protobuf5.UInt32Value { +func (m *TestAllTypes) GetOptionalUint32Wrapper() *wrappers.UInt32Value { if m != nil { return m.OptionalUint32Wrapper } return nil } -func (m *TestAllTypes) GetOptionalUint64Wrapper() *google_protobuf5.UInt64Value { +func (m *TestAllTypes) GetOptionalUint64Wrapper() *wrappers.UInt64Value { if m != nil { return m.OptionalUint64Wrapper } return nil } -func (m *TestAllTypes) GetOptionalFloatWrapper() *google_protobuf5.FloatValue { +func (m *TestAllTypes) GetOptionalFloatWrapper() *wrappers.FloatValue { if m != nil { return m.OptionalFloatWrapper } return nil } -func (m *TestAllTypes) GetOptionalDoubleWrapper() *google_protobuf5.DoubleValue { +func (m *TestAllTypes) GetOptionalDoubleWrapper() *wrappers.DoubleValue { if m != nil { return m.OptionalDoubleWrapper } return nil } -func (m *TestAllTypes) GetOptionalStringWrapper() *google_protobuf5.StringValue { +func (m *TestAllTypes) GetOptionalStringWrapper() *wrappers.StringValue { if m != nil { return m.OptionalStringWrapper } return nil } -func (m *TestAllTypes) GetOptionalBytesWrapper() *google_protobuf5.BytesValue { +func (m *TestAllTypes) GetOptionalBytesWrapper() *wrappers.BytesValue { if m != nil { return m.OptionalBytesWrapper } return nil } -func (m *TestAllTypes) GetRepeatedBoolWrapper() []*google_protobuf5.BoolValue { +func (m *TestAllTypes) GetRepeatedBoolWrapper() []*wrappers.BoolValue { if m != nil { return m.RepeatedBoolWrapper } return nil } -func (m *TestAllTypes) GetRepeatedInt32Wrapper() []*google_protobuf5.Int32Value { +func (m *TestAllTypes) GetRepeatedInt32Wrapper() []*wrappers.Int32Value { if m != nil { 
return m.RepeatedInt32Wrapper } return nil } -func (m *TestAllTypes) GetRepeatedInt64Wrapper() []*google_protobuf5.Int64Value { +func (m *TestAllTypes) GetRepeatedInt64Wrapper() []*wrappers.Int64Value { if m != nil { return m.RepeatedInt64Wrapper } return nil } -func (m *TestAllTypes) GetRepeatedUint32Wrapper() []*google_protobuf5.UInt32Value { +func (m *TestAllTypes) GetRepeatedUint32Wrapper() []*wrappers.UInt32Value { if m != nil { return m.RepeatedUint32Wrapper } return nil } -func (m *TestAllTypes) GetRepeatedUint64Wrapper() []*google_protobuf5.UInt64Value { +func (m *TestAllTypes) GetRepeatedUint64Wrapper() []*wrappers.UInt64Value { if m != nil { return m.RepeatedUint64Wrapper } return nil } -func (m *TestAllTypes) GetRepeatedFloatWrapper() []*google_protobuf5.FloatValue { +func (m *TestAllTypes) GetRepeatedFloatWrapper() []*wrappers.FloatValue { if m != nil { return m.RepeatedFloatWrapper } return nil } -func (m *TestAllTypes) GetRepeatedDoubleWrapper() []*google_protobuf5.DoubleValue { +func (m *TestAllTypes) GetRepeatedDoubleWrapper() []*wrappers.DoubleValue { if m != nil { return m.RepeatedDoubleWrapper } return nil } -func (m *TestAllTypes) GetRepeatedStringWrapper() []*google_protobuf5.StringValue { +func (m *TestAllTypes) GetRepeatedStringWrapper() []*wrappers.StringValue { if m != nil { return m.RepeatedStringWrapper } return nil } -func (m *TestAllTypes) GetRepeatedBytesWrapper() []*google_protobuf5.BytesValue { +func (m *TestAllTypes) GetRepeatedBytesWrapper() []*wrappers.BytesValue { if m != nil { return m.RepeatedBytesWrapper } return nil } -func (m *TestAllTypes) GetOptionalDuration() *google_protobuf1.Duration { +func (m *TestAllTypes) GetOptionalDuration() *duration.Duration { if m != nil { return m.OptionalDuration } return nil } -func (m *TestAllTypes) GetOptionalTimestamp() *google_protobuf4.Timestamp { +func (m *TestAllTypes) GetOptionalTimestamp() *timestamp.Timestamp { if m != nil { return m.OptionalTimestamp } return nil } -func (m *TestAllTypes) GetOptionalFieldMask() *google_protobuf2.FieldMask { +func (m *TestAllTypes) GetOptionalFieldMask() *field_mask.FieldMask { if m != nil { return m.OptionalFieldMask } return nil } -func (m *TestAllTypes) GetOptionalStruct() *google_protobuf3.Struct { +func (m *TestAllTypes) GetOptionalStruct() *_struct.Struct { if m != nil { return m.OptionalStruct } return nil } -func (m *TestAllTypes) GetOptionalAny() *google_protobuf.Any { +func (m *TestAllTypes) GetOptionalAny() *any.Any { if m != nil { return m.OptionalAny } return nil } -func (m *TestAllTypes) GetOptionalValue() *google_protobuf3.Value { +func (m *TestAllTypes) GetOptionalValue() *_struct.Value { if m != nil { return m.OptionalValue } return nil } -func (m *TestAllTypes) GetRepeatedDuration() []*google_protobuf1.Duration { +func (m *TestAllTypes) GetRepeatedDuration() []*duration.Duration { if m != nil { return m.RepeatedDuration } return nil } -func (m *TestAllTypes) GetRepeatedTimestamp() []*google_protobuf4.Timestamp { +func (m *TestAllTypes) GetRepeatedTimestamp() []*timestamp.Timestamp { if m != nil { return m.RepeatedTimestamp } return nil } -func (m *TestAllTypes) GetRepeatedFieldmask() []*google_protobuf2.FieldMask { +func (m *TestAllTypes) GetRepeatedFieldmask() []*field_mask.FieldMask { if m != nil { return m.RepeatedFieldmask } return nil } -func (m *TestAllTypes) GetRepeatedStruct() []*google_protobuf3.Struct { +func (m *TestAllTypes) GetRepeatedStruct() []*_struct.Struct { if m != nil { return m.RepeatedStruct } return nil } -func (m *TestAllTypes) 
GetRepeatedAny() []*google_protobuf.Any { +func (m *TestAllTypes) GetRepeatedAny() []*any.Any { if m != nil { return m.RepeatedAny } return nil } -func (m *TestAllTypes) GetRepeatedValue() []*google_protobuf3.Value { +func (m *TestAllTypes) GetRepeatedValue() []*_struct.Value { if m != nil { return m.RepeatedValue } @@ -1440,48 +1433,6 @@ func (m *TestAllTypes) GetFIELDName12() int32 { return 0 } -func (m *TestAllTypes) GetXFieldName13() int32 { - if m != nil { - return m.XFieldName13 - } - return 0 -} - -func (m *TestAllTypes) GetX_FieldName14() int32 { - if m != nil { - return m.X_FieldName14 - } - return 0 -} - -func (m *TestAllTypes) GetField_Name15() int32 { - if m != nil { - return m.Field_Name15 - } - return 0 -} - -func (m *TestAllTypes) GetField__Name16() int32 { - if m != nil { - return m.Field__Name16 - } - return 0 -} - -func (m *TestAllTypes) GetFieldName17__() int32 { - if m != nil { - return m.FieldName17__ - } - return 0 -} - -func (m *TestAllTypes) GetFieldName18__() int32 { - if m != nil { - return m.FieldName18__ - } - return 0 -} - // XXX_OneofFuncs is for the internal use of the proto package. func (*TestAllTypes) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { return _TestAllTypes_OneofMarshaler, _TestAllTypes_OneofUnmarshaler, _TestAllTypes_OneofSizer, []interface{}{ @@ -1489,11 +1440,6 @@ func (*TestAllTypes) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) (*TestAllTypes_OneofNestedMessage)(nil), (*TestAllTypes_OneofString)(nil), (*TestAllTypes_OneofBytes)(nil), - (*TestAllTypes_OneofBool)(nil), - (*TestAllTypes_OneofUint64)(nil), - (*TestAllTypes_OneofFloat)(nil), - (*TestAllTypes_OneofDouble)(nil), - (*TestAllTypes_OneofEnum)(nil), } } @@ -1515,25 +1461,6 @@ func _TestAllTypes_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { case *TestAllTypes_OneofBytes: b.EncodeVarint(114<<3 | proto.WireBytes) b.EncodeRawBytes(x.OneofBytes) - case *TestAllTypes_OneofBool: - t := uint64(0) - if x.OneofBool { - t = 1 - } - b.EncodeVarint(115<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *TestAllTypes_OneofUint64: - b.EncodeVarint(116<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.OneofUint64)) - case *TestAllTypes_OneofFloat: - b.EncodeVarint(117<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.OneofFloat))) - case *TestAllTypes_OneofDouble: - b.EncodeVarint(118<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.OneofDouble)) - case *TestAllTypes_OneofEnum: - b.EncodeVarint(119<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.OneofEnum)) case nil: default: return fmt.Errorf("TestAllTypes.OneofField has unexpected type %T", x) @@ -1573,41 +1500,6 @@ func _TestAllTypes_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.B x, err := b.DecodeRawBytes(true) m.OneofField = &TestAllTypes_OneofBytes{x} return true, err - case 115: // oneof_field.oneof_bool - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.OneofField = &TestAllTypes_OneofBool{x != 0} - return true, err - case 116: // oneof_field.oneof_uint64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.OneofField = &TestAllTypes_OneofUint64{x} - return true, err - case 117: // oneof_field.oneof_float - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - 
m.OneofField = &TestAllTypes_OneofFloat{math.Float32frombits(uint32(x))} - return true, err - case 118: // oneof_field.oneof_double - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.OneofField = &TestAllTypes_OneofDouble{math.Float64frombits(x)} - return true, err - case 119: // oneof_field.oneof_enum - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.OneofField = &TestAllTypes_OneofEnum{TestAllTypes_NestedEnum(x)} - return true, err default: return false, nil } @@ -1618,36 +1510,21 @@ func _TestAllTypes_OneofSizer(msg proto.Message) (n int) { // oneof_field switch x := m.OneofField.(type) { case *TestAllTypes_OneofUint32: - n += proto.SizeVarint(111<<3 | proto.WireVarint) + n += 2 // tag and wire n += proto.SizeVarint(uint64(x.OneofUint32)) case *TestAllTypes_OneofNestedMessage: s := proto.Size(x.OneofNestedMessage) - n += proto.SizeVarint(112<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *TestAllTypes_OneofString: - n += proto.SizeVarint(113<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(len(x.OneofString))) n += len(x.OneofString) case *TestAllTypes_OneofBytes: - n += proto.SizeVarint(114<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(len(x.OneofBytes))) n += len(x.OneofBytes) - case *TestAllTypes_OneofBool: - n += proto.SizeVarint(115<<3 | proto.WireVarint) - n += 1 - case *TestAllTypes_OneofUint64: - n += proto.SizeVarint(116<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.OneofUint64)) - case *TestAllTypes_OneofFloat: - n += proto.SizeVarint(117<<3 | proto.WireFixed32) - n += 4 - case *TestAllTypes_OneofDouble: - n += proto.SizeVarint(118<<3 | proto.WireFixed64) - n += 8 - case *TestAllTypes_OneofEnum: - n += proto.SizeVarint(119<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.OneofEnum)) case nil: default: panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) @@ -1656,14 +1533,36 @@ func _TestAllTypes_OneofSizer(msg proto.Message) (n int) { } type TestAllTypes_NestedMessage struct { - A int32 `protobuf:"varint,1,opt,name=a" json:"a,omitempty"` - Corecursive *TestAllTypes `protobuf:"bytes,2,opt,name=corecursive" json:"corecursive,omitempty"` + A int32 `protobuf:"varint,1,opt,name=a" json:"a,omitempty"` + Corecursive *TestAllTypes `protobuf:"bytes,2,opt,name=corecursive" json:"corecursive,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *TestAllTypes_NestedMessage) Reset() { *m = TestAllTypes_NestedMessage{} } -func (m *TestAllTypes_NestedMessage) String() string { return proto.CompactTextString(m) } -func (*TestAllTypes_NestedMessage) ProtoMessage() {} -func (*TestAllTypes_NestedMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } +func (m *TestAllTypes_NestedMessage) Reset() { *m = TestAllTypes_NestedMessage{} } +func (m *TestAllTypes_NestedMessage) String() string { return proto.CompactTextString(m) } +func (*TestAllTypes_NestedMessage) ProtoMessage() {} +func (*TestAllTypes_NestedMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{2, 0} +} +func (m *TestAllTypes_NestedMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestAllTypes_NestedMessage.Unmarshal(m, b) +} +func (m *TestAllTypes_NestedMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + 
return xxx_messageInfo_TestAllTypes_NestedMessage.Marshal(b, m, deterministic) +} +func (dst *TestAllTypes_NestedMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestAllTypes_NestedMessage.Merge(dst, src) +} +func (m *TestAllTypes_NestedMessage) XXX_Size() int { + return xxx_messageInfo_TestAllTypes_NestedMessage.Size(m) +} +func (m *TestAllTypes_NestedMessage) XXX_DiscardUnknown() { + xxx_messageInfo_TestAllTypes_NestedMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_TestAllTypes_NestedMessage proto.InternalMessageInfo func (m *TestAllTypes_NestedMessage) GetA() int32 { if m != nil { @@ -1680,13 +1579,35 @@ func (m *TestAllTypes_NestedMessage) GetCorecursive() *TestAllTypes { } type ForeignMessage struct { - C int32 `protobuf:"varint,1,opt,name=c" json:"c,omitempty"` + C int32 `protobuf:"varint,1,opt,name=c" json:"c,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ForeignMessage) Reset() { *m = ForeignMessage{} } -func (m *ForeignMessage) String() string { return proto.CompactTextString(m) } -func (*ForeignMessage) ProtoMessage() {} -func (*ForeignMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *ForeignMessage) Reset() { *m = ForeignMessage{} } +func (m *ForeignMessage) String() string { return proto.CompactTextString(m) } +func (*ForeignMessage) ProtoMessage() {} +func (*ForeignMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_48ac832451f5d6c3, []int{3} +} +func (m *ForeignMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ForeignMessage.Unmarshal(m, b) +} +func (m *ForeignMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ForeignMessage.Marshal(b, m, deterministic) +} +func (dst *ForeignMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ForeignMessage.Merge(dst, src) +} +func (m *ForeignMessage) XXX_Size() int { + return xxx_messageInfo_ForeignMessage.Size(m) +} +func (m *ForeignMessage) XXX_DiscardUnknown() { + xxx_messageInfo_ForeignMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_ForeignMessage proto.InternalMessageInfo func (m *ForeignMessage) GetC() int32 { if m != nil { @@ -1699,6 +1620,25 @@ func init() { proto.RegisterType((*ConformanceRequest)(nil), "conformance.ConformanceRequest") proto.RegisterType((*ConformanceResponse)(nil), "conformance.ConformanceResponse") proto.RegisterType((*TestAllTypes)(nil), "conformance.TestAllTypes") + proto.RegisterMapType((map[bool]bool)(nil), "conformance.TestAllTypes.MapBoolBoolEntry") + proto.RegisterMapType((map[uint32]uint32)(nil), "conformance.TestAllTypes.MapFixed32Fixed32Entry") + proto.RegisterMapType((map[uint64]uint64)(nil), "conformance.TestAllTypes.MapFixed64Fixed64Entry") + proto.RegisterMapType((map[int32]float64)(nil), "conformance.TestAllTypes.MapInt32DoubleEntry") + proto.RegisterMapType((map[int32]float32)(nil), "conformance.TestAllTypes.MapInt32FloatEntry") + proto.RegisterMapType((map[int32]int32)(nil), "conformance.TestAllTypes.MapInt32Int32Entry") + proto.RegisterMapType((map[int64]int64)(nil), "conformance.TestAllTypes.MapInt64Int64Entry") + proto.RegisterMapType((map[int32]int32)(nil), "conformance.TestAllTypes.MapSfixed32Sfixed32Entry") + proto.RegisterMapType((map[int64]int64)(nil), "conformance.TestAllTypes.MapSfixed64Sfixed64Entry") + proto.RegisterMapType((map[int32]int32)(nil), "conformance.TestAllTypes.MapSint32Sint32Entry") + proto.RegisterMapType((map[int64]int64)(nil), 
"conformance.TestAllTypes.MapSint64Sint64Entry") + proto.RegisterMapType((map[string][]byte)(nil), "conformance.TestAllTypes.MapStringBytesEntry") + proto.RegisterMapType((map[string]ForeignEnum)(nil), "conformance.TestAllTypes.MapStringForeignEnumEntry") + proto.RegisterMapType((map[string]*ForeignMessage)(nil), "conformance.TestAllTypes.MapStringForeignMessageEntry") + proto.RegisterMapType((map[string]TestAllTypes_NestedEnum)(nil), "conformance.TestAllTypes.MapStringNestedEnumEntry") + proto.RegisterMapType((map[string]*TestAllTypes_NestedMessage)(nil), "conformance.TestAllTypes.MapStringNestedMessageEntry") + proto.RegisterMapType((map[string]string)(nil), "conformance.TestAllTypes.MapStringStringEntry") + proto.RegisterMapType((map[uint32]uint32)(nil), "conformance.TestAllTypes.MapUint32Uint32Entry") + proto.RegisterMapType((map[uint64]uint64)(nil), "conformance.TestAllTypes.MapUint64Uint64Entry") proto.RegisterType((*TestAllTypes_NestedMessage)(nil), "conformance.TestAllTypes.NestedMessage") proto.RegisterType((*ForeignMessage)(nil), "conformance.ForeignMessage") proto.RegisterEnum("conformance.WireFormat", WireFormat_name, WireFormat_value) @@ -1706,180 +1646,171 @@ func init() { proto.RegisterEnum("conformance.TestAllTypes_NestedEnum", TestAllTypes_NestedEnum_name, TestAllTypes_NestedEnum_value) } -func init() { proto.RegisterFile("conformance_proto/conformance.proto", fileDescriptor0) } +func init() { proto.RegisterFile("conformance.proto", fileDescriptor_conformance_48ac832451f5d6c3) } -var fileDescriptor0 = []byte{ - // 2737 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5a, 0xd9, 0x72, 0xdb, 0xc8, - 0xd5, 0x16, 0x08, 0x59, 0x4b, 0x93, 0x92, 0xa8, 0xd6, 0xd6, 0x96, 0x5d, 0x63, 0x58, 0xb2, 0x7f, - 0xd3, 0xf6, 0x8c, 0xac, 0x05, 0x86, 0x65, 0xcf, 0x3f, 0x8e, 0x45, 0x9b, 0xb4, 0xe4, 0x8c, 0x25, - 0x17, 0x64, 0x8d, 0xab, 0x9c, 0x0b, 0x06, 0xa6, 0x20, 0x15, 0xc7, 0x24, 0xc1, 0x01, 0x48, 0x4f, - 0x94, 0xcb, 0xbc, 0x41, 0xf6, 0x7d, 0xbd, 0xcf, 0x7a, 0x93, 0xa4, 0x92, 0xab, 0x54, 0x6e, 0xb2, - 0x27, 0x95, 0x3d, 0x79, 0x85, 0xbc, 0x43, 0x52, 0xbd, 0xa2, 0xbb, 0x01, 0x50, 0xf4, 0x54, 0x0d, - 0x25, 0x1e, 0x7c, 0xfd, 0x9d, 0xd3, 0xe7, 0x1c, 0x7c, 0x2d, 0x1c, 0x18, 0x2c, 0xd7, 0x83, 0xf6, - 0x51, 0x10, 0xb6, 0xbc, 0x76, 0xdd, 0xaf, 0x75, 0xc2, 0xa0, 0x1b, 0xdc, 0x90, 0x2c, 0x2b, 0xc4, - 0x02, 0xf3, 0x92, 0x69, 0xf1, 0xec, 0x71, 0x10, 0x1c, 0x37, 0xfd, 0x1b, 0xe4, 0xd2, 0x8b, 0xde, - 0xd1, 0x0d, 0xaf, 0x7d, 0x42, 0x71, 0x8b, 0x6f, 0xe8, 0x97, 0x0e, 0x7b, 0xa1, 0xd7, 0x6d, 0x04, - 0x6d, 0x76, 0xdd, 0xd2, 0xaf, 0x1f, 0x35, 0xfc, 0xe6, 0x61, 0xad, 0xe5, 0x45, 0x2f, 0x19, 0xe2, - 0xbc, 0x8e, 0x88, 0xba, 0x61, 0xaf, 0xde, 0x65, 0x57, 0x2f, 0xe8, 0x57, 0xbb, 0x8d, 0x96, 0x1f, - 0x75, 0xbd, 0x56, 0x27, 0x2b, 0x80, 0x0f, 0x43, 0xaf, 0xd3, 0xf1, 0xc3, 0x88, 0x5e, 0x5f, 0xfa, - 0x85, 0x01, 0xe0, 0xfd, 0x78, 0x2f, 0xae, 0xff, 0x41, 0xcf, 0x8f, 0xba, 0xf0, 0x3a, 0x28, 0xf2, - 0x15, 0xb5, 0x8e, 0x77, 0xd2, 0x0c, 0xbc, 0x43, 0x64, 0x58, 0x46, 0xa9, 0xb0, 0x3d, 0xe4, 0x4e, - 0xf1, 0x2b, 0x4f, 0xe8, 0x05, 0xb8, 0x0c, 0x0a, 0xef, 0x47, 0x41, 0x5b, 0x00, 0x73, 0x96, 0x51, - 0x1a, 0xdf, 0x1e, 0x72, 0xf3, 0xd8, 0xca, 0x41, 0x7b, 0x60, 0x21, 0xa4, 0xe4, 0xfe, 0x61, 0x2d, - 0xe8, 0x75, 0x3b, 0xbd, 0x6e, 0x8d, 0x78, 0xed, 0x22, 0xd3, 0x32, 0x4a, 0x93, 0xeb, 0x0b, 0x2b, - 0x72, 0x9a, 0x9f, 0x35, 0x42, 0xbf, 0x4a, 0x2e, 0xbb, 0x73, 0x62, 0xdd, 0x1e, 0x59, 0x46, 0xcd, - 0xe5, 0x71, 0x30, 0xca, 0x1c, 0x2e, 0x7d, 0x2a, 0x07, 0x66, 0x94, 0x4d, 0x44, 0x9d, 0xa0, 
0x1d, - 0xf9, 0xf0, 0x22, 0xc8, 0x77, 0xbc, 0x30, 0xf2, 0x6b, 0x7e, 0x18, 0x06, 0x21, 0xd9, 0x00, 0x8e, - 0x0b, 0x10, 0x63, 0x05, 0xdb, 0xe0, 0x55, 0x30, 0x15, 0xf9, 0x61, 0xc3, 0x6b, 0x36, 0x3e, 0xc9, - 0x61, 0x23, 0x0c, 0x36, 0x29, 0x2e, 0x50, 0xe8, 0x65, 0x30, 0x11, 0xf6, 0xda, 0x38, 0xc1, 0x0c, - 0xc8, 0xf7, 0x59, 0x60, 0x66, 0x0a, 0x4b, 0x4b, 0x9d, 0x39, 0x68, 0xea, 0x86, 0xd3, 0x52, 0xb7, - 0x08, 0x46, 0xa3, 0x97, 0x8d, 0x4e, 0xc7, 0x3f, 0x44, 0x67, 0xd8, 0x75, 0x6e, 0x28, 0x8f, 0x81, - 0x91, 0xd0, 0x8f, 0x7a, 0xcd, 0xee, 0xd2, 0x7f, 0xaa, 0xa0, 0xf0, 0xd4, 0x8f, 0xba, 0x5b, 0xcd, - 0xe6, 0xd3, 0x93, 0x8e, 0x1f, 0xc1, 0xcb, 0x60, 0x32, 0xe8, 0xe0, 0x5e, 0xf3, 0x9a, 0xb5, 0x46, - 0xbb, 0xbb, 0xb1, 0x4e, 0x12, 0x70, 0xc6, 0x9d, 0xe0, 0xd6, 0x1d, 0x6c, 0xd4, 0x61, 0x8e, 0x4d, - 0xf6, 0x65, 0x2a, 0x30, 0xc7, 0x86, 0x57, 0xc0, 0x94, 0x80, 0xf5, 0x28, 0x1d, 0xde, 0xd5, 0x84, - 0x2b, 0x56, 0x1f, 0x10, 0x6b, 0x02, 0xe8, 0xd8, 0x64, 0x57, 0xc3, 0x2a, 0x50, 0x63, 0x8c, 0x28, - 0x23, 0xde, 0xde, 0x74, 0x0c, 0xdc, 0x4f, 0x32, 0x46, 0x94, 0x11, 0xd7, 0x08, 0xaa, 0x40, 0xc7, - 0x86, 0x57, 0x41, 0x51, 0x00, 0x8f, 0x1a, 0x9f, 0xf0, 0x0f, 0x37, 0xd6, 0xd1, 0xa8, 0x65, 0x94, - 0x46, 0x5d, 0x41, 0x50, 0xa5, 0xe6, 0x24, 0xd4, 0xb1, 0xd1, 0x98, 0x65, 0x94, 0x46, 0x34, 0xa8, - 0x63, 0xc3, 0xeb, 0x60, 0x3a, 0x76, 0xcf, 0x69, 0xc7, 0x2d, 0xa3, 0x34, 0xe5, 0x0a, 0x8e, 0x7d, - 0x66, 0x4f, 0x01, 0x3b, 0x36, 0x02, 0x96, 0x51, 0x2a, 0xea, 0x60, 0xc7, 0x56, 0x52, 0x7f, 0xd4, - 0x0c, 0xbc, 0x2e, 0xca, 0x5b, 0x46, 0x29, 0x17, 0xa7, 0xbe, 0x8a, 0x8d, 0xca, 0xfe, 0x0f, 0x83, - 0xde, 0x8b, 0xa6, 0x8f, 0x0a, 0x96, 0x51, 0x32, 0xe2, 0xfd, 0x3f, 0x20, 0x56, 0xb8, 0x0c, 0xc4, - 0xca, 0xda, 0x8b, 0x20, 0x68, 0xa2, 0x09, 0xcb, 0x28, 0x8d, 0xb9, 0x05, 0x6e, 0x2c, 0x07, 0x41, - 0x53, 0xcd, 0x66, 0x37, 0x6c, 0xb4, 0x8f, 0xd1, 0x24, 0xee, 0x2a, 0x29, 0x9b, 0xc4, 0xaa, 0x44, - 0xf7, 0xe2, 0xa4, 0xeb, 0x47, 0x68, 0x0a, 0xb7, 0x71, 0x1c, 0x5d, 0x19, 0x1b, 0x61, 0x0d, 0x2c, - 0x08, 0x58, 0x9b, 0xde, 0xde, 0x2d, 0x3f, 0x8a, 0xbc, 0x63, 0x1f, 0x41, 0xcb, 0x28, 0xe5, 0xd7, - 0xaf, 0x28, 0x37, 0xb6, 0xdc, 0xa2, 0x2b, 0xbb, 0x04, 0xff, 0x98, 0xc2, 0xdd, 0x39, 0xce, 0xa3, - 0x98, 0xe1, 0x01, 0x40, 0x71, 0x96, 0x82, 0xd0, 0x6f, 0x1c, 0xb7, 0x85, 0x87, 0x19, 0xe2, 0xe1, - 0x9c, 0xe2, 0xa1, 0x4a, 0x31, 0x9c, 0x75, 0x5e, 0x24, 0x53, 0xb1, 0xc3, 0xf7, 0xc0, 0xac, 0x1e, - 0xb7, 0xdf, 0xee, 0xb5, 0xd0, 0x1c, 0x51, 0xa3, 0x4b, 0xa7, 0x05, 0x5d, 0x69, 0xf7, 0x5a, 0x2e, - 0x54, 0x23, 0xc6, 0x36, 0xf8, 0x2e, 0x98, 0x4b, 0x84, 0x4b, 0x88, 0xe7, 0x09, 0x31, 0x4a, 0x8b, - 0x95, 0x90, 0xcd, 0x68, 0x81, 0x12, 0x36, 0x47, 0x62, 0xa3, 0xd5, 0xaa, 0x75, 0x1a, 0x7e, 0xdd, - 0x47, 0x08, 0xd7, 0xac, 0x9c, 0x1b, 0xcb, 0xc5, 0xeb, 0x68, 0xdd, 0x9e, 0xe0, 0xcb, 0xf0, 0x8a, - 0xd4, 0x0a, 0xf5, 0x20, 0x3c, 0x44, 0x67, 0x19, 0xde, 0x88, 0xdb, 0xe1, 0x7e, 0x10, 0x1e, 0xc2, - 0x2a, 0x98, 0x0e, 0xfd, 0x7a, 0x2f, 0x8c, 0x1a, 0xaf, 0x7c, 0x91, 0xd6, 0x73, 0x24, 0xad, 0x67, - 0x33, 0x73, 0xe0, 0x16, 0xc5, 0x1a, 0x9e, 0xce, 0xcb, 0x60, 0x32, 0xf4, 0x3b, 0xbe, 0x87, 0xf3, - 0x48, 0x6f, 0xe6, 0x0b, 0x96, 0x89, 0xd5, 0x86, 0x5b, 0x85, 0xda, 0xc8, 0x30, 0xc7, 0x46, 0x96, - 0x65, 0x62, 0xb5, 0x91, 0x60, 0x54, 0x1b, 0x04, 0x8c, 0xa9, 0xcd, 0x45, 0xcb, 0xc4, 0x6a, 0xc3, - 0xcd, 0xb1, 0xda, 0x28, 0x40, 0xc7, 0x46, 0x4b, 0x96, 0x89, 0xd5, 0x46, 0x06, 0x6a, 0x8c, 0x4c, - 0x6d, 0x96, 0x2d, 0x13, 0xab, 0x0d, 0x37, 0xef, 0x27, 0x19, 0x99, 0xda, 0x5c, 0xb2, 0x4c, 0xac, - 0x36, 0x32, 0x90, 0xaa, 0x8d, 0x00, 0x72, 0x59, 0xb8, 0x6c, 0x99, 0x58, 0x6d, 0xb8, 0x5d, 0x52, - 0x1b, 0x15, 0xea, 
0xd8, 0xe8, 0xff, 0x2c, 0x13, 0xab, 0x8d, 0x02, 0xa5, 0x6a, 0x13, 0xbb, 0xe7, - 0xb4, 0x57, 0x2c, 0x13, 0xab, 0x8d, 0x08, 0x40, 0x52, 0x1b, 0x0d, 0xec, 0xd8, 0xa8, 0x64, 0x99, - 0x58, 0x6d, 0x54, 0x30, 0x55, 0x9b, 0x38, 0x08, 0xa2, 0x36, 0x57, 0x2d, 0x13, 0xab, 0x8d, 0x08, - 0x81, 0xab, 0x8d, 0x80, 0x31, 0xb5, 0xb9, 0x66, 0x99, 0x58, 0x6d, 0xb8, 0x39, 0x56, 0x1b, 0x01, - 0x24, 0x6a, 0x73, 0xdd, 0x32, 0xb1, 0xda, 0x70, 0x23, 0x57, 0x9b, 0x38, 0x42, 0xaa, 0x36, 0x6f, - 0x5a, 0x26, 0x56, 0x1b, 0x11, 0x9f, 0x50, 0x9b, 0x98, 0x8d, 0xa8, 0xcd, 0x5b, 0x96, 0x89, 0xd5, - 0x46, 0xd0, 0x71, 0xb5, 0x11, 0x30, 0x4d, 0x6d, 0x56, 0x2d, 0xf3, 0xb5, 0xd4, 0x86, 0xf3, 0x24, - 0xd4, 0x26, 0xce, 0x92, 0xa6, 0x36, 0x6b, 0xc4, 0x43, 0x7f, 0xb5, 0x11, 0xc9, 0x4c, 0xa8, 0x8d, - 0x1e, 0x37, 0x11, 0x85, 0x0d, 0xcb, 0x1c, 0x5c, 0x6d, 0xd4, 0x88, 0xb9, 0xda, 0x24, 0xc2, 0x25, - 0xc4, 0x36, 0x21, 0xee, 0xa3, 0x36, 0x5a, 0xa0, 0x5c, 0x6d, 0xb4, 0x6a, 0x31, 0xb5, 0x71, 0x70, - 0xcd, 0xa8, 0xda, 0xa8, 0x75, 0x13, 0x6a, 0x23, 0xd6, 0x11, 0xb5, 0xb9, 0xc5, 0xf0, 0x46, 0xdc, - 0x0e, 0x44, 0x6d, 0x9e, 0x82, 0xa9, 0x96, 0xd7, 0xa1, 0x02, 0xc1, 0x64, 0x62, 0x93, 0x24, 0xf5, - 0xcd, 0xec, 0x0c, 0x3c, 0xf6, 0x3a, 0x44, 0x3b, 0xc8, 0x47, 0xa5, 0xdd, 0x0d, 0x4f, 0xdc, 0x89, - 0x96, 0x6c, 0x93, 0x58, 0x1d, 0x9b, 0xa9, 0xca, 0xed, 0xc1, 0x58, 0x1d, 0x9b, 0x7c, 0x28, 0xac, - 0xcc, 0x06, 0x9f, 0x83, 0x69, 0xcc, 0x4a, 0xe5, 0x87, 0xab, 0xd0, 0x1d, 0xc2, 0xbb, 0xd2, 0x97, - 0x97, 0x4a, 0x13, 0xfd, 0xa4, 0xcc, 0x38, 0x3c, 0xd9, 0x2a, 0x73, 0x3b, 0x36, 0x17, 0xae, 0xb7, - 0x07, 0xe4, 0x76, 0x6c, 0xfa, 0xa9, 0x72, 0x73, 0x2b, 0xe7, 0xa6, 0x22, 0xc7, 0xb5, 0xee, 0xff, - 0x07, 0xe0, 0xa6, 0x02, 0xb8, 0xaf, 0xc5, 0x2d, 0x5b, 0x65, 0x6e, 0xc7, 0xe6, 0xf2, 0xf8, 0xce, - 0x80, 0xdc, 0x8e, 0xbd, 0xaf, 0xc5, 0x2d, 0x5b, 0xe1, 0xc7, 0xc1, 0x0c, 0xe6, 0x66, 0xda, 0x26, - 0x24, 0xf5, 0x2e, 0x61, 0x5f, 0xed, 0xcb, 0xce, 0x74, 0x96, 0xfd, 0xa0, 0xfc, 0x38, 0x50, 0xd5, - 0xae, 0x78, 0x70, 0x6c, 0xa1, 0xc4, 0x1f, 0x19, 0xd4, 0x83, 0x63, 0xb3, 0x1f, 0x9a, 0x07, 0x61, - 0x87, 0x47, 0x60, 0x8e, 0xe4, 0x87, 0x6f, 0x42, 0x28, 0xf8, 0x3d, 0xe2, 0x63, 0xbd, 0x7f, 0x8e, - 0x18, 0x98, 0xff, 0xa4, 0x5e, 0x70, 0xc8, 0xfa, 0x15, 0xd5, 0x0f, 0xae, 0x04, 0xdf, 0xcb, 0xd6, - 0xc0, 0x7e, 0x1c, 0x9b, 0xff, 0xd4, 0xfd, 0xc4, 0x57, 0xd4, 0xfb, 0x95, 0x1e, 0x1a, 0xe5, 0x41, - 0xef, 0x57, 0x72, 0x9c, 0x68, 0xf7, 0x2b, 0x3d, 0x62, 0x9e, 0x81, 0x62, 0xcc, 0xca, 0xce, 0x98, - 0xfb, 0x84, 0xf6, 0xad, 0xd3, 0x69, 0xe9, 0xe9, 0x43, 0x79, 0x27, 0x5b, 0x8a, 0x11, 0xee, 0x02, - 0xec, 0x89, 0x9c, 0x46, 0xf4, 0x48, 0x7a, 0x40, 0x58, 0xaf, 0xf5, 0x65, 0xc5, 0xe7, 0x14, 0xfe, - 0x9f, 0x52, 0xe6, 0x5b, 0xb1, 0x45, 0xb4, 0x3b, 0x95, 0x42, 0x76, 0x7e, 0x55, 0x06, 0x69, 0x77, - 0x02, 0xa5, 0x9f, 0x52, 0xbb, 0x4b, 0x56, 0x9e, 0x04, 0xc6, 0x4d, 0x8f, 0xbc, 0xea, 0x00, 0x49, - 0xa0, 0xcb, 0xc9, 0x69, 0x18, 0x27, 0x41, 0x32, 0xc2, 0x0e, 0x38, 0x2b, 0x11, 0x6b, 0x87, 0xe4, - 0x43, 0xe2, 0xe1, 0xe6, 0x00, 0x1e, 0x94, 0x63, 0x91, 0x7a, 0x9a, 0x6f, 0xa5, 0x5e, 0x84, 0x11, - 0x58, 0x94, 0x3c, 0xea, 0xa7, 0xe6, 0x36, 0x71, 0xe9, 0x0c, 0xe0, 0x52, 0x3d, 0x33, 0xa9, 0xcf, - 0x85, 0x56, 0xfa, 0x55, 0x78, 0x0c, 0xe6, 0x93, 0xdb, 0x24, 0x47, 0xdf, 0xce, 0x20, 0xf7, 0x80, - 0xb4, 0x0d, 0x7c, 0xf4, 0x49, 0xf7, 0x80, 0x76, 0x05, 0xbe, 0x0f, 0x16, 0x52, 0x76, 0x47, 0x3c, - 0x3d, 0x22, 0x9e, 0x36, 0x06, 0xdf, 0x5a, 0xec, 0x6a, 0xb6, 0x95, 0x72, 0x09, 0x2e, 0x83, 0x42, - 0xd0, 0xf6, 0x83, 0x23, 0x7e, 0xdc, 0x04, 0xf8, 0x11, 0x7b, 0x7b, 0xc8, 0xcd, 0x13, 0x2b, 0x3b, - 0x3c, 0x3e, 0x06, 0x66, 0x29, 0x48, 0xab, 
0x6d, 0xe7, 0xb5, 0x1e, 0xb7, 0xb6, 0x87, 0x5c, 0x48, - 0x68, 0xd4, 0x5a, 0x8a, 0x08, 0x58, 0xb7, 0x7f, 0xc0, 0x27, 0x12, 0xc4, 0xca, 0x7a, 0xf7, 0x22, - 0xa0, 0x5f, 0x59, 0xdb, 0x86, 0x6c, 0xbc, 0x01, 0x88, 0x91, 0x76, 0xe1, 0x05, 0x00, 0x18, 0x04, - 0xdf, 0x87, 0x11, 0x7e, 0x10, 0xdd, 0x1e, 0x72, 0xc7, 0x29, 0x02, 0xdf, 0x5b, 0xca, 0x56, 0x1d, - 0x1b, 0x75, 0x2d, 0xa3, 0x34, 0xac, 0x6c, 0xd5, 0xb1, 0x63, 0x47, 0x54, 0x7b, 0x7a, 0xf8, 0xf1, - 0x58, 0x38, 0xa2, 0x62, 0x22, 0x78, 0x98, 0x90, 0xbc, 0xc2, 0x8f, 0xc6, 0x82, 0x87, 0x09, 0x43, - 0x85, 0x47, 0x43, 0xca, 0xf6, 0xe1, 0xe0, 0x8f, 0x78, 0x22, 0x66, 0x52, 0x9e, 0x3d, 0xe9, 0x69, - 0x8c, 0x88, 0x0c, 0x9b, 0xa6, 0xa1, 0x5f, 0x19, 0x24, 0xf7, 0x8b, 0x2b, 0x74, 0xdc, 0xb6, 0xc2, - 0xe7, 0x3c, 0x2b, 0x78, 0xab, 0xef, 0x79, 0xcd, 0x9e, 0x1f, 0x3f, 0xa6, 0x61, 0xd3, 0x33, 0xba, - 0x0e, 0xba, 0x60, 0x5e, 0x9d, 0xd1, 0x08, 0xc6, 0x5f, 0x1b, 0xec, 0xd1, 0x56, 0x67, 0x24, 0x7a, - 0x47, 0x29, 0x67, 0x95, 0x49, 0x4e, 0x06, 0xa7, 0x63, 0x0b, 0xce, 0xdf, 0xf4, 0xe1, 0x74, 0xec, - 0x24, 0xa7, 0x63, 0x73, 0xce, 0x03, 0xe9, 0x21, 0xbf, 0xa7, 0x06, 0xfa, 0x5b, 0x4a, 0x7a, 0x3e, - 0x41, 0x7a, 0x20, 0x45, 0x3a, 0xa7, 0x0e, 0x89, 0xb2, 0x68, 0xa5, 0x58, 0x7f, 0xd7, 0x8f, 0x96, - 0x07, 0x3b, 0xa7, 0x8e, 0x94, 0xd2, 0x32, 0x40, 0x1a, 0x47, 0xb0, 0xfe, 0x3e, 0x2b, 0x03, 0xa4, - 0x97, 0xb4, 0x0c, 0x10, 0x5b, 0x5a, 0xa8, 0xb4, 0xd3, 0x04, 0xe9, 0x1f, 0xb2, 0x42, 0xa5, 0xcd, - 0xa7, 0x85, 0x4a, 0x8d, 0x69, 0xb4, 0x4c, 0x61, 0x38, 0xed, 0x1f, 0xb3, 0x68, 0xe9, 0x4d, 0xa8, - 0xd1, 0x52, 0x63, 0x5a, 0x06, 0xc8, 0x3d, 0x2a, 0x58, 0xff, 0x94, 0x95, 0x01, 0x72, 0xdb, 0x6a, - 0x19, 0x20, 0x36, 0xce, 0xb9, 0x27, 0x3d, 0x1c, 0x28, 0xcd, 0xff, 0x67, 0x83, 0xc8, 0x60, 0xdf, - 0xe6, 0x97, 0x1f, 0x0a, 0xa5, 0x20, 0xd5, 0x91, 0x81, 0x60, 0xfc, 0x8b, 0xc1, 0x9e, 0xb4, 0xfa, - 0x35, 0xbf, 0x32, 0x58, 0xc8, 0xe0, 0x94, 0x1a, 0xea, 0xaf, 0x7d, 0x38, 0x45, 0xf3, 0x2b, 0x53, - 0x08, 0xa9, 0x46, 0xda, 0x30, 0x42, 0x90, 0xfe, 0x8d, 0x92, 0x9e, 0xd2, 0xfc, 0xea, 0xcc, 0x22, - 0x8b, 0x56, 0x8a, 0xf5, 0xef, 0xfd, 0x68, 0x45, 0xf3, 0xab, 0x13, 0x8e, 0xb4, 0x0c, 0xa8, 0xcd, - 0xff, 0x8f, 0xac, 0x0c, 0xc8, 0xcd, 0xaf, 0x0c, 0x03, 0xd2, 0x42, 0xd5, 0x9a, 0xff, 0x9f, 0x59, - 0xa1, 0x2a, 0xcd, 0xaf, 0x8e, 0x0e, 0xd2, 0x68, 0xb5, 0xe6, 0xff, 0x57, 0x16, 0xad, 0xd2, 0xfc, - 0xea, 0xb3, 0x68, 0x5a, 0x06, 0xd4, 0xe6, 0xff, 0x77, 0x56, 0x06, 0xe4, 0xe6, 0x57, 0x06, 0x0e, - 0x9c, 0xf3, 0xa1, 0x34, 0xd7, 0xe5, 0xef, 0x70, 0xd0, 0x77, 0x73, 0x6c, 0x4e, 0x96, 0xd8, 0x3b, - 0x43, 0xc4, 0x33, 0x5f, 0x6e, 0x81, 0x8f, 0x80, 0x18, 0x1a, 0xd6, 0xc4, 0xcb, 0x1a, 0xf4, 0xbd, - 0x5c, 0xc6, 0xf9, 0xf1, 0x94, 0x43, 0x5c, 0xe1, 0x5f, 0x98, 0xe0, 0x47, 0xc1, 0x8c, 0x34, 0xc4, - 0xe6, 0x2f, 0x8e, 0xd0, 0xf7, 0xb3, 0xc8, 0xaa, 0x18, 0xf3, 0xd8, 0x8b, 0x5e, 0xc6, 0x64, 0xc2, - 0x04, 0xb7, 0xd4, 0xb9, 0x70, 0xaf, 0xde, 0x45, 0x3f, 0xa0, 0x44, 0x0b, 0x69, 0x45, 0xe8, 0xd5, - 0xbb, 0xca, 0xc4, 0xb8, 0x57, 0xef, 0xc2, 0x4d, 0x20, 0x66, 0x8b, 0x35, 0xaf, 0x7d, 0x82, 0x7e, - 0x48, 0xd7, 0xcf, 0x26, 0xd6, 0x6f, 0xb5, 0x4f, 0xdc, 0x3c, 0x87, 0x6e, 0xb5, 0x4f, 0xe0, 0x5d, - 0x69, 0xd6, 0xfc, 0x0a, 0x97, 0x01, 0xfd, 0x88, 0xae, 0x9d, 0x4f, 0xac, 0xa5, 0x55, 0x12, 0xd3, - 0x4d, 0xf2, 0x15, 0x97, 0x27, 0x6e, 0x50, 0x5e, 0x9e, 0x1f, 0xe7, 0x48, 0xb5, 0xfb, 0x95, 0x47, - 0xf4, 0xa5, 0x54, 0x1e, 0x41, 0x14, 0x97, 0xe7, 0x27, 0xb9, 0x0c, 0x85, 0x93, 0xca, 0xc3, 0x97, - 0xc5, 0xe5, 0x91, 0xb9, 0x48, 0x79, 0x48, 0x75, 0x7e, 0x9a, 0xc5, 0x25, 0x55, 0x27, 0x1e, 0x0a, - 0xb2, 0x55, 0xb8, 0x3a, 0xf2, 0xad, 0x82, 0xab, 0xf3, 0x4b, 0x4a, 
0x94, 0x5d, 0x1d, 0xe9, 0xee, - 0x60, 0xd5, 0x11, 0x14, 0xb8, 0x3a, 0x3f, 0xa3, 0xeb, 0x33, 0xaa, 0xc3, 0xa1, 0xac, 0x3a, 0x62, - 0x25, 0xad, 0xce, 0xcf, 0xe9, 0xda, 0xcc, 0xea, 0x70, 0x38, 0xad, 0xce, 0x05, 0x00, 0xc8, 0xfe, - 0xdb, 0x5e, 0xcb, 0x5f, 0x43, 0x9f, 0x36, 0xc9, 0x6b, 0x28, 0xc9, 0x04, 0x2d, 0x90, 0xa7, 0xfd, - 0x8b, 0xbf, 0xae, 0xa3, 0xcf, 0xc8, 0x88, 0x5d, 0x6c, 0x82, 0x17, 0x41, 0xa1, 0x16, 0x43, 0x36, - 0xd0, 0x67, 0x19, 0xa4, 0xca, 0x21, 0x1b, 0x70, 0x09, 0x4c, 0x50, 0x04, 0x81, 0xd8, 0x35, 0xf4, - 0x39, 0x9d, 0x86, 0xfc, 0x3d, 0x49, 0xbe, 0xad, 0x62, 0xc8, 0x4d, 0xf4, 0x79, 0x8a, 0x90, 0x6d, - 0x70, 0x99, 0xd3, 0xac, 0x12, 0x1e, 0x07, 0x7d, 0x41, 0x01, 0x61, 0x1e, 0x47, 0xec, 0x08, 0x7f, - 0xbb, 0x85, 0xbe, 0xa8, 0x3b, 0xba, 0x85, 0x01, 0x22, 0xb4, 0x4d, 0xf4, 0x25, 0x3d, 0xda, 0xcd, - 0x78, 0xcb, 0xf8, 0xeb, 0x6d, 0xf4, 0x65, 0x9d, 0xe2, 0x36, 0x5c, 0x02, 0x85, 0xaa, 0x40, 0xac, - 0xad, 0xa2, 0xaf, 0xb0, 0x38, 0x04, 0xc9, 0xda, 0x2a, 0xc1, 0xec, 0x54, 0xde, 0x7d, 0x50, 0xdb, - 0xdd, 0x7a, 0x5c, 0x59, 0x5b, 0x43, 0x5f, 0xe5, 0x18, 0x6c, 0xa4, 0xb6, 0x18, 0x43, 0x72, 0xbd, - 0x8e, 0xbe, 0xa6, 0x60, 0x88, 0x0d, 0x5e, 0x02, 0x93, 0x35, 0x29, 0xbf, 0x6b, 0x1b, 0xe8, 0xeb, - 0x09, 0x6f, 0x1b, 0x14, 0x55, 0x8d, 0x51, 0x36, 0xfa, 0x46, 0x02, 0x65, 0xc7, 0x09, 0xa4, 0xa0, - 0x9b, 0xe8, 0x9b, 0x72, 0x02, 0x09, 0x48, 0xca, 0x32, 0xdd, 0x9d, 0x83, 0xbe, 0x95, 0x00, 0x39, - 0xd8, 0x9f, 0x14, 0xd3, 0xad, 0x5a, 0x0d, 0x7d, 0x3b, 0x81, 0xba, 0x85, 0x51, 0x52, 0x4c, 0x9b, - 0xb5, 0x1a, 0xfa, 0x4e, 0x22, 0xaa, 0xcd, 0xc5, 0xe7, 0x60, 0x42, 0x7d, 0xd0, 0x29, 0x00, 0xc3, - 0x63, 0x6f, 0x44, 0x0d, 0x0f, 0xbe, 0x0d, 0xf2, 0xf5, 0x40, 0xbc, 0xd4, 0x40, 0xb9, 0xd3, 0x5e, - 0x80, 0xc8, 0xe8, 0xc5, 0x7b, 0x00, 0x26, 0x87, 0x94, 0xb0, 0x08, 0xcc, 0x97, 0xfe, 0x09, 0x73, - 0x81, 0x7f, 0x85, 0xb3, 0xe0, 0x0c, 0xbd, 0x7d, 0x72, 0xc4, 0x46, 0xbf, 0xdc, 0xc9, 0x6d, 0x1a, - 0x31, 0x83, 0x3c, 0x90, 0x94, 0x19, 0xcc, 0x14, 0x06, 0x53, 0x66, 0x28, 0x83, 0xd9, 0xb4, 0xd1, - 0xa3, 0xcc, 0x31, 0x91, 0xc2, 0x31, 0x91, 0xce, 0xa1, 0x8c, 0x18, 0x65, 0x8e, 0xe1, 0x14, 0x8e, - 0xe1, 0x24, 0x47, 0x62, 0x94, 0x28, 0x73, 0x4c, 0xa7, 0x70, 0x4c, 0xa7, 0x73, 0x28, 0x23, 0x43, - 0x99, 0x03, 0xa6, 0x70, 0x40, 0x99, 0xe3, 0x01, 0x98, 0x4f, 0x1f, 0x0c, 0xca, 0x2c, 0xa3, 0x29, - 0x2c, 0xa3, 0x19, 0x2c, 0xea, 0xf0, 0x4f, 0x66, 0x19, 0x49, 0x61, 0x19, 0x91, 0x59, 0xaa, 0x00, - 0x65, 0x8d, 0xf7, 0x64, 0x9e, 0xa9, 0x14, 0x9e, 0xa9, 0x2c, 0x1e, 0x6d, 0x7c, 0x27, 0xf3, 0x14, - 0x53, 0x78, 0x8a, 0xa9, 0xdd, 0x26, 0x0f, 0xe9, 0x4e, 0xeb, 0xd7, 0x9c, 0xcc, 0xb0, 0x05, 0x66, - 0x52, 0xe6, 0x71, 0xa7, 0x51, 0x18, 0x32, 0xc5, 0x5d, 0x50, 0xd4, 0x87, 0x6f, 0xf2, 0xfa, 0xb1, - 0x94, 0xf5, 0x63, 0x29, 0x4d, 0xa2, 0x0f, 0xda, 0x64, 0x8e, 0xf1, 0x14, 0x8e, 0xf1, 0xe4, 0x36, - 0xf4, 0x89, 0xda, 0x69, 0x14, 0x05, 0x99, 0x22, 0x04, 0xe7, 0xfa, 0x8c, 0xcc, 0x52, 0xa8, 0xde, - 0x91, 0xa9, 0x5e, 0xe3, 0x7d, 0x95, 0xe4, 0xf3, 0x18, 0x9c, 0xef, 0x37, 0x33, 0x4b, 0x71, 0xba, - 0xa6, 0x3a, 0xed, 0xfb, 0x0a, 0x4b, 0x72, 0xd4, 0xa4, 0x0d, 0x97, 0x36, 0x2b, 0x4b, 0x71, 0x72, - 0x47, 0x76, 0x32, 0xe8, 0x4b, 0x2d, 0xc9, 0x9b, 0x07, 0xce, 0x66, 0xce, 0xcb, 0x52, 0xdc, 0xad, - 0xa8, 0xee, 0xb2, 0x5f, 0x75, 0xc5, 0x2e, 0x96, 0x6e, 0x03, 0x20, 0x4d, 0xf6, 0x46, 0x81, 0x59, - 0xdd, 0xdb, 0x2b, 0x0e, 0xe1, 0x5f, 0xca, 0x5b, 0x6e, 0xd1, 0xa0, 0xbf, 0x3c, 0x2f, 0xe6, 0xb0, - 0xbb, 0xdd, 0xca, 0xc3, 0xe2, 0x7f, 0xf9, 0x7f, 0x46, 0x79, 0x42, 0x8c, 0xa2, 0xf0, 0xa9, 0xb2, - 0xf4, 0x06, 0x98, 0xd4, 0x06, 0x92, 0x05, 0x60, 0xd4, 0xf9, 0x81, 0x52, 0xbf, 0x76, 0x13, 
0x80, - 0xf8, 0xdf, 0x30, 0xc1, 0x29, 0x90, 0x3f, 0xd8, 0xdd, 0x7f, 0x52, 0xb9, 0xbf, 0x53, 0xdd, 0xa9, - 0x3c, 0x28, 0x0e, 0xc1, 0x02, 0x18, 0x7b, 0xe2, 0xee, 0x3d, 0xdd, 0x2b, 0x1f, 0x54, 0x8b, 0x06, - 0x1c, 0x03, 0xc3, 0x8f, 0xf6, 0xf7, 0x76, 0x8b, 0xb9, 0x6b, 0xf7, 0x40, 0x5e, 0x9e, 0x07, 0x4e, - 0x81, 0x7c, 0x75, 0xcf, 0xad, 0xec, 0x3c, 0xdc, 0xad, 0xd1, 0x48, 0x25, 0x03, 0x8d, 0x58, 0x31, - 0x3c, 0x2f, 0xe6, 0xca, 0x17, 0xc1, 0x85, 0x7a, 0xd0, 0x4a, 0xfc, 0x61, 0x26, 0x25, 0xe7, 0xc5, - 0x08, 0xb1, 0x6e, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x33, 0xc2, 0x0c, 0xb6, 0xeb, 0x26, 0x00, - 0x00, +var fileDescriptor_conformance_48ac832451f5d6c3 = []byte{ + // 2600 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5a, 0x5b, 0x73, 0x13, 0xc9, + 0x15, 0xf6, 0x68, 0xc0, 0x36, 0x2d, 0xd9, 0x96, 0xdb, 0xb7, 0xc6, 0x50, 0xcb, 0x60, 0x96, 0x20, + 0x60, 0xd7, 0xeb, 0xcb, 0x30, 0x5c, 0x36, 0x4b, 0xb0, 0xc0, 0x02, 0x93, 0xc5, 0xa2, 0xc6, 0x78, + 0xa9, 0x22, 0x0f, 0xca, 0x20, 0x8f, 0x5d, 0x5a, 0x24, 0x8d, 0x76, 0x66, 0xb4, 0x89, 0xf3, 0x98, + 0x7f, 0x90, 0xfb, 0xf5, 0x2f, 0xe4, 0x5a, 0x95, 0x4a, 0x52, 0xc9, 0x53, 0x2a, 0x2f, 0xb9, 0x27, + 0x95, 0x7b, 0xf2, 0x63, 0x92, 0xea, 0xeb, 0x74, 0xb7, 0x7a, 0x64, 0xb1, 0x55, 0x2b, 0x5b, 0xa7, + 0xbf, 0xfe, 0xce, 0xe9, 0xd3, 0x67, 0xbe, 0x76, 0x9f, 0x01, 0xcc, 0x36, 0xa3, 0xee, 0x61, 0x14, + 0x77, 0x82, 0x6e, 0x33, 0x5c, 0xed, 0xc5, 0x51, 0x1a, 0xc1, 0xa2, 0x64, 0x5a, 0x3e, 0x7b, 0x14, + 0x45, 0x47, 0xed, 0xf0, 0x1d, 0x32, 0xf4, 0xb2, 0x7f, 0xf8, 0x4e, 0xd0, 0x3d, 0xa6, 0xb8, 0xe5, + 0x37, 0xf4, 0xa1, 0x83, 0x7e, 0x1c, 0xa4, 0xad, 0xa8, 0xcb, 0xc6, 0x1d, 0x7d, 0xfc, 0xb0, 0x15, + 0xb6, 0x0f, 0x1a, 0x9d, 0x20, 0x79, 0xc5, 0x10, 0xe7, 0x75, 0x44, 0x92, 0xc6, 0xfd, 0x66, 0xca, + 0x46, 0x2f, 0xe8, 0xa3, 0x69, 0xab, 0x13, 0x26, 0x69, 0xd0, 0xe9, 0xe5, 0x05, 0xf0, 0xb9, 0x38, + 0xe8, 0xf5, 0xc2, 0x38, 0xa1, 0xe3, 0x2b, 0xbf, 0xb2, 0x00, 0xbc, 0x9f, 0xad, 0xc5, 0x0f, 0x3f, + 0xea, 0x87, 0x49, 0x0a, 0xaf, 0x83, 0x32, 0x9f, 0xd1, 0xe8, 0x05, 0xc7, 0xed, 0x28, 0x38, 0x40, + 0x96, 0x63, 0x55, 0x4a, 0x8f, 0xc6, 0xfc, 0x19, 0x3e, 0xf2, 0x94, 0x0e, 0xc0, 0x4b, 0xa0, 0xf4, + 0x61, 0x12, 0x75, 0x05, 0xb0, 0xe0, 0x58, 0x95, 0x33, 0x8f, 0xc6, 0xfc, 0x22, 0xb6, 0x72, 0x50, + 0x1d, 0x2c, 0xc5, 0x94, 0x3c, 0x3c, 0x68, 0x44, 0xfd, 0xb4, 0xd7, 0x4f, 0x1b, 0xc4, 0x6b, 0x8a, + 0x6c, 0xc7, 0xaa, 0x4c, 0x6f, 0x2c, 0xad, 0xca, 0x69, 0x7e, 0xde, 0x8a, 0xc3, 0x1a, 0x19, 0xf6, + 0x17, 0xc4, 0xbc, 0x3a, 0x99, 0x46, 0xcd, 0xd5, 0x33, 0x60, 0x82, 0x39, 0x5c, 0xf9, 0x62, 0x01, + 0xcc, 0x29, 0x8b, 0x48, 0x7a, 0x51, 0x37, 0x09, 0xe1, 0x45, 0x50, 0xec, 0x05, 0x71, 0x12, 0x36, + 0xc2, 0x38, 0x8e, 0x62, 0xb2, 0x00, 0x1c, 0x17, 0x20, 0xc6, 0x6d, 0x6c, 0x83, 0x57, 0xc1, 0x4c, + 0x12, 0xc6, 0xad, 0xa0, 0xdd, 0xfa, 0x02, 0x87, 0x8d, 0x33, 0xd8, 0xb4, 0x18, 0xa0, 0xd0, 0xcb, + 0x60, 0x2a, 0xee, 0x77, 0x71, 0x82, 0x19, 0x90, 0xaf, 0xb3, 0xc4, 0xcc, 0x14, 0x66, 0x4a, 0x9d, + 0x3d, 0x6a, 0xea, 0x4e, 0x99, 0x52, 0xb7, 0x0c, 0x26, 0x92, 0x57, 0xad, 0x5e, 0x2f, 0x3c, 0x40, + 0xa7, 0xd9, 0x38, 0x37, 0x54, 0x27, 0xc1, 0x78, 0x1c, 0x26, 0xfd, 0x76, 0xba, 0xf2, 0x93, 0xfb, + 0xa0, 0xf4, 0x2c, 0x4c, 0xd2, 0xad, 0x76, 0xfb, 0xd9, 0x71, 0x2f, 0x4c, 0xe0, 0x65, 0x30, 0x1d, + 0xf5, 0x70, 0xad, 0x05, 0xed, 0x46, 0xab, 0x9b, 0x6e, 0x6e, 0x90, 0x04, 0x9c, 0xf6, 0xa7, 0xb8, + 0x75, 0x07, 0x1b, 0x75, 0x98, 0xe7, 0x92, 0x75, 0xd9, 0x0a, 0xcc, 0x73, 0xe1, 0x15, 0x30, 0x23, + 0x60, 0x7d, 0x4a, 0x87, 0x57, 0x35, 0xe5, 0x8b, 0xd9, 0xfb, 0xc4, 0x3a, 0x00, 0xf4, 0x5c, 0xb2, + 
0xaa, 0x53, 0x2a, 0x50, 0x63, 0x4c, 0x28, 0x23, 0x5e, 0xde, 0x6c, 0x06, 0xdc, 0x1b, 0x64, 0x4c, + 0x28, 0x23, 0xde, 0x23, 0xa8, 0x02, 0x3d, 0x17, 0x5e, 0x05, 0x65, 0x01, 0x3c, 0x6c, 0x7d, 0x3e, + 0x3c, 0xd8, 0xdc, 0x40, 0x13, 0x8e, 0x55, 0x99, 0xf0, 0x05, 0x41, 0x8d, 0x9a, 0x07, 0xa1, 0x9e, + 0x8b, 0x26, 0x1d, 0xab, 0x32, 0xae, 0x41, 0x3d, 0x17, 0x5e, 0x07, 0xb3, 0x99, 0x7b, 0x4e, 0x7b, + 0xc6, 0xb1, 0x2a, 0x33, 0xbe, 0xe0, 0xd8, 0x63, 0x76, 0x03, 0xd8, 0x73, 0x11, 0x70, 0xac, 0x4a, + 0x59, 0x07, 0x7b, 0xae, 0x92, 0xfa, 0xc3, 0x76, 0x14, 0xa4, 0xa8, 0xe8, 0x58, 0x95, 0x42, 0x96, + 0xfa, 0x1a, 0x36, 0x2a, 0xeb, 0x3f, 0x88, 0xfa, 0x2f, 0xdb, 0x21, 0x2a, 0x39, 0x56, 0xc5, 0xca, + 0xd6, 0xff, 0x80, 0x58, 0xe1, 0x25, 0x20, 0x66, 0x36, 0x5e, 0x46, 0x51, 0x1b, 0x4d, 0x39, 0x56, + 0x65, 0xd2, 0x2f, 0x71, 0x63, 0x35, 0x8a, 0xda, 0x6a, 0x36, 0xd3, 0xb8, 0xd5, 0x3d, 0x42, 0xd3, + 0xb8, 0xaa, 0xa4, 0x6c, 0x12, 0xab, 0x12, 0xdd, 0xcb, 0xe3, 0x34, 0x4c, 0xd0, 0x0c, 0x2e, 0xe3, + 0x2c, 0xba, 0x2a, 0x36, 0xc2, 0x06, 0x58, 0x12, 0xb0, 0x2e, 0x7d, 0xbc, 0x3b, 0x61, 0x92, 0x04, + 0x47, 0x21, 0x82, 0x8e, 0x55, 0x29, 0x6e, 0x5c, 0x51, 0x1e, 0x6c, 0xb9, 0x44, 0x57, 0x77, 0x09, + 0xfe, 0x09, 0x85, 0xfb, 0x0b, 0x9c, 0x47, 0x31, 0xc3, 0x7d, 0x80, 0xb2, 0x2c, 0x45, 0x71, 0xd8, + 0x3a, 0xea, 0x0a, 0x0f, 0x73, 0xc4, 0xc3, 0x39, 0xc5, 0x43, 0x8d, 0x62, 0x38, 0xeb, 0xa2, 0x48, + 0xa6, 0x62, 0x87, 0x1f, 0x80, 0x79, 0x3d, 0xee, 0xb0, 0xdb, 0xef, 0xa0, 0x05, 0xa2, 0x46, 0x6f, + 0x9e, 0x14, 0xf4, 0x76, 0xb7, 0xdf, 0xf1, 0xa1, 0x1a, 0x31, 0xb6, 0xc1, 0xf7, 0xc1, 0xc2, 0x40, + 0xb8, 0x84, 0x78, 0x91, 0x10, 0x23, 0x53, 0xac, 0x84, 0x6c, 0x4e, 0x0b, 0x94, 0xb0, 0x79, 0x12, + 0x1b, 0xdd, 0xad, 0x46, 0xaf, 0x15, 0x36, 0x43, 0x84, 0xf0, 0x9e, 0x55, 0x0b, 0x93, 0x85, 0x6c, + 0x1e, 0xdd, 0xb7, 0xa7, 0x78, 0x18, 0x5e, 0x91, 0x4a, 0xa1, 0x19, 0xc5, 0x07, 0xe8, 0x2c, 0xc3, + 0x5b, 0x59, 0x39, 0xdc, 0x8f, 0xe2, 0x03, 0x58, 0x03, 0xb3, 0x71, 0xd8, 0xec, 0xc7, 0x49, 0xeb, + 0xe3, 0x50, 0xa4, 0xf5, 0x1c, 0x49, 0xeb, 0xd9, 0xdc, 0x1c, 0xf8, 0x65, 0x31, 0x87, 0xa7, 0xf3, + 0x32, 0x98, 0x8e, 0xc3, 0x5e, 0x18, 0xe0, 0x3c, 0xd2, 0x87, 0xf9, 0x82, 0x63, 0x63, 0xb5, 0xe1, + 0x56, 0xa1, 0x36, 0x32, 0xcc, 0x73, 0x91, 0xe3, 0xd8, 0x58, 0x6d, 0x24, 0x18, 0xd5, 0x06, 0x01, + 0x63, 0x6a, 0x73, 0xd1, 0xb1, 0xb1, 0xda, 0x70, 0x73, 0xa6, 0x36, 0x0a, 0xd0, 0x73, 0xd1, 0x8a, + 0x63, 0x63, 0xb5, 0x91, 0x81, 0x1a, 0x23, 0x53, 0x9b, 0x4b, 0x8e, 0x8d, 0xd5, 0x86, 0x9b, 0xf7, + 0x06, 0x19, 0x99, 0xda, 0xbc, 0xe9, 0xd8, 0x58, 0x6d, 0x64, 0x20, 0x55, 0x1b, 0x01, 0xe4, 0xb2, + 0x70, 0xd9, 0xb1, 0xb1, 0xda, 0x70, 0xbb, 0xa4, 0x36, 0x2a, 0xd4, 0x73, 0xd1, 0x27, 0x1c, 0x1b, + 0xab, 0x8d, 0x02, 0xa5, 0x6a, 0x93, 0xb9, 0xe7, 0xb4, 0x57, 0x1c, 0x1b, 0xab, 0x8d, 0x08, 0x40, + 0x52, 0x1b, 0x0d, 0xec, 0xb9, 0xa8, 0xe2, 0xd8, 0x58, 0x6d, 0x54, 0x30, 0x55, 0x9b, 0x2c, 0x08, + 0xa2, 0x36, 0x57, 0x1d, 0x1b, 0xab, 0x8d, 0x08, 0x81, 0xab, 0x8d, 0x80, 0x31, 0xb5, 0xb9, 0xe6, + 0xd8, 0x58, 0x6d, 0xb8, 0x39, 0x53, 0x1b, 0x01, 0x24, 0x6a, 0x73, 0xdd, 0xb1, 0xb1, 0xda, 0x70, + 0x23, 0x57, 0x9b, 0x2c, 0x42, 0xaa, 0x36, 0x6f, 0x39, 0x36, 0x56, 0x1b, 0x11, 0x9f, 0x50, 0x9b, + 0x8c, 0x8d, 0xa8, 0xcd, 0xdb, 0x8e, 0x8d, 0xd5, 0x46, 0xd0, 0x71, 0xb5, 0x11, 0x30, 0x4d, 0x6d, + 0xd6, 0x1c, 0xfb, 0xb5, 0xd4, 0x86, 0xf3, 0x0c, 0xa8, 0x4d, 0x96, 0x25, 0x4d, 0x6d, 0xd6, 0x89, + 0x87, 0xe1, 0x6a, 0x23, 0x92, 0x39, 0xa0, 0x36, 0x7a, 0xdc, 0x44, 0x14, 0x36, 0x1d, 0x7b, 0x74, + 0xb5, 0x51, 0x23, 0xe6, 0x6a, 0x33, 0x10, 0x2e, 0x21, 0x76, 0x09, 0xf1, 0x10, 0xb5, 0xd1, 0x02, + 0xe5, 0x6a, 0xa3, 0xed, 
0x16, 0x53, 0x1b, 0x0f, 0xef, 0x19, 0x55, 0x1b, 0x75, 0xdf, 0x84, 0xda, + 0x88, 0x79, 0x44, 0x6d, 0x6e, 0x32, 0xbc, 0x95, 0x95, 0x03, 0x51, 0x9b, 0x67, 0x60, 0xa6, 0x13, + 0xf4, 0xa8, 0x40, 0x30, 0x99, 0xb8, 0x45, 0x92, 0xfa, 0x56, 0x7e, 0x06, 0x9e, 0x04, 0x3d, 0xa2, + 0x1d, 0xe4, 0x63, 0xbb, 0x9b, 0xc6, 0xc7, 0xfe, 0x54, 0x47, 0xb6, 0x49, 0xac, 0x9e, 0xcb, 0x54, + 0xe5, 0xf6, 0x68, 0xac, 0x9e, 0x4b, 0x3e, 0x14, 0x56, 0x66, 0x83, 0x2f, 0xc0, 0x2c, 0x66, 0xa5, + 0xf2, 0xc3, 0x55, 0xe8, 0x0e, 0xe1, 0x5d, 0x1d, 0xca, 0x4b, 0xa5, 0x89, 0x7e, 0x52, 0x66, 0x1c, + 0x9e, 0x6c, 0x95, 0xb9, 0x3d, 0x97, 0x0b, 0xd7, 0xbb, 0x23, 0x72, 0x7b, 0x2e, 0xfd, 0x54, 0xb9, + 0xb9, 0x95, 0x73, 0x53, 0x91, 0xe3, 0x5a, 0xf7, 0xc9, 0x11, 0xb8, 0xa9, 0x00, 0xee, 0x69, 0x71, + 0xcb, 0x56, 0x99, 0xdb, 0x73, 0xb9, 0x3c, 0xbe, 0x37, 0x22, 0xb7, 0xe7, 0xee, 0x69, 0x71, 0xcb, + 0x56, 0xf8, 0x59, 0x30, 0x87, 0xb9, 0x99, 0xb6, 0x09, 0x49, 0xbd, 0x4b, 0xd8, 0xd7, 0x86, 0xb2, + 0x33, 0x9d, 0x65, 0x3f, 0x28, 0x3f, 0x0e, 0x54, 0xb5, 0x2b, 0x1e, 0x3c, 0x57, 0x28, 0xf1, 0xa7, + 0x46, 0xf5, 0xe0, 0xb9, 0xec, 0x87, 0xe6, 0x41, 0xd8, 0xe1, 0x21, 0x58, 0x20, 0xf9, 0xe1, 0x8b, + 0x10, 0x0a, 0x7e, 0x8f, 0xf8, 0xd8, 0x18, 0x9e, 0x23, 0x06, 0xe6, 0x3f, 0xa9, 0x17, 0x1c, 0xb2, + 0x3e, 0xa2, 0xfa, 0xc1, 0x3b, 0xc1, 0xd7, 0xb2, 0x35, 0xb2, 0x1f, 0xcf, 0xe5, 0x3f, 0x75, 0x3f, + 0xd9, 0x88, 0xfa, 0xbc, 0xd2, 0x43, 0xa3, 0x3a, 0xea, 0xf3, 0x4a, 0x8e, 0x13, 0xed, 0x79, 0xa5, + 0x47, 0xcc, 0x73, 0x50, 0xce, 0x58, 0xd9, 0x19, 0x73, 0x9f, 0xd0, 0xbe, 0x7d, 0x32, 0x2d, 0x3d, + 0x7d, 0x28, 0xef, 0x74, 0x47, 0x31, 0xc2, 0x5d, 0x80, 0x3d, 0x91, 0xd3, 0x88, 0x1e, 0x49, 0x0f, + 0x08, 0xeb, 0xb5, 0xa1, 0xac, 0xf8, 0x9c, 0xc2, 0xff, 0x53, 0xca, 0x62, 0x27, 0xb3, 0x88, 0x72, + 0xa7, 0x52, 0xc8, 0xce, 0xaf, 0xed, 0x51, 0xca, 0x9d, 0x40, 0xe9, 0xa7, 0x54, 0xee, 0x92, 0x95, + 0x27, 0x81, 0x71, 0xd3, 0x23, 0xaf, 0x36, 0x42, 0x12, 0xe8, 0x74, 0x72, 0x1a, 0x66, 0x49, 0x90, + 0x8c, 0xb0, 0x07, 0xce, 0x4a, 0xc4, 0xda, 0x21, 0xf9, 0x90, 0x78, 0xb8, 0x31, 0x82, 0x07, 0xe5, + 0x58, 0xa4, 0x9e, 0x16, 0x3b, 0xc6, 0x41, 0x98, 0x80, 0x65, 0xc9, 0xa3, 0x7e, 0x6a, 0x3e, 0x22, + 0x2e, 0xbd, 0x11, 0x5c, 0xaa, 0x67, 0x26, 0xf5, 0xb9, 0xd4, 0x31, 0x8f, 0xc2, 0x23, 0xb0, 0x38, + 0xb8, 0x4c, 0x72, 0xf4, 0xed, 0x8c, 0xf2, 0x0c, 0x48, 0xcb, 0xc0, 0x47, 0x9f, 0xf4, 0x0c, 0x68, + 0x23, 0xf0, 0x43, 0xb0, 0x64, 0x58, 0x1d, 0xf1, 0xf4, 0x98, 0x78, 0xda, 0x1c, 0x7d, 0x69, 0x99, + 0xab, 0xf9, 0x8e, 0x61, 0x08, 0x5e, 0x02, 0xa5, 0xa8, 0x1b, 0x46, 0x87, 0xfc, 0xb8, 0x89, 0xf0, + 0x15, 0xfb, 0xd1, 0x98, 0x5f, 0x24, 0x56, 0x76, 0x78, 0x7c, 0x06, 0xcc, 0x53, 0x90, 0xb6, 0xb7, + 0xbd, 0xd7, 0xba, 0x6e, 0x3d, 0x1a, 0xf3, 0x21, 0xa1, 0x51, 0xf7, 0x52, 0x44, 0xc0, 0xaa, 0xfd, + 0x23, 0xde, 0x91, 0x20, 0x56, 0x56, 0xbb, 0x17, 0x01, 0xfd, 0xca, 0xca, 0x36, 0x66, 0xed, 0x0d, + 0x40, 0x8c, 0xb4, 0x0a, 0xeb, 0xd2, 0xc5, 0x85, 0x3c, 0x8f, 0xac, 0xf1, 0x84, 0x7e, 0x63, 0x91, + 0x30, 0x97, 0x57, 0x69, 0x67, 0x6a, 0x95, 0xb7, 0x44, 0x56, 0xf1, 0x13, 0xf7, 0x41, 0xd0, 0xee, + 0x87, 0xd9, 0x8d, 0x06, 0x9b, 0x9e, 0xd3, 0x79, 0xd0, 0x07, 0x8b, 0x6a, 0x3b, 0x43, 0x30, 0xfe, + 0xd6, 0x62, 0xb7, 0x40, 0x9d, 0x91, 0x48, 0x03, 0xa5, 0x9c, 0x57, 0x9a, 0x1e, 0x39, 0x9c, 0x9e, + 0x2b, 0x38, 0x7f, 0x37, 0x84, 0xd3, 0x73, 0x07, 0x39, 0x3d, 0x97, 0x73, 0xee, 0x4b, 0xf7, 0xe1, + 0xbe, 0x1a, 0xe8, 0xef, 0x29, 0xe9, 0xf9, 0x01, 0xd2, 0x7d, 0x29, 0xd2, 0x05, 0xb5, 0x9f, 0x92, + 0x47, 0x2b, 0xc5, 0xfa, 0x87, 0x61, 0xb4, 0x3c, 0xd8, 0x05, 0xb5, 0xfb, 0x62, 0xca, 0x00, 0xd1, + 0x77, 0xc1, 0xfa, 0xc7, 0xbc, 0x0c, 0x10, 0x0d, 
0xd7, 0x32, 0x40, 0x6c, 0xa6, 0x50, 0xa9, 0xba, + 0x0b, 0xd2, 0x3f, 0xe5, 0x85, 0x4a, 0x05, 0x5c, 0x0b, 0x95, 0x1a, 0x4d, 0xb4, 0xec, 0x61, 0xe4, + 0xb4, 0x7f, 0xce, 0xa3, 0xa5, 0xf5, 0xaa, 0xd1, 0x52, 0xa3, 0x29, 0x03, 0xa4, 0x9c, 0x05, 0xeb, + 0x5f, 0xf2, 0x32, 0x40, 0x2a, 0x5c, 0xcb, 0x00, 0xb1, 0x71, 0xce, 0xba, 0xf4, 0x77, 0xb4, 0x52, + 0xfc, 0x7f, 0xb5, 0x88, 0x62, 0x0c, 0x2d, 0x7e, 0xf9, 0xfe, 0x24, 0x05, 0xa9, 0xde, 0xae, 0x05, + 0xe3, 0xdf, 0x2c, 0x76, 0x29, 0x19, 0x56, 0xfc, 0xca, 0x1d, 0x3c, 0x87, 0x53, 0x2a, 0xa8, 0xbf, + 0x0f, 0xe1, 0x14, 0xc5, 0xaf, 0x5c, 0xd8, 0xa5, 0x3d, 0xd2, 0xee, 0xed, 0x82, 0xf4, 0x1f, 0x94, + 0xf4, 0x84, 0xe2, 0x57, 0xaf, 0xf7, 0x79, 0xb4, 0x52, 0xac, 0xff, 0x1c, 0x46, 0x2b, 0x8a, 0x5f, + 0x6d, 0x06, 0x98, 0x32, 0xa0, 0x16, 0xff, 0xbf, 0xf2, 0x32, 0x20, 0x17, 0xbf, 0x72, 0x6f, 0x36, + 0x85, 0xaa, 0x15, 0xff, 0xbf, 0xf3, 0x42, 0x55, 0x8a, 0x5f, 0xbd, 0x65, 0x9b, 0x68, 0xb5, 0xe2, + 0xff, 0x4f, 0x1e, 0xad, 0x52, 0xfc, 0xea, 0xb5, 0xcd, 0x94, 0x01, 0xb5, 0xf8, 0xff, 0x9b, 0x97, + 0x01, 0xb9, 0xf8, 0x95, 0xbb, 0x39, 0xe7, 0x7c, 0x28, 0xb5, 0x40, 0xf9, 0xeb, 0x0e, 0xf4, 0xbd, + 0x02, 0x6b, 0x29, 0x0d, 0xac, 0x9d, 0x21, 0xb2, 0xf6, 0x28, 0xb7, 0xc0, 0xc7, 0x40, 0xf4, 0xd7, + 0x1a, 0xe2, 0xbd, 0x06, 0xfa, 0x7e, 0x21, 0xe7, 0xfc, 0x78, 0xc6, 0x21, 0xbe, 0xf0, 0x2f, 0x4c, + 0xf0, 0xd3, 0x60, 0x4e, 0xea, 0xf7, 0xf2, 0x77, 0x2c, 0xe8, 0x07, 0x79, 0x64, 0x35, 0x8c, 0x79, + 0x12, 0x24, 0xaf, 0x32, 0x32, 0x61, 0x82, 0x5b, 0x6a, 0x0b, 0xb5, 0xdf, 0x4c, 0xd1, 0x0f, 0x29, + 0xd1, 0x92, 0x69, 0x13, 0xfa, 0xcd, 0x54, 0x69, 0xae, 0xf6, 0x9b, 0x29, 0xbc, 0x05, 0x44, 0x1b, + 0xae, 0x11, 0x74, 0x8f, 0xd1, 0x8f, 0xe8, 0xfc, 0xf9, 0x81, 0xf9, 0x5b, 0xdd, 0x63, 0xbf, 0xc8, + 0xa1, 0x5b, 0xdd, 0x63, 0x78, 0x57, 0x6a, 0xcb, 0x7e, 0x8c, 0xb7, 0x01, 0xfd, 0x98, 0xce, 0x5d, + 0x1c, 0x98, 0x4b, 0x77, 0x49, 0x34, 0x02, 0xc9, 0x57, 0xbc, 0x3d, 0x59, 0x81, 0xf2, 0xed, 0xf9, + 0x69, 0x81, 0xec, 0xf6, 0xb0, 0xed, 0x11, 0x75, 0x29, 0x6d, 0x8f, 0x20, 0xca, 0xb6, 0xe7, 0x67, + 0x85, 0x1c, 0x85, 0x93, 0xb6, 0x87, 0x4f, 0xcb, 0xb6, 0x47, 0xe6, 0x22, 0xdb, 0x43, 0x76, 0xe7, + 0xe7, 0x79, 0x5c, 0xd2, 0xee, 0x64, 0xfd, 0x33, 0x36, 0x0b, 0xef, 0x8e, 0xfc, 0xa8, 0xe0, 0xdd, + 0xf9, 0x35, 0x25, 0xca, 0xdf, 0x1d, 0xe9, 0xe9, 0x60, 0xbb, 0x23, 0x28, 0xf0, 0xee, 0xfc, 0x82, + 0xce, 0xcf, 0xd9, 0x1d, 0x0e, 0x65, 0xbb, 0x23, 0x66, 0xd2, 0xdd, 0xf9, 0x25, 0x9d, 0x9b, 0xbb, + 0x3b, 0x1c, 0x4e, 0x77, 0xe7, 0x02, 0x00, 0x64, 0xfd, 0xdd, 0xa0, 0x13, 0xae, 0xa3, 0x2f, 0xd9, + 0xe4, 0x8d, 0x8d, 0x64, 0x82, 0x0e, 0x28, 0xd2, 0xfa, 0xc5, 0x5f, 0x37, 0xd0, 0x97, 0x65, 0xc4, + 0x2e, 0x36, 0xc1, 0x8b, 0xa0, 0xd4, 0xc8, 0x20, 0x9b, 0xe8, 0x2b, 0x0c, 0x52, 0xe3, 0x90, 0x4d, + 0xb8, 0x02, 0xa6, 0x28, 0x82, 0x40, 0xdc, 0x06, 0xfa, 0xaa, 0x4e, 0xe3, 0xe2, 0xbf, 0xf1, 0xc8, + 0xb7, 0x35, 0x0c, 0xb9, 0x81, 0xbe, 0x46, 0x11, 0xb2, 0x0d, 0x5e, 0xe2, 0x34, 0x6b, 0x84, 0xc7, + 0x43, 0x5f, 0x57, 0x40, 0x98, 0xc7, 0x13, 0x2b, 0xc2, 0xdf, 0x6e, 0xa2, 0x6f, 0xe8, 0x8e, 0x6e, + 0x62, 0x80, 0x08, 0xed, 0x16, 0xfa, 0xa6, 0x1e, 0xed, 0xad, 0x6c, 0xc9, 0xf8, 0xeb, 0x6d, 0xf4, + 0x2d, 0x9d, 0xe2, 0x36, 0x5c, 0x01, 0xa5, 0x9a, 0x40, 0xac, 0xaf, 0xa1, 0x6f, 0xb3, 0x38, 0x04, + 0xc9, 0xfa, 0x1a, 0xc1, 0xec, 0x6c, 0xbf, 0xff, 0xa0, 0xb1, 0xbb, 0xf5, 0x64, 0x7b, 0x7d, 0x1d, + 0x7d, 0x87, 0x63, 0xb0, 0x91, 0xda, 0x32, 0x0c, 0xc9, 0xf5, 0x06, 0xfa, 0xae, 0x82, 0x21, 0xb6, + 0xe5, 0x17, 0x60, 0x4a, 0xfd, 0x8b, 0xb9, 0x04, 0xac, 0x80, 0xbd, 0x5a, 0xb3, 0x02, 0xf8, 0x2e, + 0x28, 0x36, 0x23, 0xd1, 0x1d, 0x47, 0x85, 0x93, 0x3a, 0xe9, 0x32, 0x7a, 
0xf9, 0x1e, 0x80, 0x83, + 0xdd, 0x2e, 0x58, 0x06, 0xf6, 0xab, 0xf0, 0x98, 0xb9, 0xc0, 0xbf, 0xc2, 0x79, 0x70, 0x9a, 0x16, + 0x57, 0x81, 0xd8, 0xe8, 0x97, 0x3b, 0x85, 0x5b, 0x56, 0xc6, 0x20, 0x77, 0xb6, 0x64, 0x06, 0xdb, + 0xc0, 0x60, 0xcb, 0x0c, 0x55, 0x30, 0x6f, 0xea, 0x61, 0xc9, 0x1c, 0x53, 0x06, 0x8e, 0x29, 0x33, + 0x87, 0xd2, 0xab, 0x92, 0x39, 0x4e, 0x19, 0x38, 0x4e, 0x0d, 0x72, 0x0c, 0xf4, 0xa4, 0x64, 0x8e, + 0x59, 0x03, 0xc7, 0xac, 0x99, 0x43, 0xe9, 0x3d, 0xc9, 0x1c, 0xd0, 0xc0, 0x01, 0x65, 0x8e, 0x07, + 0x60, 0xd1, 0xdc, 0x61, 0x92, 0x59, 0x26, 0x0c, 0x2c, 0x13, 0x39, 0x2c, 0x6a, 0x17, 0x49, 0x66, + 0x19, 0x37, 0xb0, 0x8c, 0xcb, 0x2c, 0x35, 0x80, 0xf2, 0xfa, 0x44, 0x32, 0xcf, 0x8c, 0x81, 0x67, + 0x26, 0x8f, 0x47, 0xeb, 0x03, 0xc9, 0x3c, 0x65, 0x03, 0x4f, 0xd9, 0x58, 0x6d, 0x72, 0xb7, 0xe7, + 0xa4, 0x7a, 0x2d, 0xc8, 0x0c, 0x5b, 0x60, 0xce, 0xd0, 0xd8, 0x39, 0x89, 0xc2, 0x92, 0x29, 0xee, + 0x82, 0xb2, 0xde, 0xc5, 0x91, 0xe7, 0x4f, 0x1a, 0xe6, 0x4f, 0x1a, 0x8a, 0x44, 0xef, 0xd8, 0xc8, + 0x1c, 0x67, 0x0c, 0x1c, 0x67, 0x06, 0x97, 0xa1, 0xb7, 0x66, 0x4e, 0xa2, 0x28, 0xc9, 0x14, 0x31, + 0x38, 0x37, 0xa4, 0xf7, 0x62, 0xa0, 0x7a, 0x4f, 0xa6, 0x7a, 0x8d, 0x17, 0x1f, 0x92, 0xcf, 0x23, + 0x70, 0x7e, 0x58, 0xf3, 0xc5, 0xe0, 0x74, 0x5d, 0x75, 0x3a, 0xf4, 0x5d, 0x88, 0xe4, 0xa8, 0x4d, + 0x0b, 0xce, 0xd4, 0x74, 0x31, 0x38, 0xb9, 0x23, 0x3b, 0x19, 0xf5, 0xed, 0x88, 0xe4, 0x2d, 0x00, + 0x67, 0x73, 0x1b, 0x2f, 0x06, 0x77, 0xab, 0xaa, 0xbb, 0xfc, 0x77, 0x26, 0x99, 0x8b, 0x95, 0xdb, + 0x00, 0x48, 0x2d, 0xa2, 0x09, 0x60, 0xd7, 0xea, 0xf5, 0xf2, 0x18, 0xfe, 0xa5, 0xba, 0xe5, 0x97, + 0x2d, 0xfa, 0xcb, 0x8b, 0x72, 0x01, 0xbb, 0xdb, 0xdd, 0x7e, 0x58, 0xfe, 0x1f, 0xff, 0xcf, 0xaa, + 0x4e, 0xf1, 0xe6, 0x09, 0x39, 0xc0, 0x56, 0xde, 0x00, 0xd3, 0x5a, 0x67, 0xab, 0x04, 0xac, 0x26, + 0x3f, 0x50, 0x9a, 0xd7, 0x6e, 0x00, 0x90, 0xfd, 0x63, 0x18, 0x38, 0x03, 0x8a, 0xfb, 0xbb, 0x7b, + 0x4f, 0xb7, 0xef, 0xef, 0xd4, 0x76, 0xb6, 0x1f, 0x94, 0xc7, 0x60, 0x09, 0x4c, 0x3e, 0xf5, 0xeb, + 0xcf, 0xea, 0xd5, 0xfd, 0x5a, 0xd9, 0x82, 0x93, 0xe0, 0xd4, 0xe3, 0xbd, 0xfa, 0x6e, 0xb9, 0x70, + 0xed, 0x1e, 0x28, 0xca, 0x8d, 0xa5, 0x19, 0x50, 0xac, 0xd5, 0xfd, 0xed, 0x9d, 0x87, 0xbb, 0x0d, + 0x1a, 0xa9, 0x64, 0xa0, 0x11, 0x2b, 0x86, 0x17, 0xe5, 0x42, 0xf5, 0x22, 0xb8, 0xd0, 0x8c, 0x3a, + 0x03, 0x7f, 0xb6, 0x48, 0xc9, 0x79, 0x39, 0x4e, 0xac, 0x9b, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, + 0x29, 0x30, 0x51, 0x54, 0x22, 0x25, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto b/vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.proto similarity index 96% rename from vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto rename to vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.proto index 95a8fd13..fc96074a 100644 --- a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto +++ b/vendor/github.com/golang/protobuf/conformance/internal/conformance_proto/conformance.proto @@ -210,11 +210,6 @@ message TestAllTypes { NestedMessage oneof_nested_message = 112; string oneof_string = 113; bytes oneof_bytes = 114; - bool oneof_bool = 115; - uint64 oneof_uint64 = 116; - float oneof_float = 117; - double oneof_double = 118; - NestedEnum oneof_enum = 119; } // Well-known types @@ -253,7 +248,6 @@ message TestAllTypes { repeated google.protobuf.Value repeated_value = 316; // Test field-name-to-JSON-name convention. - // (protobuf says names can be any valid C/C++ identifier.) 
int32 fieldname1 = 401; int32 field_name2 = 402; int32 _field_name3 = 403; @@ -266,12 +260,6 @@ message TestAllTypes { int32 Field_Name10 = 410; int32 FIELD_NAME11 = 411; int32 FIELD_name12 = 412; - int32 __field_name13 = 413; - int32 __Field_name14 = 414; - int32 field__name15 = 415; - int32 field__Name16 = 416; - int32 field_name17__ = 417; - int32 Field_name18__ = 418; } message ForeignMessage { diff --git a/vendor/github.com/golang/protobuf/conformance/test.sh b/vendor/github.com/golang/protobuf/conformance/test.sh new file mode 100755 index 00000000..e6de29b9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/conformance/test.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +PROTOBUF_ROOT=$1 +CONFORMANCE_ROOT=$1/conformance +CONFORMANCE_TEST_RUNNER=$CONFORMANCE_ROOT/conformance-test-runner + +cd $(dirname $0) + +if [[ $PROTOBUF_ROOT == "" ]]; then + echo "usage: test.sh " >/dev/stderr + exit 1 +fi + +if [[ ! -x $CONFORMANCE_TEST_RUNNER ]]; then + echo "SKIP: conformance test runner not installed" >/dev/stderr + exit 0 +fi + +a=$CONFORMANCE_ROOT/conformance.proto +b=internal/conformance_proto/conformance.proto +if [[ $(diff $a $b) != "" ]]; then + cp $a $b + echo "WARNING: conformance.proto is out of date" >/dev/stderr +fi + +$CONFORMANCE_TEST_RUNNER --failure_list failure_list_go.txt ./conformance.sh diff --git a/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go index 27b0729c..bf5174d3 100644 --- a/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go +++ b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/golang/protobuf/descriptor" - tpb "github.com/golang/protobuf/proto/testdata" + tpb "github.com/golang/protobuf/proto/test_proto" protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" ) @@ -20,7 +20,7 @@ func TestMessage(t *testing.T) { } } -func Example_Options() { +func Example_options() { var msg *tpb.MyMessageSet _, md := descriptor.ForMessage(msg) if md.GetOptions().GetMessageSetWireFormat() { diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go index c7a45d6f..ff368f33 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go @@ -56,6 +56,8 @@ import ( stpb "github.com/golang/protobuf/ptypes/struct" ) +const secondInNanos = int64(time.Second / time.Nanosecond) + // Marshaler is a configurable object for converting between // protocol buffer objects and a JSON representation for them. type Marshaler struct { @@ -73,6 +75,31 @@ type Marshaler struct { // Whether to use the original (.proto) name for fields. OrigName bool + + // A custom URL resolver to use when marshaling Any messages to JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver +} + +// AnyResolver takes a type URL, present in an Any message, and resolves it into +// an instance of the associated message. +type AnyResolver interface { + Resolve(typeUrl string) (proto.Message, error) +} + +func defaultResolveAny(typeUrl string) (proto.Message, error) { + // Only the part of typeUrl after the last slash is relevant. 
+ mname := typeUrl + if slash := strings.LastIndex(mname, "/"); slash >= 0 { + mname = mname[slash+1:] + } + mt := proto.MessageType(mname) + if mt == nil { + return nil, fmt.Errorf("unknown message type %q", mname) + } + return reflect.New(mt.Elem()).Interface().(proto.Message), nil } // JSONPBMarshaler is implemented by protobuf messages that customize the @@ -93,6 +120,14 @@ type JSONPBUnmarshaler interface { // Marshal marshals a protocol buffer into JSON. func (m *Marshaler) Marshal(out io.Writer, pb proto.Message) error { + v := reflect.ValueOf(pb) + if pb == nil || (v.Kind() == reflect.Ptr && v.IsNil()) { + return errors.New("Marshal called with nil") + } + // Check for unset required fields first. + if err := checkRequiredFields(pb); err != nil { + return err + } writer := &errWriter{writer: out} return m.marshalObject(writer, pb, "", "") } @@ -165,13 +200,22 @@ func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeU // Any is a bit more involved. return m.marshalAny(out, v, indent) case "Duration": - // "Generated output always contains 3, 6, or 9 fractional digits, + // "Generated output always contains 0, 3, 6, or 9 fractional digits, // depending on required precision." s, ns := s.Field(0).Int(), s.Field(1).Int() - d := time.Duration(s)*time.Second + time.Duration(ns)*time.Nanosecond - x := fmt.Sprintf("%.9f", d.Seconds()) + if ns <= -secondInNanos || ns >= secondInNanos { + return fmt.Errorf("ns out of range (%v, %v)", -secondInNanos, secondInNanos) + } + if (s > 0 && ns < 0) || (s < 0 && ns > 0) { + return errors.New("signs of seconds and nanos do not match") + } + if s < 0 { + ns = -ns + } + x := fmt.Sprintf("%d.%09d", s, ns) x = strings.TrimSuffix(x, "000") x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, ".000") out.write(`"`) out.write(x) out.write(`s"`) @@ -182,13 +226,17 @@ func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeU return m.marshalValue(out, &proto.Properties{}, s.Field(0), indent) case "Timestamp": // "RFC 3339, where generated output will always be Z-normalized - // and uses 3, 6 or 9 fractional digits." + // and uses 0, 3, 6 or 9 fractional digits." s, ns := s.Field(0).Int(), s.Field(1).Int() + if ns < 0 || ns >= secondInNanos { + return fmt.Errorf("ns out of range [0, %v)", secondInNanos) + } t := time.Unix(s, ns).UTC() // time.RFC3339Nano isn't exactly right (we need to get 3/6/9 fractional digits). x := t.Format("2006-01-02T15:04:05.000000000") x = strings.TrimSuffix(x, "000") x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, ".000") out.write(`"`) out.write(x) out.write(`Z"`) @@ -344,16 +392,17 @@ func (m *Marshaler) marshalAny(out *errWriter, any proto.Message, indent string) turl := v.Field(0).String() val := v.Field(1).Bytes() - // Only the part of type_url after the last slash is relevant. - mname := turl - if slash := strings.LastIndex(mname, "/"); slash >= 0 { - mname = mname[slash+1:] + var msg proto.Message + var err error + if m.AnyResolver != nil { + msg, err = m.AnyResolver.Resolve(turl) + } else { + msg, err = defaultResolveAny(turl) } - mt := proto.MessageType(mname) - if mt == nil { - return fmt.Errorf("unknown message type %q", mname) + if err != nil { + return err } - msg := reflect.New(mt.Elem()).Interface().(proto.Message) + if err := proto.Unmarshal(val, msg); err != nil { return err } @@ -590,6 +639,12 @@ type Unmarshaler struct { // Whether to allow messages to contain unknown fields, as opposed to // failing to unmarshal. 
AllowUnknownFields bool + + // A custom URL resolver to use when unmarshaling Any messages from JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver } // UnmarshalNext unmarshals the next protocol buffer from a JSON object stream. @@ -600,7 +655,10 @@ func (u *Unmarshaler) UnmarshalNext(dec *json.Decoder, pb proto.Message) error { if err := dec.Decode(&inputValue); err != nil { return err } - return u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil) + if err := u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil); err != nil { + return err + } + return checkRequiredFields(pb) } // Unmarshal unmarshals a JSON object stream into a protocol @@ -639,7 +697,14 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe // Allocate memory for pointer fields. if targetType.Kind() == reflect.Ptr { + // If input value is "null" and target is a pointer type, then the field should be treated as not set + // UNLESS the target is structpb.Value, in which case it should be set to structpb.NullValue. + _, isJSONPBUnmarshaler := target.Interface().(JSONPBUnmarshaler) + if string(inputValue) == "null" && targetType != reflect.TypeOf(&stpb.Value{}) && !isJSONPBUnmarshaler { + return nil + } target.Set(reflect.New(targetType.Elem())) + return u.unmarshalValue(target.Elem(), inputValue, prop) } @@ -647,15 +712,11 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe return jsu.UnmarshalJSONPB(u, []byte(inputValue)) } - // Handle well-known types. + // Handle well-known types that are not pointers. if w, ok := target.Addr().Interface().(wkt); ok { switch w.XXX_WellKnownType() { case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value", "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue": - // "Wrappers use the same representation in JSON - // as the wrapped primitive type, except that null is allowed." - // encoding/json will turn JSON `null` into Go `nil`, - // so we don't have to do any extra work. return u.unmarshalValue(target.Field(0), inputValue, prop) case "Any": // Use json.RawMessage pointer type instead of value to support pre-1.8 version. 
@@ -677,16 +738,17 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe } target.Field(0).SetString(turl) - mname := turl - if slash := strings.LastIndex(mname, "/"); slash >= 0 { - mname = mname[slash+1:] + var m proto.Message + var err error + if u.AnyResolver != nil { + m, err = u.AnyResolver.Resolve(turl) + } else { + m, err = defaultResolveAny(turl) } - mt := proto.MessageType(mname) - if mt == nil { - return fmt.Errorf("unknown message type %q", mname) + if err != nil { + return err } - m := reflect.New(mt.Elem()).Interface().(proto.Message) if _, ok := m.(wkt); ok { val, ok := jsonFields["value"] if !ok { @@ -716,21 +778,16 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe return nil case "Duration": - ivStr := string(inputValue) - if ivStr == "null" { - target.Field(0).SetInt(0) - target.Field(1).SetInt(0) - return nil - } - - unq, err := strconv.Unquote(ivStr) + unq, err := strconv.Unquote(string(inputValue)) if err != nil { return err } + d, err := time.ParseDuration(unq) if err != nil { return fmt.Errorf("bad Duration: %v", err) } + ns := d.Nanoseconds() s := ns / 1e9 ns %= 1e9 @@ -738,33 +795,25 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe target.Field(1).SetInt(ns) return nil case "Timestamp": - ivStr := string(inputValue) - if ivStr == "null" { - target.Field(0).SetInt(0) - target.Field(1).SetInt(0) - return nil - } - - unq, err := strconv.Unquote(ivStr) + unq, err := strconv.Unquote(string(inputValue)) if err != nil { return err } + t, err := time.Parse(time.RFC3339Nano, unq) if err != nil { return fmt.Errorf("bad Timestamp: %v", err) } - target.Field(0).SetInt(int64(t.Unix())) + + target.Field(0).SetInt(t.Unix()) target.Field(1).SetInt(int64(t.Nanosecond())) return nil case "Struct": - if string(inputValue) == "null" { - // Interpret a null struct as empty. - return nil - } var m map[string]json.RawMessage if err := json.Unmarshal(inputValue, &m); err != nil { return fmt.Errorf("bad StructValue: %v", err) } + target.Field(0).Set(reflect.ValueOf(map[string]*stpb.Value{})) for k, jv := range m { pv := &stpb.Value{} @@ -775,15 +824,12 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe } return nil case "ListValue": - if string(inputValue) == "null" { - // Interpret a null ListValue as empty. 
- return nil - } var s []json.RawMessage if err := json.Unmarshal(inputValue, &s); err != nil { return fmt.Errorf("bad ListValue: %v", err) } - target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s), len(s)))) + + target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s)))) for i, sv := range s { if err := u.unmarshalValue(target.Field(0).Index(i), sv, prop); err != nil { return err @@ -933,11 +979,13 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe if err := json.Unmarshal(inputValue, &slc); err != nil { return err } - len := len(slc) - target.Set(reflect.MakeSlice(targetType, len, len)) - for i := 0; i < len; i++ { - if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil { - return err + if slc != nil { + l := len(slc) + target.Set(reflect.MakeSlice(targetType, l, l)) + for i := 0; i < l; i++ { + if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil { + return err + } } } return nil @@ -949,33 +997,30 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe if err := json.Unmarshal(inputValue, &mp); err != nil { return err } - target.Set(reflect.MakeMap(targetType)) - var keyprop, valprop *proto.Properties - if prop != nil { - // These could still be nil if the protobuf metadata is broken somehow. - // TODO: This won't work because the fields are unexported. - // We should probably just reparse them. - //keyprop, valprop = prop.mkeyprop, prop.mvalprop - } - for ks, raw := range mp { - // Unmarshal map key. The core json library already decoded the key into a - // string, so we handle that specially. Other types were quoted post-serialization. - var k reflect.Value - if targetType.Key().Kind() == reflect.String { - k = reflect.ValueOf(ks) - } else { - k = reflect.New(targetType.Key()).Elem() - if err := u.unmarshalValue(k, json.RawMessage(ks), keyprop); err != nil { + if mp != nil { + target.Set(reflect.MakeMap(targetType)) + for ks, raw := range mp { + // Unmarshal map key. The core json library already decoded the key into a + // string, so we handle that specially. Other types were quoted post-serialization. + var k reflect.Value + if targetType.Key().Kind() == reflect.String { + k = reflect.ValueOf(ks) + } else { + k = reflect.New(targetType.Key()).Elem() + // TODO: pass the correct Properties if needed. + if err := u.unmarshalValue(k, json.RawMessage(ks), nil); err != nil { + return err + } + } + + // Unmarshal map value. + v := reflect.New(targetType.Elem()).Elem() + // TODO: pass the correct Properties if needed. + if err := u.unmarshalValue(v, raw, nil); err != nil { return err } + target.SetMapIndex(k, v) } - - // Unmarshal map value. - v := reflect.New(targetType.Elem()).Elem() - if err := u.unmarshalValue(v, raw, valprop); err != nil { - return err - } - target.SetMapIndex(k, v) } return nil } @@ -1057,3 +1102,140 @@ func (s mapKeys) Less(i, j int) bool { } return fmt.Sprint(s[i].Interface()) < fmt.Sprint(s[j].Interface()) } + +// checkRequiredFields returns an error if any required field in the given proto message is not set. +// This function is used by both Marshal and Unmarshal. While required fields only exist in a +// proto2 message, a proto3 message can contain proto2 message(s). +func checkRequiredFields(pb proto.Message) error { + // Most well-known type messages do not contain required fields. The "Any" type may contain + // a message that has required fields. 
+ // + // When an Any message is being marshaled, the code will invoked proto.Unmarshal on Any.Value + // field in order to transform that into JSON, and that should have returned an error if a + // required field is not set in the embedded message. + // + // When an Any message is being unmarshaled, the code will have invoked proto.Marshal on the + // embedded message to store the serialized message in Any.Value field, and that should have + // returned an error if a required field is not set. + if _, ok := pb.(wkt); ok { + return nil + } + + v := reflect.ValueOf(pb) + // Skip message if it is not a struct pointer. + if v.Kind() != reflect.Ptr { + return nil + } + v = v.Elem() + if v.Kind() != reflect.Struct { + return nil + } + + for i := 0; i < v.NumField(); i++ { + field := v.Field(i) + sfield := v.Type().Field(i) + + if sfield.PkgPath != "" { + // blank PkgPath means the field is exported; skip if not exported + continue + } + + if strings.HasPrefix(sfield.Name, "XXX_") { + continue + } + + // Oneof field is an interface implemented by wrapper structs containing the actual oneof + // field, i.e. an interface containing &T{real_value}. + if sfield.Tag.Get("protobuf_oneof") != "" { + if field.Kind() != reflect.Interface { + continue + } + v := field.Elem() + if v.Kind() != reflect.Ptr || v.IsNil() { + continue + } + v = v.Elem() + if v.Kind() != reflect.Struct || v.NumField() < 1 { + continue + } + field = v.Field(0) + sfield = v.Type().Field(0) + } + + protoTag := sfield.Tag.Get("protobuf") + if protoTag == "" { + continue + } + var prop proto.Properties + prop.Init(sfield.Type, sfield.Name, protoTag, &sfield) + + switch field.Kind() { + case reflect.Map: + if field.IsNil() { + continue + } + // Check each map value. + keys := field.MapKeys() + for _, k := range keys { + v := field.MapIndex(k) + if err := checkRequiredFieldsInValue(v); err != nil { + return err + } + } + case reflect.Slice: + // Handle non-repeated type, e.g. bytes. + if !prop.Repeated { + if prop.Required && field.IsNil() { + return fmt.Errorf("required field %q is not set", prop.Name) + } + continue + } + + // Handle repeated type. + if field.IsNil() { + continue + } + // Check each slice item. + for i := 0; i < field.Len(); i++ { + v := field.Index(i) + if err := checkRequiredFieldsInValue(v); err != nil { + return err + } + } + case reflect.Ptr: + if field.IsNil() { + if prop.Required { + return fmt.Errorf("required field %q is not set", prop.Name) + } + continue + } + if err := checkRequiredFieldsInValue(field); err != nil { + return err + } + } + } + + // Handle proto2 extensions. 
+ for _, ext := range proto.RegisteredExtensions(pb) { + if !proto.HasExtension(pb, ext) { + continue + } + ep, err := proto.GetExtension(pb, ext) + if err != nil { + return err + } + err = checkRequiredFieldsInValue(reflect.ValueOf(ep)) + if err != nil { + return err + } + } + + return nil +} + +func checkRequiredFieldsInValue(v reflect.Value) error { + if pm, ok := v.Interface().(proto.Message); ok { + return checkRequiredFields(pm) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go index da93163e..c9934d97 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go @@ -379,9 +379,9 @@ var marshalingTests = []struct { &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}, `{"strry":{"\"one\"":"two","three":"four"}}`}, {"map", marshaler, - &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}, `{"objjy":{"1":{"dub":1}}}`}, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, `{"objjy":{"1":{"dub":1}}}`}, {"map", marshalerAllOptions, - &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}, objjyPrettyJSON}, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, objjyPrettyJSON}, {"map", marshaler, &pb.Mappy{Buggy: map[int64]string{1234: "yup"}}, `{"buggy":{"1234":"yup"}}`}, {"map", marshaler, &pb.Mappy{Booly: map[bool]bool{false: true}}, `{"booly":{"false":true}}`}, @@ -395,7 +395,7 @@ var marshalingTests = []struct { {"proto2 map", marshaler, &pb.Maps{MInt64Str: map[int64]string{213: "cat"}}, `{"mInt64Str":{"213":"cat"}}`}, {"proto2 map", marshaler, - &pb.Maps{MBoolSimple: map[bool]*pb.Simple{true: &pb.Simple{OInt32: proto.Int32(1)}}}, + &pb.Maps{MBoolSimple: map[bool]*pb.Simple{true: {OInt32: proto.Int32(1)}}}, `{"mBoolSimple":{"true":{"oInt32":1}}}`}, {"oneof, not set", marshaler, &pb.MsgWithOneof{}, `{}`}, {"oneof, set", marshaler, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Title{"Grand Poobah"}}, `{"title":"Grand Poobah"}`}, @@ -406,7 +406,10 @@ var marshalingTests = []struct { {"Any with message and indent", marshalerAllOptions, anySimple, anySimplePrettyJSON}, {"Any with WKT", marshaler, anyWellKnown, anyWellKnownJSON}, {"Any with WKT and indent", marshalerAllOptions, anyWellKnown, anyWellKnownPrettyJSON}, - {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}, `{"dur":"3.000s"}`}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}, `{"dur":"3s"}`}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3, Nanos: 1e6}}, `{"dur":"3.001s"}`}, + {"Duration beyond float64 precision", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 100000000, Nanos: 1}}, `{"dur":"100000000.000000001s"}`}, + {"negative Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: -123, Nanos: -456}}, `{"dur":"-123.000000456s"}`}, {"Struct", marshaler, &pb.KnownTypes{St: &stpb.Struct{ Fields: map[string]*stpb.Value{ "one": {Kind: &stpb.Value_StringValue{"loneliest number"}}, @@ -421,6 +424,7 @@ var marshalingTests = []struct { {Kind: &stpb.Value_BoolValue{true}}, }}}, `{"lv":["x",null,3,true]}`}, {"Timestamp", marshaler, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}, `{"ts":"2014-05-13T16:53:20.021Z"}`}, + {"Timestamp", marshaler, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 0}}, `{"ts":"2014-05-13T16:53:20Z"}`}, {"number Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: 
&stpb.Value_NumberValue{1}}}, `{"val":1}`}, {"null Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NullValue{stpb.NullValue_NULL_VALUE}}}, `{"val":null}`}, {"string number value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_StringValue{"9223372036854775807"}}}, `{"val":"9223372036854775807"}`}, @@ -449,6 +453,9 @@ var marshalingTests = []struct { {"BoolValue", marshaler, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}, `{"bool":true}`}, {"StringValue", marshaler, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}, `{"str":"plush"}`}, {"BytesValue", marshaler, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}, `{"bytes":"d293"}`}, + + {"required", marshaler, &pb.MsgWithRequired{Str: proto.String("hello")}, `{"str":"hello"}`}, + {"required bytes", marshaler, &pb.MsgWithRequiredBytes{Byts: []byte{}}, `{"byts":""}`}, } func TestMarshaling(t *testing.T) { @@ -462,6 +469,40 @@ func TestMarshaling(t *testing.T) { } } +func TestMarshalingNil(t *testing.T) { + var msg *pb.Simple + m := &Marshaler{} + if _, err := m.MarshalToString(msg); err == nil { + t.Errorf("mashaling nil returned no error") + } +} + +func TestMarshalIllegalTime(t *testing.T) { + tests := []struct { + pb proto.Message + fail bool + }{ + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: 0}}, false}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: 0}}, false}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: -1}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: 1}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: 1000000000}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: -1000000000}}, true}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: 1}}, false}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: -1}}, true}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: 1000000000}}, true}, + } + for _, tt := range tests { + _, err := marshaler.MarshalToString(tt.pb) + if err == nil && tt.fail { + t.Errorf("marshaler.MarshalToString(%v) = _, ; want _, ", tt.pb) + } + if err != nil && !tt.fail { + t.Errorf("marshaler.MarshalToString(%v) = _, %v; want _, ", tt.pb, err) + } + } +} + func TestMarshalJSONPBMarshaler(t *testing.T) { rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` msg := dynamicMessage{rawJson: rawJson} @@ -486,12 +527,110 @@ func TestMarshalAnyJSONPBMarshaler(t *testing.T) { } // after custom marshaling, it's round-tripped through JSON decoding/encoding already, // so the keys are sorted, whitespace is compacted, and "@type" key has been added - expected := `{"@type":"type.googleapis.com/` + dynamicMessageName +`","baz":[0,1,2,3],"foo":"bar"}` + expected := `{"@type":"type.googleapis.com/` + dynamicMessageName + `","baz":[0,1,2,3],"foo":"bar"}` if str != expected { t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", str, expected) } } +func TestMarshalWithCustomValidation(t *testing.T) { + msg := dynamicMessage{rawJson: `{ "foo": "bar", "baz": [0, 1, 2, 3] }`, dummy: &dynamicMessage{}} + + js, err := new(Marshaler).MarshalToString(&msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshalling to json: %v", err) + } + err = Unmarshal(strings.NewReader(js), &msg) + if err != nil { + t.Errorf("an unexpected error occurred when unmarshalling from json: %v", err) + } +} + +// Test marshaling message containing unset required fields should produce error. 
+func TestMarshalUnsetRequiredFields(t *testing.T) { + msgExt := &pb.Real{} + proto.SetExtension(msgExt, pb.E_Extm, &pb.MsgWithRequired{}) + + tests := []struct { + desc string + marshaler *Marshaler + pb proto.Message + }{ + { + desc: "direct required field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequired{}, + }, + { + desc: "direct required field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithRequired{}, + }, + { + desc: "indirect required field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{Subm: &pb.MsgWithRequired{}}, + }, + { + desc: "indirect required field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithIndirectRequired{Subm: &pb.MsgWithRequired{}}, + }, + { + desc: "direct required wkt field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequiredWKT{}, + }, + { + desc: "direct required wkt field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithRequiredWKT{}, + }, + { + desc: "direct required bytes field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequiredBytes{}, + }, + { + desc: "required in map value", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{ + MapField: map[string]*pb.MsgWithRequired{ + "key": {}, + }, + }, + }, + { + desc: "required in repeated item", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{ + SliceField: []*pb.MsgWithRequired{ + {Str: proto.String("hello")}, + {}, + }, + }, + }, + { + desc: "required inside oneof", + marshaler: &Marshaler{}, + pb: &pb.MsgWithOneof{ + Union: &pb.MsgWithOneof_MsgWithRequired{&pb.MsgWithRequired{}}, + }, + }, + { + desc: "required inside extension", + marshaler: &Marshaler{}, + pb: msgExt, + }, + } + + for _, tc := range tests { + if _, err := tc.marshaler.MarshalToString(tc.pb); err == nil { + t.Errorf("%s: expecting error in marshaling with unset required fields %+v", tc.desc, tc.pb) + } + } +} + var unmarshalingTests = []struct { desc string unmarshaler Unmarshaler @@ -535,7 +674,7 @@ var unmarshalingTests = []struct { {"-Inf", Unmarshaler{}, `{"oDouble":"-Infinity"}`, &pb.Simple{ODouble: proto.Float64(math.Inf(-1))}}, {"map", Unmarshaler{}, `{"nummy":{"1":2,"3":4}}`, &pb.Mappy{Nummy: map[int64]int32{1: 2, 3: 4}}}, {"map", Unmarshaler{}, `{"strry":{"\"one\"":"two","three":"four"}}`, &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}}, - {"map", Unmarshaler{}, `{"objjy":{"1":{"dub":1}}}`, &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}}, + {"map", Unmarshaler{}, `{"objjy":{"1":{"dub":1}}}`, &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}}, {"proto2 extension", Unmarshaler{}, realNumberJSON, realNumber}, {"Any with message", Unmarshaler{}, anySimpleJSON, anySimple}, {"Any with message and indent", Unmarshaler{}, anySimplePrettyJSON, anySimple}, @@ -553,12 +692,14 @@ var unmarshalingTests = []struct { {"camelName input", Unmarshaler{}, `{"oBool":true}`, &pb.Simple{OBool: proto.Bool(true)}}, {"Duration", Unmarshaler{}, `{"dur":"3.000s"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}}, - {"null Duration", Unmarshaler{}, `{"dur":null}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 0}}}, + {"Duration", Unmarshaler{}, `{"dur":"4s"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 4}}}, + {"null Duration", Unmarshaler{}, `{"dur":null}`, &pb.KnownTypes{Dur: nil}}, {"Timestamp", Unmarshaler{}, `{"ts":"2014-05-13T16:53:20.021Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}}, + {"Timestamp", Unmarshaler{}, 
`{"ts":"2014-05-13T16:53:20Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 0}}}, {"PreEpochTimestamp", Unmarshaler{}, `{"ts":"1969-12-31T23:59:58.999999995Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -2, Nanos: 999999995}}}, {"ZeroTimeTimestamp", Unmarshaler{}, `{"ts":"0001-01-01T00:00:00Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -62135596800, Nanos: 0}}}, - {"null Timestamp", Unmarshaler{}, `{"ts":null}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 0, Nanos: 0}}}, - {"null Struct", Unmarshaler{}, `{"st": null}`, &pb.KnownTypes{St: &stpb.Struct{}}}, + {"null Timestamp", Unmarshaler{}, `{"ts":null}`, &pb.KnownTypes{Ts: nil}}, + {"null Struct", Unmarshaler{}, `{"st": null}`, &pb.KnownTypes{St: nil}}, {"empty Struct", Unmarshaler{}, `{"st": {}}`, &pb.KnownTypes{St: &stpb.Struct{}}}, {"basic Struct", Unmarshaler{}, `{"st": {"a": "x", "b": null, "c": 3, "d": true}}`, &pb.KnownTypes{St: &stpb.Struct{Fields: map[string]*stpb.Value{ "a": {Kind: &stpb.Value_StringValue{"x"}}, @@ -575,7 +716,7 @@ var unmarshalingTests = []struct { }}}}, }}}}, }}}}, - {"null ListValue", Unmarshaler{}, `{"lv": null}`, &pb.KnownTypes{Lv: &stpb.ListValue{}}}, + {"null ListValue", Unmarshaler{}, `{"lv": null}`, &pb.KnownTypes{Lv: nil}}, {"empty ListValue", Unmarshaler{}, `{"lv": []}`, &pb.KnownTypes{Lv: &stpb.ListValue{}}}, {"basic ListValue", Unmarshaler{}, `{"lv": ["x", null, 3, true]}`, &pb.KnownTypes{Lv: &stpb.ListValue{Values: []*stpb.Value{ {Kind: &stpb.Value_StringValue{"x"}}, @@ -612,8 +753,20 @@ var unmarshalingTests = []struct { {"BoolValue", Unmarshaler{}, `{"bool":true}`, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}}, {"StringValue", Unmarshaler{}, `{"str":"plush"}`, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}}, {"BytesValue", Unmarshaler{}, `{"bytes":"d293"}`, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}}, - // `null` is also a permissible value. Let's just test one. - {"null DoubleValue", Unmarshaler{}, `{"dbl":null}`, &pb.KnownTypes{Dbl: &wpb.DoubleValue{}}}, + + // Ensure that `null` as a value ends up with a nil pointer instead of a [type]Value struct. 
+ {"null DoubleValue", Unmarshaler{}, `{"dbl":null}`, &pb.KnownTypes{Dbl: nil}}, + {"null FloatValue", Unmarshaler{}, `{"flt":null}`, &pb.KnownTypes{Flt: nil}}, + {"null Int64Value", Unmarshaler{}, `{"i64":null}`, &pb.KnownTypes{I64: nil}}, + {"null UInt64Value", Unmarshaler{}, `{"u64":null}`, &pb.KnownTypes{U64: nil}}, + {"null Int32Value", Unmarshaler{}, `{"i32":null}`, &pb.KnownTypes{I32: nil}}, + {"null UInt32Value", Unmarshaler{}, `{"u32":null}`, &pb.KnownTypes{U32: nil}}, + {"null BoolValue", Unmarshaler{}, `{"bool":null}`, &pb.KnownTypes{Bool: nil}}, + {"null StringValue", Unmarshaler{}, `{"str":null}`, &pb.KnownTypes{Str: nil}}, + {"null BytesValue", Unmarshaler{}, `{"bytes":null}`, &pb.KnownTypes{Bytes: nil}}, + + {"required", Unmarshaler{}, `{"str":"hello"}`, &pb.MsgWithRequired{Str: proto.String("hello")}}, + {"required bytes", Unmarshaler{}, `{"byts": []}`, &pb.MsgWithRequiredBytes{Byts: []byte{}}}, } func TestUnmarshaling(t *testing.T) { @@ -636,6 +789,26 @@ func TestUnmarshaling(t *testing.T) { } } +func TestUnmarshalNullArray(t *testing.T) { + var repeats pb.Repeats + if err := UnmarshalString(`{"rBool":null}`, &repeats); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(repeats, pb.Repeats{}) { + t.Errorf("got non-nil fields in [%#v]", repeats) + } +} + +func TestUnmarshalNullObject(t *testing.T) { + var maps pb.Maps + if err := UnmarshalString(`{"mInt64Str":null}`, &maps); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(maps, pb.Maps{}) { + t.Errorf("got non-nil fields in [%#v]", maps) + } +} + func TestUnmarshalNext(t *testing.T) { // We only need to check against a few, not all of them. tests := unmarshalingTests[:5] @@ -692,6 +865,65 @@ func TestUnmarshalingBadInput(t *testing.T) { } } +type funcResolver func(turl string) (proto.Message, error) + +func (fn funcResolver) Resolve(turl string) (proto.Message, error) { + return fn(turl) +} + +func TestAnyWithCustomResolver(t *testing.T) { + var resolvedTypeUrls []string + resolver := funcResolver(func(turl string) (proto.Message, error) { + resolvedTypeUrls = append(resolvedTypeUrls, turl) + return new(pb.Simple), nil + }) + msg := &pb.Simple{ + OBytes: []byte{1, 2, 3, 4}, + OBool: proto.Bool(true), + OString: proto.String("foobar"), + OInt64: proto.Int64(1020304), + } + msgBytes, err := proto.Marshal(msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling message: %v", err) + } + // make an Any with a type URL that won't resolve w/out custom resolver + any := &anypb.Any{ + TypeUrl: "https://foobar.com/some.random.MessageKind", + Value: msgBytes, + } + + m := Marshaler{AnyResolver: resolver} + js, err := m.MarshalToString(any) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling any to JSON: %v", err) + } + if len(resolvedTypeUrls) != 1 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[0] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[0], "https://foobar.com/some.random.MessageKind") + } + wanted := `{"@type":"https://foobar.com/some.random.MessageKind","oBool":true,"oInt64":"1020304","oString":"foobar","oBytes":"AQIDBA=="}` + if js != wanted { + t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", js, wanted) + } + + u := Unmarshaler{AnyResolver: resolver} + roundTrip := &anypb.Any{} + err = u.Unmarshal(bytes.NewReader([]byte(js)), roundTrip) + if err != nil { + t.Errorf("an unexpected error occurred when 
unmarshaling any from JSON: %v", err) + } + if len(resolvedTypeUrls) != 2 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[1] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[1], "https://foobar.com/some.random.MessageKind") + } + if !proto.Equal(any, roundTrip) { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", roundTrip, any) + } +} + func TestUnmarshalJSONPBUnmarshaler(t *testing.T) { rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` var msg dynamicMessage @@ -703,6 +935,19 @@ func TestUnmarshalJSONPBUnmarshaler(t *testing.T) { } } +func TestUnmarshalNullWithJSONPBUnmarshaler(t *testing.T) { + rawJson := `{"stringField":null}` + var ptrFieldMsg ptrFieldMessage + if err := Unmarshal(strings.NewReader(rawJson), &ptrFieldMsg); err != nil { + t.Errorf("unmarshal error: %v", err) + } + + want := ptrFieldMessage{StringField: &stringField{IsSet: true, StringValue: "null"}} + if !proto.Equal(&ptrFieldMsg, &want) { + t.Errorf("unmarshal result StringField: got %v, want %v", ptrFieldMsg, want) + } +} + func TestUnmarshalAnyJSONPBUnmarshaler(t *testing.T) { rawJson := `{ "@type": "blah.com/` + dynamicMessageName + `", "foo": "bar", "baz": [0, 1, 2, 3] }` var got anypb.Any @@ -720,22 +965,62 @@ func TestUnmarshalAnyJSONPBUnmarshaler(t *testing.T) { } if !proto.Equal(&got, &want) { - t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", got, want) + t.Errorf("message contents not set correctly after unmarshalling JSON: got %v, wanted %v", got, want) } } const ( dynamicMessageName = "google.protobuf.jsonpb.testing.dynamicMessage" ) + func init() { // we register the custom type below so that we can use it in Any types proto.RegisterType((*dynamicMessage)(nil), dynamicMessageName) } +type ptrFieldMessage struct { + StringField *stringField `protobuf:"bytes,1,opt,name=stringField"` +} + +func (m *ptrFieldMessage) Reset() { +} + +func (m *ptrFieldMessage) String() string { + return m.StringField.StringValue +} + +func (m *ptrFieldMessage) ProtoMessage() { +} + +type stringField struct { + IsSet bool `protobuf:"varint,1,opt,name=isSet"` + StringValue string `protobuf:"bytes,2,opt,name=stringValue"` +} + +func (s *stringField) Reset() { +} + +func (s *stringField) String() string { + return s.StringValue +} + +func (s *stringField) ProtoMessage() { +} + +func (s *stringField) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { + s.IsSet = true + s.StringValue = string(js) + return nil +} + // dynamicMessage implements protobuf.Message but is not a normal generated message type. // It provides implementations of JSONPBMarshaler and JSONPBUnmarshaler for JSON support. type dynamicMessage struct { rawJson string `protobuf:"bytes,1,opt,name=rawJson"` + + // an unexported nested message is present just to ensure that it + // won't result in a panic (see issue #509) + dummy *dynamicMessage `protobuf:"bytes,2,opt,name=dummy"` } func (m *dynamicMessage) Reset() { @@ -756,4 +1041,110 @@ func (m *dynamicMessage) MarshalJSONPB(jm *Marshaler) ([]byte, error) { func (m *dynamicMessage) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { m.rawJson = string(js) return nil -} \ No newline at end of file +} + +// Test unmarshaling message containing unset required fields should produce error. 
+func TestUnmarshalUnsetRequiredFields(t *testing.T) { + tests := []struct { + desc string + pb proto.Message + json string + }{ + { + desc: "direct required field missing", + pb: &pb.MsgWithRequired{}, + json: `{}`, + }, + { + desc: "direct required field set to null", + pb: &pb.MsgWithRequired{}, + json: `{"str": null}`, + }, + { + desc: "indirect required field missing", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"subm": {}}`, + }, + { + desc: "indirect required field set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"subm": {"str": null}}`, + }, + { + desc: "direct required bytes field missing", + pb: &pb.MsgWithRequiredBytes{}, + json: `{}`, + }, + { + desc: "direct required bytes field set to null", + pb: &pb.MsgWithRequiredBytes{}, + json: `{"byts": null}`, + }, + { + desc: "direct required wkt field missing", + pb: &pb.MsgWithRequiredWKT{}, + json: `{}`, + }, + { + desc: "direct required wkt field set to null", + pb: &pb.MsgWithRequiredWKT{}, + json: `{"str": null}`, + }, + { + desc: "any containing message with required field set to null", + pb: &pb.KnownTypes{}, + json: `{"an": {"@type": "example.com/jsonpb.MsgWithRequired", "str": null}}`, + }, + { + desc: "any containing message with missing required field", + pb: &pb.KnownTypes{}, + json: `{"an": {"@type": "example.com/jsonpb.MsgWithRequired"}}`, + }, + { + desc: "missing required in map value", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"map_field": {"a": {}, "b": {"str": "hi"}}}`, + }, + { + desc: "required in map value set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"map_field": {"a": {"str": "hello"}, "b": {"str": null}}}`, + }, + { + desc: "missing required in slice item", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"slice_field": [{}, {"str": "hi"}]}`, + }, + { + desc: "required in slice item set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"slice_field": [{"str": "hello"}, {"str": null}]}`, + }, + { + desc: "required inside oneof missing", + pb: &pb.MsgWithOneof{}, + json: `{"msgWithRequired": {}}`, + }, + { + desc: "required inside oneof set to null", + pb: &pb.MsgWithOneof{}, + json: `{"msgWithRequired": {"str": null}}`, + }, + { + desc: "required field in extension missing", + pb: &pb.Real{}, + json: `{"[jsonpb.extm]":{}}`, + }, + { + desc: "required field in extension set to null", + pb: &pb.Real{}, + json: `{"[jsonpb.extm]":{"str": null}}`, + }, + } + + for _, tc := range tests { + if err := UnmarshalString(tc.json, tc.pb); err == nil { + t.Errorf("%s: expecting error in unmarshaling with unset required fields %s", tc.desc, tc.json) + } + } +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile deleted file mode 100644 index eeda8ae5..00000000 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2015 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -regenerate: - protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers:. *.proto diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go index ebb180e8..1bcce029 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go @@ -1,29 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: more_test_objects.proto -/* -Package jsonpb is a generated protocol buffer package. 
- -It is generated from these files: - more_test_objects.proto - test_objects.proto - -It has these top-level messages: - Simple3 - SimpleSlice3 - SimpleMap3 - SimpleNull3 - Mappy - Simple - NonFinites - Repeats - Widget - Maps - MsgWithOneof - Real - Complex - KnownTypes -*/ package jsonpb import proto "github.com/golang/protobuf/proto" @@ -63,16 +40,40 @@ var Numeral_value = map[string]int32{ func (x Numeral) String() string { return proto.EnumName(Numeral_name, int32(x)) } -func (Numeral) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -type Simple3 struct { - Dub float64 `protobuf:"fixed64,1,opt,name=dub" json:"dub,omitempty"` +func (Numeral) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{0} } -func (m *Simple3) Reset() { *m = Simple3{} } -func (m *Simple3) String() string { return proto.CompactTextString(m) } -func (*Simple3) ProtoMessage() {} -func (*Simple3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +type Simple3 struct { + Dub float64 `protobuf:"fixed64,1,opt,name=dub" json:"dub,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Simple3) Reset() { *m = Simple3{} } +func (m *Simple3) String() string { return proto.CompactTextString(m) } +func (*Simple3) ProtoMessage() {} +func (*Simple3) Descriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{0} +} +func (m *Simple3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Simple3.Unmarshal(m, b) +} +func (m *Simple3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Simple3.Marshal(b, m, deterministic) +} +func (dst *Simple3) XXX_Merge(src proto.Message) { + xxx_messageInfo_Simple3.Merge(dst, src) +} +func (m *Simple3) XXX_Size() int { + return xxx_messageInfo_Simple3.Size(m) +} +func (m *Simple3) XXX_DiscardUnknown() { + xxx_messageInfo_Simple3.DiscardUnknown(m) +} + +var xxx_messageInfo_Simple3 proto.InternalMessageInfo func (m *Simple3) GetDub() float64 { if m != nil { @@ -82,13 +83,35 @@ func (m *Simple3) GetDub() float64 { } type SimpleSlice3 struct { - Slices []string `protobuf:"bytes,1,rep,name=slices" json:"slices,omitempty"` + Slices []string `protobuf:"bytes,1,rep,name=slices" json:"slices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SimpleSlice3) Reset() { *m = SimpleSlice3{} } -func (m *SimpleSlice3) String() string { return proto.CompactTextString(m) } -func (*SimpleSlice3) ProtoMessage() {} -func (*SimpleSlice3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *SimpleSlice3) Reset() { *m = SimpleSlice3{} } +func (m *SimpleSlice3) String() string { return proto.CompactTextString(m) } +func (*SimpleSlice3) ProtoMessage() {} +func (*SimpleSlice3) Descriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{1} +} +func (m *SimpleSlice3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleSlice3.Unmarshal(m, b) +} +func (m *SimpleSlice3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleSlice3.Marshal(b, m, deterministic) +} +func (dst *SimpleSlice3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleSlice3.Merge(dst, src) +} +func (m *SimpleSlice3) XXX_Size() int { + return xxx_messageInfo_SimpleSlice3.Size(m) +} +func (m *SimpleSlice3) 
XXX_DiscardUnknown() { + xxx_messageInfo_SimpleSlice3.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleSlice3 proto.InternalMessageInfo func (m *SimpleSlice3) GetSlices() []string { if m != nil { @@ -98,13 +121,35 @@ func (m *SimpleSlice3) GetSlices() []string { } type SimpleMap3 struct { - Stringy map[string]string `protobuf:"bytes,1,rep,name=stringy" json:"stringy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Stringy map[string]string `protobuf:"bytes,1,rep,name=stringy" json:"stringy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SimpleMap3) Reset() { *m = SimpleMap3{} } -func (m *SimpleMap3) String() string { return proto.CompactTextString(m) } -func (*SimpleMap3) ProtoMessage() {} -func (*SimpleMap3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *SimpleMap3) Reset() { *m = SimpleMap3{} } +func (m *SimpleMap3) String() string { return proto.CompactTextString(m) } +func (*SimpleMap3) ProtoMessage() {} +func (*SimpleMap3) Descriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{2} +} +func (m *SimpleMap3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleMap3.Unmarshal(m, b) +} +func (m *SimpleMap3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleMap3.Marshal(b, m, deterministic) +} +func (dst *SimpleMap3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleMap3.Merge(dst, src) +} +func (m *SimpleMap3) XXX_Size() int { + return xxx_messageInfo_SimpleMap3.Size(m) +} +func (m *SimpleMap3) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleMap3.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleMap3 proto.InternalMessageInfo func (m *SimpleMap3) GetStringy() map[string]string { if m != nil { @@ -114,13 +159,35 @@ func (m *SimpleMap3) GetStringy() map[string]string { } type SimpleNull3 struct { - Simple *Simple3 `protobuf:"bytes,1,opt,name=simple" json:"simple,omitempty"` + Simple *Simple3 `protobuf:"bytes,1,opt,name=simple" json:"simple,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SimpleNull3) Reset() { *m = SimpleNull3{} } -func (m *SimpleNull3) String() string { return proto.CompactTextString(m) } -func (*SimpleNull3) ProtoMessage() {} -func (*SimpleNull3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *SimpleNull3) Reset() { *m = SimpleNull3{} } +func (m *SimpleNull3) String() string { return proto.CompactTextString(m) } +func (*SimpleNull3) ProtoMessage() {} +func (*SimpleNull3) Descriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{3} +} +func (m *SimpleNull3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleNull3.Unmarshal(m, b) +} +func (m *SimpleNull3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleNull3.Marshal(b, m, deterministic) +} +func (dst *SimpleNull3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleNull3.Merge(dst, src) +} +func (m *SimpleNull3) XXX_Size() int { + return xxx_messageInfo_SimpleNull3.Size(m) +} +func (m *SimpleNull3) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleNull3.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleNull3 proto.InternalMessageInfo func (m *SimpleNull3) GetSimple() *Simple3 { if m != nil { @@ 
-130,22 +197,44 @@ func (m *SimpleNull3) GetSimple() *Simple3 { } type Mappy struct { - Nummy map[int64]int32 `protobuf:"bytes,1,rep,name=nummy" json:"nummy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - Strry map[string]string `protobuf:"bytes,2,rep,name=strry" json:"strry,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Objjy map[int32]*Simple3 `protobuf:"bytes,3,rep,name=objjy" json:"objjy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Buggy map[int64]string `protobuf:"bytes,4,rep,name=buggy" json:"buggy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Booly map[bool]bool `protobuf:"bytes,5,rep,name=booly" json:"booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - Enumy map[string]Numeral `protobuf:"bytes,6,rep,name=enumy" json:"enumy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=jsonpb.Numeral"` - S32Booly map[int32]bool `protobuf:"bytes,7,rep,name=s32booly" json:"s32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - S64Booly map[int64]bool `protobuf:"bytes,8,rep,name=s64booly" json:"s64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - U32Booly map[uint32]bool `protobuf:"bytes,9,rep,name=u32booly" json:"u32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - U64Booly map[uint64]bool `protobuf:"bytes,10,rep,name=u64booly" json:"u64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Nummy map[int64]int32 `protobuf:"bytes,1,rep,name=nummy" json:"nummy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Strry map[string]string `protobuf:"bytes,2,rep,name=strry" json:"strry,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Objjy map[int32]*Simple3 `protobuf:"bytes,3,rep,name=objjy" json:"objjy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Buggy map[int64]string `protobuf:"bytes,4,rep,name=buggy" json:"buggy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Booly map[bool]bool `protobuf:"bytes,5,rep,name=booly" json:"booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Enumy map[string]Numeral `protobuf:"bytes,6,rep,name=enumy" json:"enumy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=jsonpb.Numeral"` + S32Booly map[int32]bool `protobuf:"bytes,7,rep,name=s32booly" json:"s32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + S64Booly map[int64]bool `protobuf:"bytes,8,rep,name=s64booly" json:"s64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + U32Booly map[uint32]bool `protobuf:"bytes,9,rep,name=u32booly" json:"u32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + U64Booly map[uint64]bool `protobuf:"bytes,10,rep,name=u64booly" json:"u64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Mappy) Reset() { *m = Mappy{} } -func (m *Mappy) 
String() string { return proto.CompactTextString(m) } -func (*Mappy) ProtoMessage() {} -func (*Mappy) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (m *Mappy) Reset() { *m = Mappy{} } +func (m *Mappy) String() string { return proto.CompactTextString(m) } +func (*Mappy) ProtoMessage() {} +func (*Mappy) Descriptor() ([]byte, []int) { + return fileDescriptor_more_test_objects_bef0d79b901f4c4a, []int{4} +} +func (m *Mappy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mappy.Unmarshal(m, b) +} +func (m *Mappy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mappy.Marshal(b, m, deterministic) +} +func (dst *Mappy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mappy.Merge(dst, src) +} +func (m *Mappy) XXX_Size() int { + return xxx_messageInfo_Mappy.Size(m) +} +func (m *Mappy) XXX_DiscardUnknown() { + xxx_messageInfo_Mappy.DiscardUnknown(m) +} + +var xxx_messageInfo_Mappy proto.InternalMessageInfo func (m *Mappy) GetNummy() map[int64]int32 { if m != nil { @@ -221,14 +310,27 @@ func init() { proto.RegisterType((*Simple3)(nil), "jsonpb.Simple3") proto.RegisterType((*SimpleSlice3)(nil), "jsonpb.SimpleSlice3") proto.RegisterType((*SimpleMap3)(nil), "jsonpb.SimpleMap3") + proto.RegisterMapType((map[string]string)(nil), "jsonpb.SimpleMap3.StringyEntry") proto.RegisterType((*SimpleNull3)(nil), "jsonpb.SimpleNull3") proto.RegisterType((*Mappy)(nil), "jsonpb.Mappy") + proto.RegisterMapType((map[bool]bool)(nil), "jsonpb.Mappy.BoolyEntry") + proto.RegisterMapType((map[int64]string)(nil), "jsonpb.Mappy.BuggyEntry") + proto.RegisterMapType((map[string]Numeral)(nil), "jsonpb.Mappy.EnumyEntry") + proto.RegisterMapType((map[int64]int32)(nil), "jsonpb.Mappy.NummyEntry") + proto.RegisterMapType((map[int32]*Simple3)(nil), "jsonpb.Mappy.ObjjyEntry") + proto.RegisterMapType((map[int32]bool)(nil), "jsonpb.Mappy.S32boolyEntry") + proto.RegisterMapType((map[int64]bool)(nil), "jsonpb.Mappy.S64boolyEntry") + proto.RegisterMapType((map[string]string)(nil), "jsonpb.Mappy.StrryEntry") + proto.RegisterMapType((map[uint32]bool)(nil), "jsonpb.Mappy.U32boolyEntry") + proto.RegisterMapType((map[uint64]bool)(nil), "jsonpb.Mappy.U64boolyEntry") proto.RegisterEnum("jsonpb.Numeral", Numeral_name, Numeral_value) } -func init() { proto.RegisterFile("more_test_objects.proto", fileDescriptor0) } +func init() { + proto.RegisterFile("more_test_objects.proto", fileDescriptor_more_test_objects_bef0d79b901f4c4a) +} -var fileDescriptor0 = []byte{ +var fileDescriptor_more_test_objects_bef0d79b901f4c4a = []byte{ // 526 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdd, 0x6b, 0xdb, 0x3c, 0x14, 0x87, 0x5f, 0x27, 0xf5, 0xd7, 0x49, 0xfb, 0x2e, 0x88, 0xb1, 0x99, 0xf4, 0x62, 0xc5, 0xb0, diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go index d413d740..d9e24db2 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go @@ -6,17 +6,23 @@ package jsonpb import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/any" -import google_protobuf1 "github.com/golang/protobuf/ptypes/duration" -import google_protobuf2 "github.com/golang/protobuf/ptypes/struct" -import google_protobuf3 
"github.com/golang/protobuf/ptypes/timestamp" -import google_protobuf4 "github.com/golang/protobuf/ptypes/wrappers" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + type Widget_Color int32 const ( @@ -52,28 +58,51 @@ func (x *Widget_Color) UnmarshalJSON(data []byte) error { *x = Widget_Color(value) return nil } -func (Widget_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{3, 0} } +func (Widget_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{3, 0} +} // Test message for holding primitive types. type Simple struct { - OBool *bool `protobuf:"varint,1,opt,name=o_bool,json=oBool" json:"o_bool,omitempty"` - OInt32 *int32 `protobuf:"varint,2,opt,name=o_int32,json=oInt32" json:"o_int32,omitempty"` - OInt64 *int64 `protobuf:"varint,3,opt,name=o_int64,json=oInt64" json:"o_int64,omitempty"` - OUint32 *uint32 `protobuf:"varint,4,opt,name=o_uint32,json=oUint32" json:"o_uint32,omitempty"` - OUint64 *uint64 `protobuf:"varint,5,opt,name=o_uint64,json=oUint64" json:"o_uint64,omitempty"` - OSint32 *int32 `protobuf:"zigzag32,6,opt,name=o_sint32,json=oSint32" json:"o_sint32,omitempty"` - OSint64 *int64 `protobuf:"zigzag64,7,opt,name=o_sint64,json=oSint64" json:"o_sint64,omitempty"` - OFloat *float32 `protobuf:"fixed32,8,opt,name=o_float,json=oFloat" json:"o_float,omitempty"` - ODouble *float64 `protobuf:"fixed64,9,opt,name=o_double,json=oDouble" json:"o_double,omitempty"` - OString *string `protobuf:"bytes,10,opt,name=o_string,json=oString" json:"o_string,omitempty"` - OBytes []byte `protobuf:"bytes,11,opt,name=o_bytes,json=oBytes" json:"o_bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` + OBool *bool `protobuf:"varint,1,opt,name=o_bool,json=oBool" json:"o_bool,omitempty"` + OInt32 *int32 `protobuf:"varint,2,opt,name=o_int32,json=oInt32" json:"o_int32,omitempty"` + OInt64 *int64 `protobuf:"varint,3,opt,name=o_int64,json=oInt64" json:"o_int64,omitempty"` + OUint32 *uint32 `protobuf:"varint,4,opt,name=o_uint32,json=oUint32" json:"o_uint32,omitempty"` + OUint64 *uint64 `protobuf:"varint,5,opt,name=o_uint64,json=oUint64" json:"o_uint64,omitempty"` + OSint32 *int32 `protobuf:"zigzag32,6,opt,name=o_sint32,json=oSint32" json:"o_sint32,omitempty"` + OSint64 *int64 `protobuf:"zigzag64,7,opt,name=o_sint64,json=oSint64" json:"o_sint64,omitempty"` + OFloat *float32 `protobuf:"fixed32,8,opt,name=o_float,json=oFloat" json:"o_float,omitempty"` + ODouble *float64 `protobuf:"fixed64,9,opt,name=o_double,json=oDouble" json:"o_double,omitempty"` + OString *string `protobuf:"bytes,10,opt,name=o_string,json=oString" json:"o_string,omitempty"` + OBytes []byte `protobuf:"bytes,11,opt,name=o_bytes,json=oBytes" json:"o_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m 
*Simple) Reset() { *m = Simple{} } -func (m *Simple) String() string { return proto.CompactTextString(m) } -func (*Simple) ProtoMessage() {} -func (*Simple) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } +func (m *Simple) Reset() { *m = Simple{} } +func (m *Simple) String() string { return proto.CompactTextString(m) } +func (*Simple) ProtoMessage() {} +func (*Simple) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{0} +} +func (m *Simple) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Simple.Unmarshal(m, b) +} +func (m *Simple) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Simple.Marshal(b, m, deterministic) +} +func (dst *Simple) XXX_Merge(src proto.Message) { + xxx_messageInfo_Simple.Merge(dst, src) +} +func (m *Simple) XXX_Size() int { + return xxx_messageInfo_Simple.Size(m) +} +func (m *Simple) XXX_DiscardUnknown() { + xxx_messageInfo_Simple.DiscardUnknown(m) +} + +var xxx_messageInfo_Simple proto.InternalMessageInfo func (m *Simple) GetOBool() bool { if m != nil && m.OBool != nil { @@ -154,19 +183,40 @@ func (m *Simple) GetOBytes() []byte { // Test message for holding special non-finites primitives. type NonFinites struct { - FNan *float32 `protobuf:"fixed32,1,opt,name=f_nan,json=fNan" json:"f_nan,omitempty"` - FPinf *float32 `protobuf:"fixed32,2,opt,name=f_pinf,json=fPinf" json:"f_pinf,omitempty"` - FNinf *float32 `protobuf:"fixed32,3,opt,name=f_ninf,json=fNinf" json:"f_ninf,omitempty"` - DNan *float64 `protobuf:"fixed64,4,opt,name=d_nan,json=dNan" json:"d_nan,omitempty"` - DPinf *float64 `protobuf:"fixed64,5,opt,name=d_pinf,json=dPinf" json:"d_pinf,omitempty"` - DNinf *float64 `protobuf:"fixed64,6,opt,name=d_ninf,json=dNinf" json:"d_ninf,omitempty"` - XXX_unrecognized []byte `json:"-"` + FNan *float32 `protobuf:"fixed32,1,opt,name=f_nan,json=fNan" json:"f_nan,omitempty"` + FPinf *float32 `protobuf:"fixed32,2,opt,name=f_pinf,json=fPinf" json:"f_pinf,omitempty"` + FNinf *float32 `protobuf:"fixed32,3,opt,name=f_ninf,json=fNinf" json:"f_ninf,omitempty"` + DNan *float64 `protobuf:"fixed64,4,opt,name=d_nan,json=dNan" json:"d_nan,omitempty"` + DPinf *float64 `protobuf:"fixed64,5,opt,name=d_pinf,json=dPinf" json:"d_pinf,omitempty"` + DNinf *float64 `protobuf:"fixed64,6,opt,name=d_ninf,json=dNinf" json:"d_ninf,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *NonFinites) Reset() { *m = NonFinites{} } -func (m *NonFinites) String() string { return proto.CompactTextString(m) } -func (*NonFinites) ProtoMessage() {} -func (*NonFinites) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} } +func (m *NonFinites) Reset() { *m = NonFinites{} } +func (m *NonFinites) String() string { return proto.CompactTextString(m) } +func (*NonFinites) ProtoMessage() {} +func (*NonFinites) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{1} +} +func (m *NonFinites) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NonFinites.Unmarshal(m, b) +} +func (m *NonFinites) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NonFinites.Marshal(b, m, deterministic) +} +func (dst *NonFinites) XXX_Merge(src proto.Message) { + xxx_messageInfo_NonFinites.Merge(dst, src) +} +func (m *NonFinites) XXX_Size() int { + return xxx_messageInfo_NonFinites.Size(m) +} +func (m *NonFinites) XXX_DiscardUnknown() { + 
xxx_messageInfo_NonFinites.DiscardUnknown(m) +} + +var xxx_messageInfo_NonFinites proto.InternalMessageInfo func (m *NonFinites) GetFNan() float32 { if m != nil && m.FNan != nil { @@ -212,24 +262,45 @@ func (m *NonFinites) GetDNinf() float64 { // Test message for holding repeated primitives. type Repeats struct { - RBool []bool `protobuf:"varint,1,rep,name=r_bool,json=rBool" json:"r_bool,omitempty"` - RInt32 []int32 `protobuf:"varint,2,rep,name=r_int32,json=rInt32" json:"r_int32,omitempty"` - RInt64 []int64 `protobuf:"varint,3,rep,name=r_int64,json=rInt64" json:"r_int64,omitempty"` - RUint32 []uint32 `protobuf:"varint,4,rep,name=r_uint32,json=rUint32" json:"r_uint32,omitempty"` - RUint64 []uint64 `protobuf:"varint,5,rep,name=r_uint64,json=rUint64" json:"r_uint64,omitempty"` - RSint32 []int32 `protobuf:"zigzag32,6,rep,name=r_sint32,json=rSint32" json:"r_sint32,omitempty"` - RSint64 []int64 `protobuf:"zigzag64,7,rep,name=r_sint64,json=rSint64" json:"r_sint64,omitempty"` - RFloat []float32 `protobuf:"fixed32,8,rep,name=r_float,json=rFloat" json:"r_float,omitempty"` - RDouble []float64 `protobuf:"fixed64,9,rep,name=r_double,json=rDouble" json:"r_double,omitempty"` - RString []string `protobuf:"bytes,10,rep,name=r_string,json=rString" json:"r_string,omitempty"` - RBytes [][]byte `protobuf:"bytes,11,rep,name=r_bytes,json=rBytes" json:"r_bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` + RBool []bool `protobuf:"varint,1,rep,name=r_bool,json=rBool" json:"r_bool,omitempty"` + RInt32 []int32 `protobuf:"varint,2,rep,name=r_int32,json=rInt32" json:"r_int32,omitempty"` + RInt64 []int64 `protobuf:"varint,3,rep,name=r_int64,json=rInt64" json:"r_int64,omitempty"` + RUint32 []uint32 `protobuf:"varint,4,rep,name=r_uint32,json=rUint32" json:"r_uint32,omitempty"` + RUint64 []uint64 `protobuf:"varint,5,rep,name=r_uint64,json=rUint64" json:"r_uint64,omitempty"` + RSint32 []int32 `protobuf:"zigzag32,6,rep,name=r_sint32,json=rSint32" json:"r_sint32,omitempty"` + RSint64 []int64 `protobuf:"zigzag64,7,rep,name=r_sint64,json=rSint64" json:"r_sint64,omitempty"` + RFloat []float32 `protobuf:"fixed32,8,rep,name=r_float,json=rFloat" json:"r_float,omitempty"` + RDouble []float64 `protobuf:"fixed64,9,rep,name=r_double,json=rDouble" json:"r_double,omitempty"` + RString []string `protobuf:"bytes,10,rep,name=r_string,json=rString" json:"r_string,omitempty"` + RBytes [][]byte `protobuf:"bytes,11,rep,name=r_bytes,json=rBytes" json:"r_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Repeats) Reset() { *m = Repeats{} } -func (m *Repeats) String() string { return proto.CompactTextString(m) } -func (*Repeats) ProtoMessage() {} -func (*Repeats) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} } +func (m *Repeats) Reset() { *m = Repeats{} } +func (m *Repeats) String() string { return proto.CompactTextString(m) } +func (*Repeats) ProtoMessage() {} +func (*Repeats) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{2} +} +func (m *Repeats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Repeats.Unmarshal(m, b) +} +func (m *Repeats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Repeats.Marshal(b, m, deterministic) +} +func (dst *Repeats) XXX_Merge(src proto.Message) { + xxx_messageInfo_Repeats.Merge(dst, src) +} +func (m *Repeats) XXX_Size() int { + return xxx_messageInfo_Repeats.Size(m) +} +func (m *Repeats) XXX_DiscardUnknown() { + 
xxx_messageInfo_Repeats.DiscardUnknown(m) +} + +var xxx_messageInfo_Repeats proto.InternalMessageInfo func (m *Repeats) GetRBool() []bool { if m != nil { @@ -310,19 +381,40 @@ func (m *Repeats) GetRBytes() [][]byte { // Test message for holding enums and nested messages. type Widget struct { - Color *Widget_Color `protobuf:"varint,1,opt,name=color,enum=jsonpb.Widget_Color" json:"color,omitempty"` - RColor []Widget_Color `protobuf:"varint,2,rep,name=r_color,json=rColor,enum=jsonpb.Widget_Color" json:"r_color,omitempty"` - Simple *Simple `protobuf:"bytes,10,opt,name=simple" json:"simple,omitempty"` - RSimple []*Simple `protobuf:"bytes,11,rep,name=r_simple,json=rSimple" json:"r_simple,omitempty"` - Repeats *Repeats `protobuf:"bytes,20,opt,name=repeats" json:"repeats,omitempty"` - RRepeats []*Repeats `protobuf:"bytes,21,rep,name=r_repeats,json=rRepeats" json:"r_repeats,omitempty"` - XXX_unrecognized []byte `json:"-"` + Color *Widget_Color `protobuf:"varint,1,opt,name=color,enum=jsonpb.Widget_Color" json:"color,omitempty"` + RColor []Widget_Color `protobuf:"varint,2,rep,name=r_color,json=rColor,enum=jsonpb.Widget_Color" json:"r_color,omitempty"` + Simple *Simple `protobuf:"bytes,10,opt,name=simple" json:"simple,omitempty"` + RSimple []*Simple `protobuf:"bytes,11,rep,name=r_simple,json=rSimple" json:"r_simple,omitempty"` + Repeats *Repeats `protobuf:"bytes,20,opt,name=repeats" json:"repeats,omitempty"` + RRepeats []*Repeats `protobuf:"bytes,21,rep,name=r_repeats,json=rRepeats" json:"r_repeats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Widget) Reset() { *m = Widget{} } -func (m *Widget) String() string { return proto.CompactTextString(m) } -func (*Widget) ProtoMessage() {} -func (*Widget) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3} } +func (m *Widget) Reset() { *m = Widget{} } +func (m *Widget) String() string { return proto.CompactTextString(m) } +func (*Widget) ProtoMessage() {} +func (*Widget) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{3} +} +func (m *Widget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Widget.Unmarshal(m, b) +} +func (m *Widget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Widget.Marshal(b, m, deterministic) +} +func (dst *Widget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Widget.Merge(dst, src) +} +func (m *Widget) XXX_Size() int { + return xxx_messageInfo_Widget.Size(m) +} +func (m *Widget) XXX_DiscardUnknown() { + xxx_messageInfo_Widget.DiscardUnknown(m) +} + +var xxx_messageInfo_Widget proto.InternalMessageInfo func (m *Widget) GetColor() Widget_Color { if m != nil && m.Color != nil { @@ -367,15 +459,36 @@ func (m *Widget) GetRRepeats() []*Repeats { } type Maps struct { - MInt64Str map[int64]string `protobuf:"bytes,1,rep,name=m_int64_str,json=mInt64Str" json:"m_int64_str,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - MBoolSimple map[bool]*Simple `protobuf:"bytes,2,rep,name=m_bool_simple,json=mBoolSimple" json:"m_bool_simple,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - XXX_unrecognized []byte `json:"-"` + MInt64Str map[int64]string `protobuf:"bytes,1,rep,name=m_int64_str,json=mInt64Str" json:"m_int64_str,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MBoolSimple map[bool]*Simple 
`protobuf:"bytes,2,rep,name=m_bool_simple,json=mBoolSimple" json:"m_bool_simple,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Maps) Reset() { *m = Maps{} } -func (m *Maps) String() string { return proto.CompactTextString(m) } -func (*Maps) ProtoMessage() {} -func (*Maps) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{4} } +func (m *Maps) Reset() { *m = Maps{} } +func (m *Maps) String() string { return proto.CompactTextString(m) } +func (*Maps) ProtoMessage() {} +func (*Maps) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{4} +} +func (m *Maps) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Maps.Unmarshal(m, b) +} +func (m *Maps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Maps.Marshal(b, m, deterministic) +} +func (dst *Maps) XXX_Merge(src proto.Message) { + xxx_messageInfo_Maps.Merge(dst, src) +} +func (m *Maps) XXX_Size() int { + return xxx_messageInfo_Maps.Size(m) +} +func (m *Maps) XXX_DiscardUnknown() { + xxx_messageInfo_Maps.DiscardUnknown(m) +} + +var xxx_messageInfo_Maps proto.InternalMessageInfo func (m *Maps) GetMInt64Str() map[int64]string { if m != nil { @@ -397,14 +510,36 @@ type MsgWithOneof struct { // *MsgWithOneof_Salary // *MsgWithOneof_Country // *MsgWithOneof_HomeAddress - Union isMsgWithOneof_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` + // *MsgWithOneof_MsgWithRequired + Union isMsgWithOneof_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MsgWithOneof) Reset() { *m = MsgWithOneof{} } -func (m *MsgWithOneof) String() string { return proto.CompactTextString(m) } -func (*MsgWithOneof) ProtoMessage() {} -func (*MsgWithOneof) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{5} } +func (m *MsgWithOneof) Reset() { *m = MsgWithOneof{} } +func (m *MsgWithOneof) String() string { return proto.CompactTextString(m) } +func (*MsgWithOneof) ProtoMessage() {} +func (*MsgWithOneof) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{5} +} +func (m *MsgWithOneof) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithOneof.Unmarshal(m, b) +} +func (m *MsgWithOneof) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithOneof.Marshal(b, m, deterministic) +} +func (dst *MsgWithOneof) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithOneof.Merge(dst, src) +} +func (m *MsgWithOneof) XXX_Size() int { + return xxx_messageInfo_MsgWithOneof.Size(m) +} +func (m *MsgWithOneof) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithOneof.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithOneof proto.InternalMessageInfo type isMsgWithOneof_Union interface { isMsgWithOneof_Union() @@ -422,11 +557,15 @@ type MsgWithOneof_Country struct { type MsgWithOneof_HomeAddress struct { HomeAddress string `protobuf:"bytes,4,opt,name=home_address,json=homeAddress,oneof"` } +type MsgWithOneof_MsgWithRequired struct { + MsgWithRequired *MsgWithRequired `protobuf:"bytes,5,opt,name=msg_with_required,json=msgWithRequired,oneof"` +} -func (*MsgWithOneof_Title) isMsgWithOneof_Union() {} -func (*MsgWithOneof_Salary) isMsgWithOneof_Union() {} -func (*MsgWithOneof_Country) isMsgWithOneof_Union() {} -func 
(*MsgWithOneof_HomeAddress) isMsgWithOneof_Union() {} +func (*MsgWithOneof_Title) isMsgWithOneof_Union() {} +func (*MsgWithOneof_Salary) isMsgWithOneof_Union() {} +func (*MsgWithOneof_Country) isMsgWithOneof_Union() {} +func (*MsgWithOneof_HomeAddress) isMsgWithOneof_Union() {} +func (*MsgWithOneof_MsgWithRequired) isMsgWithOneof_Union() {} func (m *MsgWithOneof) GetUnion() isMsgWithOneof_Union { if m != nil { @@ -463,6 +602,13 @@ func (m *MsgWithOneof) GetHomeAddress() string { return "" } +func (m *MsgWithOneof) GetMsgWithRequired() *MsgWithRequired { + if x, ok := m.GetUnion().(*MsgWithOneof_MsgWithRequired); ok { + return x.MsgWithRequired + } + return nil +} + // XXX_OneofFuncs is for the internal use of the proto package. func (*MsgWithOneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { return _MsgWithOneof_OneofMarshaler, _MsgWithOneof_OneofUnmarshaler, _MsgWithOneof_OneofSizer, []interface{}{ @@ -470,6 +616,7 @@ func (*MsgWithOneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) (*MsgWithOneof_Salary)(nil), (*MsgWithOneof_Country)(nil), (*MsgWithOneof_HomeAddress)(nil), + (*MsgWithOneof_MsgWithRequired)(nil), } } @@ -489,6 +636,11 @@ func _MsgWithOneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { case *MsgWithOneof_HomeAddress: b.EncodeVarint(4<<3 | proto.WireBytes) b.EncodeStringBytes(x.HomeAddress) + case *MsgWithOneof_MsgWithRequired: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MsgWithRequired); err != nil { + return err + } case nil: default: return fmt.Errorf("MsgWithOneof.Union has unexpected type %T", x) @@ -527,6 +679,14 @@ func _MsgWithOneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.B x, err := b.DecodeStringBytes() m.Union = &MsgWithOneof_HomeAddress{x} return true, err + case 5: // union.msg_with_required + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MsgWithRequired) + err := b.DecodeMessage(msg) + m.Union = &MsgWithOneof_MsgWithRequired{msg} + return true, err default: return false, nil } @@ -537,20 +697,25 @@ func _MsgWithOneof_OneofSizer(msg proto.Message) (n int) { // union switch x := m.Union.(type) { case *MsgWithOneof_Title: - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.Title))) n += len(x.Title) case *MsgWithOneof_Salary: - n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += 1 // tag and wire n += proto.SizeVarint(uint64(x.Salary)) case *MsgWithOneof_Country: - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.Country))) n += len(x.Country) case *MsgWithOneof_HomeAddress: - n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.HomeAddress))) n += len(x.HomeAddress) + case *MsgWithOneof_MsgWithRequired: + s := proto.Size(x.MsgWithRequired) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s case nil: default: panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) @@ -560,22 +725,43 @@ func _MsgWithOneof_OneofSizer(msg proto.Message) (n int) { type Real struct { Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Real) Reset() { *m = 
Real{} } -func (m *Real) String() string { return proto.CompactTextString(m) } -func (*Real) ProtoMessage() {} -func (*Real) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6} } +func (m *Real) Reset() { *m = Real{} } +func (m *Real) String() string { return proto.CompactTextString(m) } +func (*Real) ProtoMessage() {} +func (*Real) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{6} +} var extRange_Real = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*Real) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Real } +func (m *Real) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Real.Unmarshal(m, b) +} +func (m *Real) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Real.Marshal(b, m, deterministic) +} +func (dst *Real) XXX_Merge(src proto.Message) { + xxx_messageInfo_Real.Merge(dst, src) +} +func (m *Real) XXX_Size() int { + return xxx_messageInfo_Real.Size(m) +} +func (m *Real) XXX_DiscardUnknown() { + xxx_messageInfo_Real.DiscardUnknown(m) +} + +var xxx_messageInfo_Real proto.InternalMessageInfo func (m *Real) GetValue() float64 { if m != nil && m.Value != nil { @@ -586,22 +772,43 @@ func (m *Real) GetValue() float64 { type Complex struct { Imaginary *float64 `protobuf:"fixed64,1,opt,name=imaginary" json:"imaginary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Complex) Reset() { *m = Complex{} } -func (m *Complex) String() string { return proto.CompactTextString(m) } -func (*Complex) ProtoMessage() {} -func (*Complex) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{7} } +func (m *Complex) Reset() { *m = Complex{} } +func (m *Complex) String() string { return proto.CompactTextString(m) } +func (*Complex) ProtoMessage() {} +func (*Complex) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{7} +} var extRange_Complex = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*Complex) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Complex } +func (m *Complex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Complex.Unmarshal(m, b) +} +func (m *Complex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Complex.Marshal(b, m, deterministic) +} +func (dst *Complex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Complex.Merge(dst, src) +} +func (m *Complex) XXX_Size() int { + return xxx_messageInfo_Complex.Size(m) +} +func (m *Complex) XXX_DiscardUnknown() { + xxx_messageInfo_Complex.DiscardUnknown(m) +} + +var xxx_messageInfo_Complex proto.InternalMessageInfo func (m *Complex) GetImaginary() float64 { if m != nil && m.Imaginary != nil { @@ -620,134 +827,324 @@ var E_Complex_RealExtension = &proto.ExtensionDesc{ } type KnownTypes struct { - An *google_protobuf.Any `protobuf:"bytes,14,opt,name=an" json:"an,omitempty"` - Dur *google_protobuf1.Duration `protobuf:"bytes,1,opt,name=dur" json:"dur,omitempty"` - St *google_protobuf2.Struct `protobuf:"bytes,12,opt,name=st" json:"st,omitempty"` - Ts *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=ts" json:"ts,omitempty"` - Lv *google_protobuf2.ListValue `protobuf:"bytes,15,opt,name=lv" json:"lv,omitempty"` - Val *google_protobuf2.Value `protobuf:"bytes,16,opt,name=val" json:"val,omitempty"` - Dbl *google_protobuf4.DoubleValue 
`protobuf:"bytes,3,opt,name=dbl" json:"dbl,omitempty"` - Flt *google_protobuf4.FloatValue `protobuf:"bytes,4,opt,name=flt" json:"flt,omitempty"` - I64 *google_protobuf4.Int64Value `protobuf:"bytes,5,opt,name=i64" json:"i64,omitempty"` - U64 *google_protobuf4.UInt64Value `protobuf:"bytes,6,opt,name=u64" json:"u64,omitempty"` - I32 *google_protobuf4.Int32Value `protobuf:"bytes,7,opt,name=i32" json:"i32,omitempty"` - U32 *google_protobuf4.UInt32Value `protobuf:"bytes,8,opt,name=u32" json:"u32,omitempty"` - Bool *google_protobuf4.BoolValue `protobuf:"bytes,9,opt,name=bool" json:"bool,omitempty"` - Str *google_protobuf4.StringValue `protobuf:"bytes,10,opt,name=str" json:"str,omitempty"` - Bytes *google_protobuf4.BytesValue `protobuf:"bytes,11,opt,name=bytes" json:"bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` + An *any.Any `protobuf:"bytes,14,opt,name=an" json:"an,omitempty"` + Dur *duration.Duration `protobuf:"bytes,1,opt,name=dur" json:"dur,omitempty"` + St *_struct.Struct `protobuf:"bytes,12,opt,name=st" json:"st,omitempty"` + Ts *timestamp.Timestamp `protobuf:"bytes,2,opt,name=ts" json:"ts,omitempty"` + Lv *_struct.ListValue `protobuf:"bytes,15,opt,name=lv" json:"lv,omitempty"` + Val *_struct.Value `protobuf:"bytes,16,opt,name=val" json:"val,omitempty"` + Dbl *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=dbl" json:"dbl,omitempty"` + Flt *wrappers.FloatValue `protobuf:"bytes,4,opt,name=flt" json:"flt,omitempty"` + I64 *wrappers.Int64Value `protobuf:"bytes,5,opt,name=i64" json:"i64,omitempty"` + U64 *wrappers.UInt64Value `protobuf:"bytes,6,opt,name=u64" json:"u64,omitempty"` + I32 *wrappers.Int32Value `protobuf:"bytes,7,opt,name=i32" json:"i32,omitempty"` + U32 *wrappers.UInt32Value `protobuf:"bytes,8,opt,name=u32" json:"u32,omitempty"` + Bool *wrappers.BoolValue `protobuf:"bytes,9,opt,name=bool" json:"bool,omitempty"` + Str *wrappers.StringValue `protobuf:"bytes,10,opt,name=str" json:"str,omitempty"` + Bytes *wrappers.BytesValue `protobuf:"bytes,11,opt,name=bytes" json:"bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *KnownTypes) Reset() { *m = KnownTypes{} } -func (m *KnownTypes) String() string { return proto.CompactTextString(m) } -func (*KnownTypes) ProtoMessage() {} -func (*KnownTypes) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{8} } +func (m *KnownTypes) Reset() { *m = KnownTypes{} } +func (m *KnownTypes) String() string { return proto.CompactTextString(m) } +func (*KnownTypes) ProtoMessage() {} +func (*KnownTypes) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{8} +} +func (m *KnownTypes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnownTypes.Unmarshal(m, b) +} +func (m *KnownTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnownTypes.Marshal(b, m, deterministic) +} +func (dst *KnownTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnownTypes.Merge(dst, src) +} +func (m *KnownTypes) XXX_Size() int { + return xxx_messageInfo_KnownTypes.Size(m) +} +func (m *KnownTypes) XXX_DiscardUnknown() { + xxx_messageInfo_KnownTypes.DiscardUnknown(m) +} -func (m *KnownTypes) GetAn() *google_protobuf.Any { +var xxx_messageInfo_KnownTypes proto.InternalMessageInfo + +func (m *KnownTypes) GetAn() *any.Any { if m != nil { return m.An } return nil } -func (m *KnownTypes) GetDur() *google_protobuf1.Duration { +func (m *KnownTypes) GetDur() *duration.Duration { if m != nil { 
return m.Dur } return nil } -func (m *KnownTypes) GetSt() *google_protobuf2.Struct { +func (m *KnownTypes) GetSt() *_struct.Struct { if m != nil { return m.St } return nil } -func (m *KnownTypes) GetTs() *google_protobuf3.Timestamp { +func (m *KnownTypes) GetTs() *timestamp.Timestamp { if m != nil { return m.Ts } return nil } -func (m *KnownTypes) GetLv() *google_protobuf2.ListValue { +func (m *KnownTypes) GetLv() *_struct.ListValue { if m != nil { return m.Lv } return nil } -func (m *KnownTypes) GetVal() *google_protobuf2.Value { +func (m *KnownTypes) GetVal() *_struct.Value { if m != nil { return m.Val } return nil } -func (m *KnownTypes) GetDbl() *google_protobuf4.DoubleValue { +func (m *KnownTypes) GetDbl() *wrappers.DoubleValue { if m != nil { return m.Dbl } return nil } -func (m *KnownTypes) GetFlt() *google_protobuf4.FloatValue { +func (m *KnownTypes) GetFlt() *wrappers.FloatValue { if m != nil { return m.Flt } return nil } -func (m *KnownTypes) GetI64() *google_protobuf4.Int64Value { +func (m *KnownTypes) GetI64() *wrappers.Int64Value { if m != nil { return m.I64 } return nil } -func (m *KnownTypes) GetU64() *google_protobuf4.UInt64Value { +func (m *KnownTypes) GetU64() *wrappers.UInt64Value { if m != nil { return m.U64 } return nil } -func (m *KnownTypes) GetI32() *google_protobuf4.Int32Value { +func (m *KnownTypes) GetI32() *wrappers.Int32Value { if m != nil { return m.I32 } return nil } -func (m *KnownTypes) GetU32() *google_protobuf4.UInt32Value { +func (m *KnownTypes) GetU32() *wrappers.UInt32Value { if m != nil { return m.U32 } return nil } -func (m *KnownTypes) GetBool() *google_protobuf4.BoolValue { +func (m *KnownTypes) GetBool() *wrappers.BoolValue { if m != nil { return m.Bool } return nil } -func (m *KnownTypes) GetStr() *google_protobuf4.StringValue { +func (m *KnownTypes) GetStr() *wrappers.StringValue { if m != nil { return m.Str } return nil } -func (m *KnownTypes) GetBytes() *google_protobuf4.BytesValue { +func (m *KnownTypes) GetBytes() *wrappers.BytesValue { if m != nil { return m.Bytes } return nil } +// Test messages for marshaling/unmarshaling required fields. 
+type MsgWithRequired struct { + Str *string `protobuf:"bytes,1,req,name=str" json:"str,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithRequired) Reset() { *m = MsgWithRequired{} } +func (m *MsgWithRequired) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequired) ProtoMessage() {} +func (*MsgWithRequired) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{9} +} +func (m *MsgWithRequired) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequired.Unmarshal(m, b) +} +func (m *MsgWithRequired) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequired.Marshal(b, m, deterministic) +} +func (dst *MsgWithRequired) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequired.Merge(dst, src) +} +func (m *MsgWithRequired) XXX_Size() int { + return xxx_messageInfo_MsgWithRequired.Size(m) +} +func (m *MsgWithRequired) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequired.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequired proto.InternalMessageInfo + +func (m *MsgWithRequired) GetStr() string { + if m != nil && m.Str != nil { + return *m.Str + } + return "" +} + +type MsgWithIndirectRequired struct { + Subm *MsgWithRequired `protobuf:"bytes,1,opt,name=subm" json:"subm,omitempty"` + MapField map[string]*MsgWithRequired `protobuf:"bytes,2,rep,name=map_field,json=mapField" json:"map_field,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + SliceField []*MsgWithRequired `protobuf:"bytes,3,rep,name=slice_field,json=sliceField" json:"slice_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithIndirectRequired) Reset() { *m = MsgWithIndirectRequired{} } +func (m *MsgWithIndirectRequired) String() string { return proto.CompactTextString(m) } +func (*MsgWithIndirectRequired) ProtoMessage() {} +func (*MsgWithIndirectRequired) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{10} +} +func (m *MsgWithIndirectRequired) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithIndirectRequired.Unmarshal(m, b) +} +func (m *MsgWithIndirectRequired) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithIndirectRequired.Marshal(b, m, deterministic) +} +func (dst *MsgWithIndirectRequired) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithIndirectRequired.Merge(dst, src) +} +func (m *MsgWithIndirectRequired) XXX_Size() int { + return xxx_messageInfo_MsgWithIndirectRequired.Size(m) +} +func (m *MsgWithIndirectRequired) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithIndirectRequired.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithIndirectRequired proto.InternalMessageInfo + +func (m *MsgWithIndirectRequired) GetSubm() *MsgWithRequired { + if m != nil { + return m.Subm + } + return nil +} + +func (m *MsgWithIndirectRequired) GetMapField() map[string]*MsgWithRequired { + if m != nil { + return m.MapField + } + return nil +} + +func (m *MsgWithIndirectRequired) GetSliceField() []*MsgWithRequired { + if m != nil { + return m.SliceField + } + return nil +} + +type MsgWithRequiredBytes struct { + Byts []byte `protobuf:"bytes,1,req,name=byts" json:"byts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 
`json:"-"` +} + +func (m *MsgWithRequiredBytes) Reset() { *m = MsgWithRequiredBytes{} } +func (m *MsgWithRequiredBytes) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequiredBytes) ProtoMessage() {} +func (*MsgWithRequiredBytes) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{11} +} +func (m *MsgWithRequiredBytes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequiredBytes.Unmarshal(m, b) +} +func (m *MsgWithRequiredBytes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequiredBytes.Marshal(b, m, deterministic) +} +func (dst *MsgWithRequiredBytes) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequiredBytes.Merge(dst, src) +} +func (m *MsgWithRequiredBytes) XXX_Size() int { + return xxx_messageInfo_MsgWithRequiredBytes.Size(m) +} +func (m *MsgWithRequiredBytes) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequiredBytes.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequiredBytes proto.InternalMessageInfo + +func (m *MsgWithRequiredBytes) GetByts() []byte { + if m != nil { + return m.Byts + } + return nil +} + +type MsgWithRequiredWKT struct { + Str *wrappers.StringValue `protobuf:"bytes,1,req,name=str" json:"str,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithRequiredWKT) Reset() { *m = MsgWithRequiredWKT{} } +func (m *MsgWithRequiredWKT) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequiredWKT) ProtoMessage() {} +func (*MsgWithRequiredWKT) Descriptor() ([]byte, []int) { + return fileDescriptor_test_objects_c6f6c615ab823e65, []int{12} +} +func (m *MsgWithRequiredWKT) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequiredWKT.Unmarshal(m, b) +} +func (m *MsgWithRequiredWKT) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequiredWKT.Marshal(b, m, deterministic) +} +func (dst *MsgWithRequiredWKT) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequiredWKT.Merge(dst, src) +} +func (m *MsgWithRequiredWKT) XXX_Size() int { + return xxx_messageInfo_MsgWithRequiredWKT.Size(m) +} +func (m *MsgWithRequiredWKT) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequiredWKT.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequiredWKT proto.InternalMessageInfo + +func (m *MsgWithRequiredWKT) GetStr() *wrappers.StringValue { + if m != nil { + return m.Str + } + return nil +} + var E_Name = &proto.ExtensionDesc{ ExtendedType: (*Real)(nil), ExtensionType: (*string)(nil), @@ -757,96 +1154,125 @@ var E_Name = &proto.ExtensionDesc{ Filename: "test_objects.proto", } +var E_Extm = &proto.ExtensionDesc{ + ExtendedType: (*Real)(nil), + ExtensionType: (*MsgWithRequired)(nil), + Field: 125, + Name: "jsonpb.extm", + Tag: "bytes,125,opt,name=extm", + Filename: "test_objects.proto", +} + func init() { proto.RegisterType((*Simple)(nil), "jsonpb.Simple") proto.RegisterType((*NonFinites)(nil), "jsonpb.NonFinites") proto.RegisterType((*Repeats)(nil), "jsonpb.Repeats") proto.RegisterType((*Widget)(nil), "jsonpb.Widget") proto.RegisterType((*Maps)(nil), "jsonpb.Maps") + proto.RegisterMapType((map[bool]*Simple)(nil), "jsonpb.Maps.MBoolSimpleEntry") + proto.RegisterMapType((map[int64]string)(nil), "jsonpb.Maps.MInt64StrEntry") proto.RegisterType((*MsgWithOneof)(nil), "jsonpb.MsgWithOneof") proto.RegisterType((*Real)(nil), "jsonpb.Real") proto.RegisterType((*Complex)(nil), "jsonpb.Complex") 
proto.RegisterType((*KnownTypes)(nil), "jsonpb.KnownTypes") + proto.RegisterType((*MsgWithRequired)(nil), "jsonpb.MsgWithRequired") + proto.RegisterType((*MsgWithIndirectRequired)(nil), "jsonpb.MsgWithIndirectRequired") + proto.RegisterMapType((map[string]*MsgWithRequired)(nil), "jsonpb.MsgWithIndirectRequired.MapFieldEntry") + proto.RegisterType((*MsgWithRequiredBytes)(nil), "jsonpb.MsgWithRequiredBytes") + proto.RegisterType((*MsgWithRequiredWKT)(nil), "jsonpb.MsgWithRequiredWKT") proto.RegisterEnum("jsonpb.Widget_Color", Widget_Color_name, Widget_Color_value) proto.RegisterExtension(E_Complex_RealExtension) proto.RegisterExtension(E_Name) + proto.RegisterExtension(E_Extm) } -func init() { proto.RegisterFile("test_objects.proto", fileDescriptor1) } +func init() { proto.RegisterFile("test_objects.proto", fileDescriptor_test_objects_c6f6c615ab823e65) } -var fileDescriptor1 = []byte{ - // 1160 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x95, 0x41, 0x73, 0xdb, 0x44, - 0x14, 0xc7, 0x23, 0xc9, 0x92, 0xed, 0x75, 0x92, 0x9a, 0x6d, 0xda, 0x2a, 0x26, 0x80, 0xc6, 0x94, - 0x22, 0x0a, 0x75, 0x07, 0xc7, 0xe3, 0x61, 0x0a, 0x97, 0xa4, 0x71, 0x29, 0x43, 0x13, 0x98, 0x4d, - 0x43, 0x8f, 0x1e, 0x39, 0x5a, 0xbb, 0x2a, 0xf2, 0xae, 0x67, 0x77, 0x95, 0xd4, 0x03, 0x87, 0x9c, - 0x39, 0x32, 0x7c, 0x05, 0xf8, 0x08, 0x1c, 0xf8, 0x74, 0xcc, 0xdb, 0x95, 0xac, 0xc4, 0x8e, 0x4f, - 0xf1, 0x7b, 0xef, 0xff, 0xfe, 0x59, 0xed, 0x6f, 0x77, 0x1f, 0xc2, 0x8a, 0x4a, 0x35, 0xe4, 0xa3, - 0x77, 0xf4, 0x5c, 0xc9, 0xce, 0x4c, 0x70, 0xc5, 0xb1, 0xf7, 0x4e, 0x72, 0x36, 0x1b, 0xb5, 0x76, - 0x27, 0x9c, 0x4f, 0x52, 0xfa, 0x54, 0x67, 0x47, 0xd9, 0xf8, 0x69, 0xc4, 0xe6, 0x46, 0xd2, 0xfa, - 0x78, 0xb9, 0x14, 0x67, 0x22, 0x52, 0x09, 0x67, 0x79, 0x7d, 0x6f, 0xb9, 0x2e, 0x95, 0xc8, 0xce, - 0x55, 0x5e, 0xfd, 0x64, 0xb9, 0xaa, 0x92, 0x29, 0x95, 0x2a, 0x9a, 0xce, 0xd6, 0xd9, 0x5f, 0x8a, - 0x68, 0x36, 0xa3, 0x22, 0x5f, 0x61, 0xfb, 0x6f, 0x1b, 0x79, 0xa7, 0xc9, 0x74, 0x96, 0x52, 0x7c, - 0x0f, 0x79, 0x7c, 0x38, 0xe2, 0x3c, 0xf5, 0xad, 0xc0, 0x0a, 0x6b, 0xc4, 0xe5, 0x87, 0x9c, 0xa7, - 0xf8, 0x01, 0xaa, 0xf2, 0x61, 0xc2, 0xd4, 0x7e, 0xd7, 0xb7, 0x03, 0x2b, 0x74, 0x89, 0xc7, 0x7f, - 0x80, 0x68, 0x51, 0xe8, 0xf7, 0x7c, 0x27, 0xb0, 0x42, 0xc7, 0x14, 0xfa, 0x3d, 0xbc, 0x8b, 0x6a, - 0x7c, 0x98, 0x99, 0x96, 0x4a, 0x60, 0x85, 0x5b, 0xa4, 0xca, 0xcf, 0x74, 0x58, 0x96, 0xfa, 0x3d, - 0xdf, 0x0d, 0xac, 0xb0, 0x92, 0x97, 0x8a, 0x2e, 0x69, 0xba, 0xbc, 0xc0, 0x0a, 0x3f, 0x20, 0x55, - 0x7e, 0x7a, 0xad, 0x4b, 0x9a, 0xae, 0x6a, 0x60, 0x85, 0x38, 0x2f, 0xf5, 0x7b, 0x66, 0x11, 0xe3, - 0x94, 0x47, 0xca, 0xaf, 0x05, 0x56, 0x68, 0x13, 0x8f, 0xbf, 0x80, 0xc8, 0xf4, 0xc4, 0x3c, 0x1b, - 0xa5, 0xd4, 0xaf, 0x07, 0x56, 0x68, 0x91, 0x2a, 0x3f, 0xd2, 0x61, 0x6e, 0xa7, 0x44, 0xc2, 0x26, - 0x3e, 0x0a, 0xac, 0xb0, 0x0e, 0x76, 0x3a, 0x34, 0x76, 0xa3, 0xb9, 0xa2, 0xd2, 0x6f, 0x04, 0x56, - 0xb8, 0x49, 0x3c, 0x7e, 0x08, 0x51, 0xfb, 0x4f, 0x0b, 0xa1, 0x13, 0xce, 0x5e, 0x24, 0x2c, 0x51, - 0x54, 0xe2, 0xbb, 0xc8, 0x1d, 0x0f, 0x59, 0xc4, 0xf4, 0x56, 0xd9, 0xa4, 0x32, 0x3e, 0x89, 0x18, - 0x6c, 0xe0, 0x78, 0x38, 0x4b, 0xd8, 0x58, 0x6f, 0x94, 0x4d, 0xdc, 0xf1, 0xcf, 0x09, 0x1b, 0x9b, - 0x34, 0x83, 0xb4, 0x93, 0xa7, 0x4f, 0x20, 0x7d, 0x17, 0xb9, 0xb1, 0xb6, 0xa8, 0xe8, 0xd5, 0x55, - 0xe2, 0xdc, 0x22, 0x36, 0x16, 0xae, 0xce, 0xba, 0x71, 0x61, 0x11, 0x1b, 0x0b, 0x2f, 0x4f, 0x83, - 0x45, 0xfb, 0x1f, 0x1b, 0x55, 0x09, 0x9d, 0xd1, 0x48, 0x49, 0x90, 0x88, 0x82, 0x9e, 0x03, 0xf4, - 0x44, 0x41, 0x4f, 0x2c, 0xe8, 0x39, 0x40, 0x4f, 0x2c, 0xe8, 
0x89, 0x05, 0x3d, 0x07, 0xe8, 0x89, - 0x05, 0x3d, 0x51, 0xd2, 0x73, 0x80, 0x9e, 0x28, 0xe9, 0x89, 0x92, 0x9e, 0x03, 0xf4, 0x44, 0x49, - 0x4f, 0x94, 0xf4, 0x1c, 0xa0, 0x27, 0x4e, 0xaf, 0x75, 0x2d, 0xe8, 0x39, 0x40, 0x4f, 0x94, 0xf4, - 0xc4, 0x82, 0x9e, 0x03, 0xf4, 0xc4, 0x82, 0x9e, 0x28, 0xe9, 0x39, 0x40, 0x4f, 0x94, 0xf4, 0x44, - 0x49, 0xcf, 0x01, 0x7a, 0xa2, 0xa4, 0x27, 0x16, 0xf4, 0x1c, 0xa0, 0x27, 0x0c, 0xbd, 0x7f, 0x6d, - 0xe4, 0xbd, 0x49, 0xe2, 0x09, 0x55, 0xf8, 0x31, 0x72, 0xcf, 0x79, 0xca, 0x85, 0x26, 0xb7, 0xdd, - 0xdd, 0xe9, 0x98, 0x2b, 0xda, 0x31, 0xe5, 0xce, 0x73, 0xa8, 0x11, 0x23, 0xc1, 0x4f, 0xc0, 0xcf, - 0xa8, 0x61, 0xf3, 0xd6, 0xa9, 0x3d, 0xa1, 0xff, 0xe2, 0x47, 0xc8, 0x93, 0xfa, 0x2a, 0xe9, 0x53, - 0xd5, 0xe8, 0x6e, 0x17, 0x6a, 0x73, 0xc1, 0x48, 0x5e, 0xc5, 0x5f, 0x98, 0x0d, 0xd1, 0x4a, 0x58, - 0xe7, 0xaa, 0x12, 0x36, 0x28, 0x97, 0x56, 0x85, 0x01, 0xec, 0xef, 0x68, 0xcf, 0x3b, 0x85, 0x32, - 0xe7, 0x4e, 0x8a, 0x3a, 0xfe, 0x0a, 0xd5, 0xc5, 0xb0, 0x10, 0xdf, 0xd3, 0xb6, 0x2b, 0xe2, 0x9a, - 0xc8, 0x7f, 0xb5, 0x3f, 0x43, 0xae, 0x59, 0x74, 0x15, 0x39, 0x64, 0x70, 0xd4, 0xdc, 0xc0, 0x75, - 0xe4, 0x7e, 0x4f, 0x06, 0x83, 0x93, 0xa6, 0x85, 0x6b, 0xa8, 0x72, 0xf8, 0xea, 0x6c, 0xd0, 0xb4, - 0xdb, 0x7f, 0xd9, 0xa8, 0x72, 0x1c, 0xcd, 0x24, 0xfe, 0x16, 0x35, 0xa6, 0xe6, 0xb8, 0xc0, 0xde, - 0xeb, 0x33, 0xd6, 0xe8, 0x7e, 0x58, 0xf8, 0x83, 0xa4, 0x73, 0xac, 0xcf, 0xcf, 0xa9, 0x12, 0x03, - 0xa6, 0xc4, 0x9c, 0xd4, 0xa7, 0x45, 0x8c, 0x0f, 0xd0, 0xd6, 0x54, 0x9f, 0xcd, 0xe2, 0xab, 0x6d, - 0xdd, 0xfe, 0xd1, 0xcd, 0x76, 0x38, 0xaf, 0xe6, 0xb3, 0x8d, 0x41, 0x63, 0x5a, 0x66, 0x5a, 0xdf, - 0xa1, 0xed, 0x9b, 0xfe, 0xb8, 0x89, 0x9c, 0x5f, 0xe9, 0x5c, 0x63, 0x74, 0x08, 0xfc, 0xc4, 0x3b, - 0xc8, 0xbd, 0x88, 0xd2, 0x8c, 0xea, 0xeb, 0x57, 0x27, 0x26, 0x78, 0x66, 0x7f, 0x63, 0xb5, 0x4e, - 0x50, 0x73, 0xd9, 0xfe, 0x7a, 0x7f, 0xcd, 0xf4, 0x3f, 0xbc, 0xde, 0xbf, 0x0a, 0xa5, 0xf4, 0x6b, - 0xff, 0x61, 0xa1, 0xcd, 0x63, 0x39, 0x79, 0x93, 0xa8, 0xb7, 0x3f, 0x31, 0xca, 0xc7, 0xf8, 0x3e, - 0x72, 0x55, 0xa2, 0x52, 0xaa, 0xed, 0xea, 0x2f, 0x37, 0x88, 0x09, 0xb1, 0x8f, 0x3c, 0x19, 0xa5, - 0x91, 0x98, 0x6b, 0x4f, 0xe7, 0xe5, 0x06, 0xc9, 0x63, 0xdc, 0x42, 0xd5, 0xe7, 0x3c, 0x83, 0x95, - 0xe8, 0x67, 0x01, 0x7a, 0x8a, 0x04, 0xfe, 0x14, 0x6d, 0xbe, 0xe5, 0x53, 0x3a, 0x8c, 0xe2, 0x58, - 0x50, 0x29, 0xf5, 0x0b, 0x01, 0x82, 0x06, 0x64, 0x0f, 0x4c, 0xf2, 0xb0, 0x8a, 0xdc, 0x8c, 0x25, - 0x9c, 0xb5, 0x1f, 0xa1, 0x0a, 0xa1, 0x51, 0x5a, 0x7e, 0xbe, 0x65, 0xde, 0x08, 0x1d, 0x3c, 0xae, - 0xd5, 0xe2, 0xe6, 0xd5, 0xd5, 0xd5, 0x95, 0xdd, 0xbe, 0x84, 0xff, 0x08, 0x5f, 0xf2, 0x1e, 0xef, - 0xa1, 0x7a, 0x32, 0x8d, 0x26, 0x09, 0x83, 0x95, 0x19, 0x79, 0x99, 0x28, 0x5b, 0xba, 0x47, 0x68, - 0x5b, 0xd0, 0x28, 0x1d, 0xd2, 0xf7, 0x8a, 0x32, 0x99, 0x70, 0x86, 0x37, 0xcb, 0x23, 0x15, 0xa5, - 0xfe, 0x6f, 0x37, 0xcf, 0x64, 0x6e, 0x4f, 0xb6, 0xa0, 0x69, 0x50, 0xf4, 0xb4, 0xff, 0x73, 0x11, - 0xfa, 0x91, 0xf1, 0x4b, 0xf6, 0x7a, 0x3e, 0xa3, 0x12, 0x3f, 0x44, 0x76, 0xc4, 0xfc, 0x6d, 0xdd, - 0xba, 0xd3, 0x31, 0xf3, 0xa9, 0x53, 0xcc, 0xa7, 0xce, 0x01, 0x9b, 0x13, 0x3b, 0x62, 0xf8, 0x4b, - 0xe4, 0xc4, 0x99, 0xb9, 0xa5, 0x8d, 0xee, 0xee, 0x8a, 0xec, 0x28, 0x9f, 0x92, 0x04, 0x54, 0xf8, - 0x73, 0x64, 0x4b, 0xe5, 0x6f, 0x6a, 0xed, 0x83, 0x15, 0xed, 0xa9, 0x9e, 0x98, 0xc4, 0x96, 0x70, - 0xfb, 0x6d, 0x25, 0x73, 0xbe, 0xad, 0x15, 0xe1, 0xeb, 0x62, 0x78, 0x12, 0x5b, 0x49, 0xd0, 0xa6, - 0x17, 0xfe, 0x9d, 0x35, 0xda, 0x57, 0x89, 0x54, 0xbf, 0xc0, 0x0e, 0x13, 0x3b, 0xbd, 0xc0, 0x21, - 0x72, 0x2e, 0xa2, 0xd4, 0x6f, 0x6a, 0xf1, 0xfd, 0x15, 0xb1, 0x11, 0x82, 0x04, 0x77, 
0x90, 0x13, - 0x8f, 0x52, 0xcd, 0xbc, 0xd1, 0xdd, 0x5b, 0xfd, 0x2e, 0xfd, 0xc8, 0xe5, 0xfa, 0x78, 0x94, 0xe2, - 0x27, 0xc8, 0x19, 0xa7, 0x4a, 0x1f, 0x01, 0xb8, 0x70, 0xcb, 0x7a, 0xfd, 0x5c, 0xe6, 0xf2, 0x71, - 0xaa, 0x40, 0x9e, 0xe4, 0xb3, 0xf5, 0x36, 0xb9, 0xbe, 0x42, 0xb9, 0x3c, 0xe9, 0xf7, 0x60, 0x35, - 0x59, 0xbf, 0xa7, 0xa7, 0xca, 0x6d, 0xab, 0x39, 0xbb, 0xae, 0xcf, 0xfa, 0x3d, 0x6d, 0xbf, 0xdf, - 0xd5, 0x43, 0x78, 0x8d, 0xfd, 0x7e, 0xb7, 0xb0, 0xdf, 0xef, 0x6a, 0xfb, 0xfd, 0xae, 0x9e, 0xcc, - 0xeb, 0xec, 0x17, 0xfa, 0x4c, 0xeb, 0x2b, 0x7a, 0x84, 0xd5, 0xd7, 0x6c, 0x3a, 0xdc, 0x61, 0x23, - 0xd7, 0x3a, 0xf0, 0x87, 0xd7, 0x08, 0xad, 0xf1, 0x37, 0x63, 0x21, 0xf7, 0x97, 0x4a, 0xe0, 0xaf, - 0x91, 0x5b, 0x0e, 0xf7, 0xdb, 0x3e, 0x40, 0x8f, 0x0b, 0xd3, 0x60, 0x94, 0xcf, 0x02, 0x54, 0x61, - 0xd1, 0x94, 0x2e, 0x1d, 0xfc, 0xdf, 0xf5, 0x0b, 0xa3, 0x2b, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, - 0xd5, 0x39, 0x32, 0x09, 0xf9, 0x09, 0x00, 0x00, +var fileDescriptor_test_objects_c6f6c615ab823e65 = []byte{ + // 1357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xdd, 0x72, 0x13, 0xc7, + 0x12, 0xf6, 0xee, 0x6a, 0xf5, 0xd3, 0xf2, 0x1f, 0x83, 0x81, 0xc5, 0x87, 0x73, 0x8e, 0x4a, 0x70, + 0x38, 0x0a, 0xc4, 0xa2, 0x22, 0xbb, 0x5c, 0x84, 0xe4, 0x06, 0x63, 0x13, 0x08, 0xe0, 0xa4, 0xc6, + 0x26, 0x5c, 0xaa, 0x56, 0xde, 0x91, 0x59, 0xb2, 0xbb, 0xa3, 0xcc, 0xcc, 0xda, 0xa8, 0x92, 0x54, + 0xf9, 0x19, 0x52, 0x79, 0x82, 0x54, 0x25, 0x8f, 0x90, 0x8b, 0xbc, 0x45, 0xde, 0x28, 0x35, 0x3d, + 0xb3, 0x5a, 0x59, 0x42, 0x95, 0x5c, 0x79, 0xbb, 0xfb, 0xeb, 0x4f, 0x33, 0xfd, 0xf5, 0x74, 0x1b, + 0x88, 0x62, 0x52, 0xf5, 0xf9, 0xe0, 0x1d, 0x3b, 0x51, 0xb2, 0x3b, 0x12, 0x5c, 0x71, 0x52, 0x7d, + 0x27, 0x79, 0x36, 0x1a, 0x6c, 0xde, 0x3c, 0xe5, 0xfc, 0x34, 0x61, 0x0f, 0xd0, 0x3b, 0xc8, 0x87, + 0x0f, 0xc2, 0x6c, 0x6c, 0x20, 0x9b, 0xff, 0x99, 0x0d, 0x45, 0xb9, 0x08, 0x55, 0xcc, 0x33, 0x1b, + 0xbf, 0x35, 0x1b, 0x97, 0x4a, 0xe4, 0x27, 0xca, 0x46, 0xff, 0x3b, 0x1b, 0x55, 0x71, 0xca, 0xa4, + 0x0a, 0xd3, 0xd1, 0x22, 0xfa, 0x73, 0x11, 0x8e, 0x46, 0x4c, 0xd8, 0x13, 0xb6, 0x7f, 0x75, 0xa1, + 0x7a, 0x14, 0xa7, 0xa3, 0x84, 0x91, 0x6b, 0x50, 0xe5, 0xfd, 0x01, 0xe7, 0x49, 0xe0, 0xb4, 0x9c, + 0x4e, 0x9d, 0xfa, 0x7c, 0x8f, 0xf3, 0x84, 0xdc, 0x80, 0x1a, 0xef, 0xc7, 0x99, 0xda, 0xee, 0x05, + 0x6e, 0xcb, 0xe9, 0xf8, 0xb4, 0xca, 0x9f, 0x6b, 0x6b, 0x12, 0xd8, 0xdd, 0x09, 0xbc, 0x96, 0xd3, + 0xf1, 0x4c, 0x60, 0x77, 0x87, 0xdc, 0x84, 0x3a, 0xef, 0xe7, 0x26, 0xa5, 0xd2, 0x72, 0x3a, 0x2b, + 0xb4, 0xc6, 0x5f, 0xa3, 0x59, 0x86, 0x76, 0x77, 0x02, 0xbf, 0xe5, 0x74, 0x2a, 0x36, 0x54, 0x64, + 0x49, 0x93, 0x55, 0x6d, 0x39, 0x9d, 0x2b, 0xb4, 0xc6, 0x8f, 0xa6, 0xb2, 0xa4, 0xc9, 0xaa, 0xb5, + 0x9c, 0x0e, 0xb1, 0xa1, 0xdd, 0x1d, 0x73, 0x88, 0x61, 0xc2, 0x43, 0x15, 0xd4, 0x5b, 0x4e, 0xc7, + 0xa5, 0x55, 0xfe, 0x54, 0x5b, 0x26, 0x27, 0xe2, 0xf9, 0x20, 0x61, 0x41, 0xa3, 0xe5, 0x74, 0x1c, + 0x5a, 0xe3, 0xfb, 0x68, 0x5a, 0x3a, 0x25, 0xe2, 0xec, 0x34, 0x80, 0x96, 0xd3, 0x69, 0x68, 0x3a, + 0x34, 0x0d, 0xdd, 0x60, 0xac, 0x98, 0x0c, 0x9a, 0x2d, 0xa7, 0xb3, 0x4c, 0xab, 0x7c, 0x4f, 0x5b, + 0xed, 0x9f, 0x1c, 0x80, 0x43, 0x9e, 0x3d, 0x8d, 0xb3, 0x58, 0x31, 0x49, 0xae, 0x82, 0x3f, 0xec, + 0x67, 0x61, 0x86, 0xa5, 0x72, 0x69, 0x65, 0x78, 0x18, 0x66, 0xba, 0x80, 0xc3, 0xfe, 0x28, 0xce, + 0x86, 0x58, 0x28, 0x97, 0xfa, 0xc3, 0xaf, 0xe3, 0x6c, 0x68, 0xdc, 0x99, 0x76, 0x7b, 0xd6, 0x7d, + 0xa8, 0xdd, 0x57, 0xc1, 0x8f, 0x90, 0xa2, 0x82, 0xa7, 0xab, 0x44, 0x96, 0x22, 0x32, 0x14, 0x3e, + 0x7a, 0xfd, 0xa8, 0xa0, 0x88, 0x0c, 0x45, 0xd5, 
0xba, 0x35, 0x45, 0xfb, 0x37, 0x17, 0x6a, 0x94, + 0x8d, 0x58, 0xa8, 0xa4, 0x86, 0x88, 0x42, 0x3d, 0x4f, 0xab, 0x27, 0x0a, 0xf5, 0xc4, 0x44, 0x3d, + 0x4f, 0xab, 0x27, 0x26, 0xea, 0x89, 0x89, 0x7a, 0x9e, 0x56, 0x4f, 0x4c, 0xd4, 0x13, 0xa5, 0x7a, + 0x9e, 0x56, 0x4f, 0x94, 0xea, 0x89, 0x52, 0x3d, 0x4f, 0xab, 0x27, 0x4a, 0xf5, 0x44, 0xa9, 0x9e, + 0xa7, 0xd5, 0x13, 0x47, 0x53, 0x59, 0x13, 0xf5, 0x3c, 0xad, 0x9e, 0x28, 0xd5, 0x13, 0x13, 0xf5, + 0x3c, 0xad, 0x9e, 0x98, 0xa8, 0x27, 0x4a, 0xf5, 0x3c, 0xad, 0x9e, 0x28, 0xd5, 0x13, 0xa5, 0x7a, + 0x9e, 0x56, 0x4f, 0x94, 0xea, 0x89, 0x89, 0x7a, 0x9e, 0x56, 0x4f, 0x18, 0xf5, 0x7e, 0x77, 0xa1, + 0xfa, 0x26, 0x8e, 0x4e, 0x99, 0x22, 0xf7, 0xc0, 0x3f, 0xe1, 0x09, 0x17, 0xa8, 0xdc, 0x6a, 0x6f, + 0xa3, 0x6b, 0x9e, 0x68, 0xd7, 0x84, 0xbb, 0x4f, 0x74, 0x8c, 0x1a, 0x08, 0xd9, 0xd2, 0x7c, 0x06, + 0xad, 0x8b, 0xb7, 0x08, 0x5d, 0x15, 0xf8, 0x97, 0xdc, 0x85, 0xaa, 0xc4, 0xa7, 0x84, 0x5d, 0xd5, + 0xec, 0xad, 0x16, 0x68, 0xf3, 0xc0, 0xa8, 0x8d, 0x92, 0x8f, 0x4c, 0x41, 0x10, 0xa9, 0xcf, 0x39, + 0x8f, 0xd4, 0x05, 0xb2, 0xd0, 0x9a, 0x30, 0x02, 0x07, 0x1b, 0xc8, 0xb9, 0x56, 0x20, 0xad, 0xee, + 0xb4, 0x88, 0x93, 0x8f, 0xa1, 0x21, 0xfa, 0x05, 0xf8, 0x1a, 0xd2, 0xce, 0x81, 0xeb, 0xc2, 0x7e, + 0xb5, 0xff, 0x07, 0xbe, 0x39, 0x74, 0x0d, 0x3c, 0x7a, 0xb0, 0xbf, 0xbe, 0x44, 0x1a, 0xe0, 0x7f, + 0x41, 0x0f, 0x0e, 0x0e, 0xd7, 0x1d, 0x52, 0x87, 0xca, 0xde, 0xcb, 0xd7, 0x07, 0xeb, 0x6e, 0xfb, + 0x67, 0x17, 0x2a, 0xaf, 0xc2, 0x91, 0x24, 0x9f, 0x41, 0x33, 0x35, 0xed, 0xa2, 0x6b, 0x8f, 0x3d, + 0xd6, 0xec, 0xfd, 0xab, 0xe0, 0xd7, 0x90, 0xee, 0x2b, 0xec, 0x9f, 0x23, 0x25, 0x0e, 0x32, 0x25, + 0xc6, 0xb4, 0x91, 0x16, 0x36, 0x79, 0x0c, 0x2b, 0x29, 0xf6, 0x66, 0x71, 0x6b, 0x17, 0xd3, 0xff, + 0x7d, 0x39, 0x5d, 0xf7, 0xab, 0xb9, 0xb6, 0x21, 0x68, 0xa6, 0xa5, 0x67, 0xf3, 0x73, 0x58, 0xbd, + 0xcc, 0x4f, 0xd6, 0xc1, 0xfb, 0x96, 0x8d, 0x51, 0x46, 0x8f, 0xea, 0x4f, 0xb2, 0x01, 0xfe, 0x59, + 0x98, 0xe4, 0x0c, 0x9f, 0x5f, 0x83, 0x1a, 0xe3, 0x91, 0xfb, 0xd0, 0xd9, 0x3c, 0x84, 0xf5, 0x59, + 0xfa, 0xe9, 0xfc, 0xba, 0xc9, 0xbf, 0x33, 0x9d, 0x3f, 0x2f, 0x4a, 0xc9, 0xd7, 0xfe, 0xd3, 0x81, + 0xe5, 0x57, 0xf2, 0xf4, 0x4d, 0xac, 0xde, 0x7e, 0x95, 0x31, 0x3e, 0x24, 0xd7, 0xc1, 0x57, 0xb1, + 0x4a, 0x18, 0xd2, 0x35, 0x9e, 0x2d, 0x51, 0x63, 0x92, 0x00, 0xaa, 0x32, 0x4c, 0x42, 0x31, 0x46, + 0x4e, 0xef, 0xd9, 0x12, 0xb5, 0x36, 0xd9, 0x84, 0xda, 0x13, 0x9e, 0xeb, 0x93, 0xe0, 0x58, 0xd0, + 0x39, 0x85, 0x83, 0xdc, 0x86, 0xe5, 0xb7, 0x3c, 0x65, 0xfd, 0x30, 0x8a, 0x04, 0x93, 0x12, 0x27, + 0x84, 0x06, 0x34, 0xb5, 0xf7, 0xb1, 0x71, 0x92, 0x03, 0xb8, 0x92, 0xca, 0xd3, 0xfe, 0x79, 0xac, + 0xde, 0xf6, 0x05, 0xfb, 0x2e, 0x8f, 0x05, 0x8b, 0x70, 0x6a, 0x34, 0x7b, 0x37, 0x26, 0x85, 0x35, + 0x67, 0xa4, 0x36, 0xfc, 0x6c, 0x89, 0xae, 0xa5, 0x97, 0x5d, 0x7b, 0x35, 0xf0, 0xf3, 0x2c, 0xe6, + 0x59, 0xfb, 0x2e, 0x54, 0x28, 0x0b, 0x93, 0xb2, 0x8a, 0x8e, 0x19, 0x35, 0x68, 0xdc, 0xab, 0xd7, + 0xa3, 0xf5, 0x8b, 0x8b, 0x8b, 0x0b, 0xb7, 0x7d, 0xae, 0x0f, 0xae, 0x0b, 0xf2, 0x9e, 0xdc, 0x82, + 0x46, 0x9c, 0x86, 0xa7, 0x71, 0xa6, 0x2f, 0x68, 0xe0, 0xa5, 0xa3, 0x4c, 0xe9, 0xed, 0xc3, 0xaa, + 0x60, 0x61, 0xd2, 0x67, 0xef, 0x15, 0xcb, 0x64, 0xcc, 0x33, 0xb2, 0x5c, 0x76, 0x66, 0x98, 0x04, + 0xdf, 0x5f, 0x6e, 0x6d, 0x4b, 0x4f, 0x57, 0x74, 0xd2, 0x41, 0x91, 0xd3, 0xfe, 0xc3, 0x07, 0x78, + 0x91, 0xf1, 0xf3, 0xec, 0x78, 0x3c, 0x62, 0x92, 0xdc, 0x01, 0x37, 0xcc, 0x82, 0x55, 0x4c, 0xdd, + 0xe8, 0x9a, 0x35, 0xd7, 0x2d, 0xd6, 0x5c, 0xf7, 0x71, 0x36, 0xa6, 0x6e, 0x98, 0x91, 0xfb, 0xe0, + 0x45, 0xb9, 0x79, 0xec, 0xcd, 0xde, 0xcd, 0x39, 0xd8, 0xbe, 0x5d, 0xb6, 
0x54, 0xa3, 0xc8, 0xff, + 0xc1, 0x95, 0x2a, 0x58, 0xb6, 0x35, 0x9c, 0xc5, 0x1e, 0xe1, 0xe2, 0xa5, 0xae, 0xd4, 0x43, 0xc4, + 0x55, 0xd2, 0xb6, 0xc9, 0xe6, 0x1c, 0xf0, 0xb8, 0xd8, 0xc1, 0xd4, 0x55, 0x52, 0x63, 0x93, 0xb3, + 0x60, 0x6d, 0x01, 0xf6, 0x65, 0x2c, 0xd5, 0x37, 0xba, 0xc2, 0xd4, 0x4d, 0xce, 0x48, 0x07, 0xbc, + 0xb3, 0x30, 0x09, 0xd6, 0x11, 0x7c, 0x7d, 0x0e, 0x6c, 0x80, 0x1a, 0x42, 0xba, 0xe0, 0x45, 0x83, + 0x04, 0x5b, 0xa7, 0xd9, 0xbb, 0x35, 0x7f, 0x2f, 0x9c, 0x95, 0x16, 0x1f, 0x0d, 0x12, 0xb2, 0x05, + 0xde, 0x30, 0x51, 0xd8, 0x49, 0xfa, 0xdd, 0xce, 0xe2, 0x71, 0xea, 0x5a, 0xf8, 0x30, 0x51, 0x1a, + 0x1e, 0xdb, 0x15, 0xfd, 0x21, 0x38, 0xbe, 0x44, 0x0b, 0x8f, 0x77, 0x77, 0xf4, 0x69, 0xf2, 0xdd, + 0x1d, 0x5c, 0x4e, 0x1f, 0x3a, 0xcd, 0xeb, 0x69, 0x7c, 0xbe, 0xbb, 0x83, 0xf4, 0xdb, 0x3d, 0xdc, + 0xe5, 0x0b, 0xe8, 0xb7, 0x7b, 0x05, 0xfd, 0x76, 0x0f, 0xe9, 0xb7, 0x7b, 0xb8, 0xe0, 0x17, 0xd1, + 0x4f, 0xf0, 0x39, 0xe2, 0x2b, 0xb8, 0x09, 0x1b, 0x0b, 0x8a, 0xae, 0x47, 0x81, 0x81, 0x23, 0x4e, + 0xf3, 0xeb, 0xa1, 0x06, 0x0b, 0xf8, 0xcd, 0x76, 0xb1, 0xfc, 0x52, 0x09, 0xf2, 0x09, 0xf8, 0xe5, + 0xff, 0x08, 0x1f, 0xba, 0x00, 0x6e, 0x1d, 0x93, 0x60, 0x90, 0xed, 0xdb, 0xb0, 0x36, 0xf3, 0x18, + 0xf5, 0x00, 0x32, 0xa3, 0xd4, 0xed, 0x34, 0x90, 0xb7, 0xfd, 0x8b, 0x0b, 0x37, 0x2c, 0xea, 0x79, + 0x16, 0xc5, 0x82, 0x9d, 0xa8, 0x09, 0xfa, 0x3e, 0x54, 0x64, 0x3e, 0x48, 0x6d, 0x27, 0x2f, 0x7a, + 0xe1, 0x14, 0x41, 0xe4, 0x4b, 0x68, 0xa4, 0xe1, 0xa8, 0x3f, 0x8c, 0x59, 0x12, 0xd9, 0x61, 0xbb, + 0x35, 0x93, 0x31, 0xfb, 0x03, 0x7a, 0x08, 0x3f, 0xd5, 0x78, 0x33, 0x7c, 0xeb, 0xa9, 0x35, 0xc9, + 0x43, 0x68, 0xca, 0x24, 0x3e, 0x61, 0x96, 0xcd, 0x43, 0xb6, 0x85, 0xbf, 0x0f, 0x88, 0xc5, 0xcc, + 0xcd, 0x63, 0x58, 0xb9, 0x44, 0x3a, 0x3d, 0x72, 0x1b, 0x66, 0xe4, 0x6e, 0x5d, 0x1e, 0xb9, 0x0b, + 0x69, 0xa7, 0x66, 0xef, 0x3d, 0xd8, 0x98, 0x89, 0x62, 0xb5, 0x09, 0x81, 0xca, 0x60, 0xac, 0x24, + 0xd6, 0x73, 0x99, 0xe2, 0x77, 0x7b, 0x1f, 0xc8, 0x0c, 0xf6, 0xcd, 0x8b, 0xe3, 0x42, 0x6e, 0x0d, + 0xfc, 0x27, 0x72, 0x3f, 0x6a, 0x41, 0x25, 0x0b, 0x53, 0x36, 0x33, 0xb4, 0x7e, 0xc0, 0x5b, 0x60, + 0xe4, 0xd1, 0xa7, 0x50, 0x61, 0xef, 0x55, 0x3a, 0x83, 0xf8, 0xf1, 0x6f, 0xa4, 0xd2, 0x29, 0x7f, + 0x05, 0x00, 0x00, 0xff, 0xff, 0xea, 0x06, 0x1a, 0xa9, 0x37, 0x0c, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto index 0d2fc1fa..36eb6e8c 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto @@ -107,6 +107,7 @@ message MsgWithOneof { int64 salary = 2; string Country = 3; string home_address = 4; + MsgWithRequired msg_with_required = 5; } } @@ -145,3 +146,26 @@ message KnownTypes { optional google.protobuf.StringValue str = 10; optional google.protobuf.BytesValue bytes = 11; } + +// Test messages for marshaling/unmarshaling required fields. 
+message MsgWithRequired { + required string str = 1; } + +message MsgWithIndirectRequired { + optional MsgWithRequired subm = 1; + map<string, MsgWithRequired> map_field = 2; + repeated MsgWithRequired slice_field = 3; } + +message MsgWithRequiredBytes { + required bytes byts = 1; } + +message MsgWithRequiredWKT { + required google.protobuf.StringValue str = 1; } + +extend Real { + optional MsgWithRequired extm = 125; } diff --git a/vendor/github.com/golang/protobuf/proto/Makefile b/vendor/github.com/golang/protobuf/proto/Makefile deleted file mode 100644 index e2e0651a..00000000 --- a/vendor/github.com/golang/protobuf/proto/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -install: - go install - -test: install generate-test-pbs - go test - - -generate-test-pbs: - make install - make -C testdata - protoc --go_out=Mtestdata/test.proto=github.com/golang/protobuf/proto/testdata,Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. proto3_proto/proto3.proto - make diff --git a/vendor/github.com/golang/protobuf/proto/all_test.go b/vendor/github.com/golang/protobuf/proto/all_test.go index 41451a40..361f72fb 100644 --- a/vendor/github.com/golang/protobuf/proto/all_test.go +++ b/vendor/github.com/golang/protobuf/proto/all_test.go @@ -41,11 +41,12 @@ import ( "reflect" "runtime/debug" "strings" + "sync" "testing" "time" . "github.com/golang/protobuf/proto" - . "github.com/golang/protobuf/proto/testdata" + . 
"github.com/golang/protobuf/proto/test_proto" ) var globalO *Buffer @@ -114,6 +115,8 @@ func initGoTest(setdefaults bool) *GoTest { pb.F_BytesDefaulted = Default_GoTest_F_BytesDefaulted pb.F_Sint32Defaulted = Int32(Default_GoTest_F_Sint32Defaulted) pb.F_Sint64Defaulted = Int64(Default_GoTest_F_Sint64Defaulted) + pb.F_Sfixed32Defaulted = Int32(Default_GoTest_F_Sfixed32Defaulted) + pb.F_Sfixed64Defaulted = Int64(Default_GoTest_F_Sfixed64Defaulted) } pb.Kind = GoTest_TIME.Enum() @@ -131,135 +134,13 @@ func initGoTest(setdefaults bool) *GoTest { pb.F_BytesRequired = []byte("bytes") pb.F_Sint32Required = Int32(-32) pb.F_Sint64Required = Int64(-64) + pb.F_Sfixed32Required = Int32(-32) + pb.F_Sfixed64Required = Int64(-64) pb.Requiredgroup = initGoTest_RequiredGroup() return pb } -func fail(msg string, b *bytes.Buffer, s string, t *testing.T) { - data := b.Bytes() - ld := len(data) - ls := len(s) / 2 - - fmt.Printf("fail %s ld=%d ls=%d\n", msg, ld, ls) - - // find the interesting spot - n - n := ls - if ld < ls { - n = ld - } - j := 0 - for i := 0; i < n; i++ { - bs := hex(s[j])*16 + hex(s[j+1]) - j += 2 - if data[i] == bs { - continue - } - n = i - break - } - l := n - 10 - if l < 0 { - l = 0 - } - h := n + 10 - - // find the interesting spot - n - fmt.Printf("is[%d]:", l) - for i := l; i < h; i++ { - if i >= ld { - fmt.Printf(" --") - continue - } - fmt.Printf(" %.2x", data[i]) - } - fmt.Printf("\n") - - fmt.Printf("sb[%d]:", l) - for i := l; i < h; i++ { - if i >= ls { - fmt.Printf(" --") - continue - } - bs := hex(s[j])*16 + hex(s[j+1]) - j += 2 - fmt.Printf(" %.2x", bs) - } - fmt.Printf("\n") - - t.Fail() - - // t.Errorf("%s: \ngood: %s\nbad: %x", msg, s, b.Bytes()) - // Print the output in a partially-decoded format; can - // be helpful when updating the test. It produces the output - // that is pasted, with minor edits, into the argument to verify(). 
- // data := b.Bytes() - // nesting := 0 - // for b.Len() > 0 { - // start := len(data) - b.Len() - // var u uint64 - // u, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on varint:", err) - // return - // } - // wire := u & 0x7 - // tag := u >> 3 - // switch wire { - // case WireVarint: - // v, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on varint:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireFixed32: - // v, err := DecodeFixed32(b) - // if err != nil { - // fmt.Printf("decode error on fixed32:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireFixed64: - // v, err := DecodeFixed64(b) - // if err != nil { - // fmt.Printf("decode error on fixed64:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireBytes: - // nb, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on bytes:", err) - // return - // } - // after_tag := len(data) - b.Len() - // str := make([]byte, nb) - // _, err = b.Read(str) - // if err != nil { - // fmt.Printf("decode error on bytes:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" \"%x\" // field %d, encoding %d (FIELD)\n", - // data[start:after_tag], str, tag, wire) - // case WireStartGroup: - // nesting++ - // fmt.Printf("\t\t\"%x\"\t\t// start group field %d level %d\n", - // data[start:len(data)-b.Len()], tag, nesting) - // case WireEndGroup: - // fmt.Printf("\t\t\"%x\"\t\t// end group field %d level %d\n", - // data[start:len(data)-b.Len()], tag, nesting) - // nesting-- - // default: - // fmt.Printf("unrecognized wire type %d\n", wire) - // return - // } - // } -} - func hex(c uint8) uint8 { if '0' <= c && c <= '9' { return c - '0' @@ -482,6 +363,48 @@ func TestMarshalerEncoding(t *testing.T) { } } +// Ensure that Buffer.Marshal uses O(N) memory for N messages +func TestBufferMarshalAllocs(t *testing.T) { + value := &OtherMessage{Key: Int64(1)} + msg := &MyMessage{Count: Int32(1), Others: []*OtherMessage{value}} + + reallocSize := func(t *testing.T, items int, prealloc int) (int64, int64) { + var b Buffer + b.SetBuf(make([]byte, 0, prealloc)) + + var allocSpace int64 + prevCap := cap(b.Bytes()) + for i := 0; i < items; i++ { + err := b.Marshal(msg) + if err != nil { + t.Errorf("Marshal err = %q", err) + break + } + if c := cap(b.Bytes()); prevCap != c { + allocSpace += int64(c) + prevCap = c + } + } + needSpace := int64(len(b.Bytes())) + return allocSpace, needSpace + } + + for _, prealloc := range []int{0, 100, 10000} { + for _, items := range []int{1, 2, 5, 10, 20, 50, 100, 200, 500, 1000} { + runtimeSpace, need := reallocSize(t, items, prealloc) + totalSpace := int64(prealloc) + runtimeSpace + + runtimeRatio := float64(runtimeSpace) / float64(need) + totalRatio := float64(totalSpace) / float64(need) + + if totalRatio < 1 || runtimeRatio > 4 { + t.Errorf("needed %dB, allocated %dB total (ratio %.1f), allocated %dB at runtime (ratio %.1f)", + need, totalSpace, totalRatio, runtimeSpace, runtimeRatio) + } + } + } +} + // Simple tests for bytes func TestBytesPrimitives(t *testing.T) { o := old() @@ -519,7 +442,7 @@ func TestRequiredBit(t *testing.T) { err := o.Marshal(pb) if err == nil { t.Error("did not catch missing required fields") - } else if 
strings.Index(err.Error(), "Kind") < 0 { + } else if !strings.Contains(err.Error(), "Kind") { t.Error("wrong error type:", err) } } @@ -612,7 +535,9 @@ func TestEncodeDecode1(t *testing.T) { "b404"+ // field 70, encoding 4, end group "aa0605"+"6279746573"+ // field 101, encoding 2, string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 - "b8067f") // field 103, encoding 0, 0x7f zigzag64 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff") // field 105, encoding 1, -64 fixed64 } // All required fields set, defaults provided. @@ -647,9 +572,13 @@ func TestEncodeDecode2(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -669,6 +598,8 @@ func TestEncodeDecode3(t *testing.T) { pb.F_BytesDefaulted = []byte("Bignose") pb.F_Sint32Defaulted = Int32(-32) pb.F_Sint64Defaulted = Int64(-64) + pb.F_Sfixed32Defaulted = Int32(-32) + pb.F_Sfixed64Defaulted = Int64(-64) overify(t, pb, "0807"+ // field 1, encoding 0, value 7 @@ -699,9 +630,13 @@ func TestEncodeDecode3(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -724,6 +659,8 @@ func TestEncodeDecode4(t *testing.T) { pb.F_BytesOptional = []byte("Bignose") pb.F_Sint32Optional = Int32(-32) pb.F_Sint64Optional = Int64(-64) + pb.F_Sfixed32Optional = Int32(-32) + pb.F_Sfixed64Optional = Int64(-64) pb.Optionalgroup = initGoTest_OptionalGroup() overify(t, pb, @@ -771,12 +708,18 @@ func TestEncodeDecode4(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "ea1207"+"4269676e6f7365"+ // field 301, encoding 2, string "Bignose" "f0123f"+ // field 302, encoding 0, value 63 "f8127f"+ // field 303, encoding 0, value 127 + "8513e0ffffff"+ // field 304, encoding 5, -32 fixed32 + "8913c0ffffffffffffff"+ // field 305, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -797,6 +740,8 @@ func 
TestEncodeDecode5(t *testing.T) { pb.F_BytesRepeated = [][]byte{[]byte("big"), []byte("nose")} pb.F_Sint32Repeated = []int32{32, -32} pb.F_Sint64Repeated = []int64{64, -64} + pb.F_Sfixed32Repeated = []int32{32, -32} + pb.F_Sfixed64Repeated = []int64{64, -64} pb.Repeatedgroup = []*GoTest_RepeatedGroup{initGoTest_RepeatedGroup(), initGoTest_RepeatedGroup()} overify(t, pb, @@ -856,15 +801,23 @@ func TestEncodeDecode5(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "ca0c03"+"626967"+ // field 201, encoding 2, string "big" "ca0c04"+"6e6f7365"+ // field 201, encoding 2, string "nose" "d00c40"+ // field 202, encoding 0, value 32 "d00c3f"+ // field 202, encoding 0, value -32 "d80c8001"+ // field 203, encoding 0, value 64 "d80c7f"+ // field 203, encoding 0, value -64 + "e50c20000000"+ // field 204, encoding 5, 32 fixed32 + "e50ce0ffffff"+ // field 204, encoding 5, -32 fixed32 + "e90c4000000000000000"+ // field 205, encoding 1, 64 fixed64 + "e90cc0ffffffffffffff"+ // field 205, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -882,6 +835,8 @@ func TestEncodeDecode6(t *testing.T) { pb.F_DoubleRepeatedPacked = []float64{64., 65.} pb.F_Sint32RepeatedPacked = []int32{32, -32} pb.F_Sint64RepeatedPacked = []int64{64, -64} + pb.F_Sfixed32RepeatedPacked = []int32{32, -32} + pb.F_Sfixed64RepeatedPacked = []int64{64, -64} overify(t, pb, "0807"+ // field 1, encoding 0, value 7 @@ -917,10 +872,17 @@ func TestEncodeDecode6(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "b21f02"+ // field 502, encoding 2, 2 bytes "403f"+ // value 32, value -32 "ba1f03"+ // field 503, encoding 2, 3 bytes - "80017f") // value 64, value -64 + "80017f"+ // value 64, value -64 + "c21f08"+ // field 504, encoding 2, 8 bytes + "20000000e0ffffff"+ // value 32, value -32 + "ca1f10"+ // field 505, encoding 2, 16 bytes + "4000000000000000c0ffffffffffffff") // value 64, value -64 + } // Test that we can encode empty bytes fields. @@ -1167,13 +1129,10 @@ func TestBigRepeated(t *testing.T) { if pbd.Repeatedgroup[i] == nil { // TODO: more checking? 
t.Error("pbd.Repeatedgroup bad") } - var x uint64 - x = uint64(pbd.F_Sint64Repeated[i]) - if x != i { + if x := uint64(pbd.F_Sint64Repeated[i]); x != i { t.Error("pbd.F_Sint64Repeated bad", x, i) } - x = uint64(pbd.F_Sint32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Sint32Repeated[i]); x != i { t.Error("pbd.F_Sint32Repeated bad", x, i) } s := fmt.Sprint(i) @@ -1181,39 +1140,31 @@ func TestBigRepeated(t *testing.T) { if pbd.F_StringRepeated[i] != s { t.Error("pbd.F_Sint32Repeated bad", pbd.F_StringRepeated[i], i) } - x = uint64(pbd.F_DoubleRepeated[i]) - if x != i { + if x := uint64(pbd.F_DoubleRepeated[i]); x != i { t.Error("pbd.F_DoubleRepeated bad", x, i) } - x = uint64(pbd.F_FloatRepeated[i]) - if x != i { + if x := uint64(pbd.F_FloatRepeated[i]); x != i { t.Error("pbd.F_FloatRepeated bad", x, i) } - x = pbd.F_Uint64Repeated[i] - if x != i { + if x := pbd.F_Uint64Repeated[i]; x != i { t.Error("pbd.F_Uint64Repeated bad", x, i) } - x = uint64(pbd.F_Uint32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Uint32Repeated[i]); x != i { t.Error("pbd.F_Uint32Repeated bad", x, i) } - x = pbd.F_Fixed64Repeated[i] - if x != i { + if x := pbd.F_Fixed64Repeated[i]; x != i { t.Error("pbd.F_Fixed64Repeated bad", x, i) } - x = uint64(pbd.F_Fixed32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Fixed32Repeated[i]); x != i { t.Error("pbd.F_Fixed32Repeated bad", x, i) } - x = uint64(pbd.F_Int64Repeated[i]) - if x != i { + if x := uint64(pbd.F_Int64Repeated[i]); x != i { t.Error("pbd.F_Int64Repeated bad", x, i) } - x = uint64(pbd.F_Int32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Int32Repeated[i]); x != i { t.Error("pbd.F_Int32Repeated bad", x, i) } - if pbd.F_BoolRepeated[i] != (i%2 == 0) { + if x := pbd.F_BoolRepeated[i]; x != (i%2 == 0) { t.Error("pbd.F_BoolRepeated bad", x, i) } if pbd.RepeatedField[i] == nil { // TODO: more checking? @@ -1222,21 +1173,25 @@ func TestBigRepeated(t *testing.T) { } } -// Verify we give a useful message when decoding to the wrong structure type. -func TestTypeMismatch(t *testing.T) { - pb1 := initGoTest(true) +func TestBadWireTypeUnknown(t *testing.T) { + var b []byte + fmt.Sscanf("0a01780d00000000080b101612036161611521000000202c220362626225370000002203636363214200000000000000584d5a036464645900000000000056405d63000000", "%x", &b) - // Marshal - o := old() - o.Marshal(pb1) + m := new(MyMessage) + if err := Unmarshal(b, m); err != nil { + t.Errorf("unexpected Unmarshal error: %v", err) + } - // Now Unmarshal it to the wrong type. 
- pb2 := initGoTestField() - err := o.Unmarshal(pb2) - if err == nil { - t.Error("expected error, got no error") - } else if !strings.Contains(err.Error(), "bad wiretype") { - t.Error("expected bad wiretype error, got", err) + var unknown []byte + fmt.Sscanf("0a01780d0000000010161521000000202c2537000000214200000000000000584d5a036464645d63000000", "%x", &unknown) + if !bytes.Equal(m.XXX_unrecognized, unknown) { + t.Errorf("unknown bytes mismatch:\ngot %x\nwant %x", m.XXX_unrecognized, unknown) + } + DiscardUnknown(m) + + want := &MyMessage{Count: Int32(11), Name: String("aaa"), Pet: []string{"bbb", "ccc"}, Bigfloat: Float64(88)} + if !Equal(m, want) { + t.Errorf("message mismatch:\ngot %v\nwant %v", m, want) } } @@ -1331,7 +1286,8 @@ func TestRequiredFieldEnforcement(t *testing.T) { err = Unmarshal(buf, pb) if err == nil { t.Error("unmarshal: expected error, got nil") - } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "{Unknown}") { + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Type") && !strings.Contains(err.Error(), "{Unknown}") { + // TODO: remove unknown cases once we commit to the new unmarshaler. t.Errorf("unmarshal: bad error type: %v", err) } } @@ -1348,7 +1304,7 @@ func TestRequiredFieldEnforcementGroups(t *testing.T) { buf := []byte{11, 12} if err := Unmarshal(buf, pb); err == nil { t.Error("unmarshal: expected error, got nil") - } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.{Unknown}") { + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.Field") && !strings.Contains(err.Error(), "Group.{Unknown}") { t.Errorf("unmarshal: bad error type: %v", err) } } @@ -1385,18 +1341,7 @@ func (*NNIMessage) Reset() {} func (*NNIMessage) String() string { return "" } func (*NNIMessage) ProtoMessage() {} -// A type that implements the Marshaler interface and is nillable. -type nillableMessage struct { - x uint64 -} - -func (nm *nillableMessage) Marshal() ([]byte, error) { - return EncodeVarint(nm.x), nil -} - -type NMMessage struct { - nm *nillableMessage -} +type NMMessage struct{} func (*NMMessage) Reset() {} func (*NMMessage) String() string { return "" } @@ -1595,6 +1540,14 @@ func TestVarintOverflow(t *testing.T) { } } +func TestBytesWithInvalidLengthInGroup(t *testing.T) { + // Overflowing a 64-bit length should not be allowed. 
+ b := []byte{0xbb, 0x30, 0xb2, 0x30, 0xb0, 0xb2, 0x83, 0xf1, 0xb0, 0xb2, 0xef, 0xbf, 0xbd, 0x01} + if err := Unmarshal(b, new(MyMessage)); err == nil { + t.Fatalf("Overflowed uint64 length without error") + } +} + func TestUnmarshalFuzz(t *testing.T) { const N = 1000 seed := time.Now().UnixNano() @@ -1668,6 +1621,28 @@ func TestExtensionMarshalOrder(t *testing.T) { } } +func TestExtensionMapFieldMarshalDeterministic(t *testing.T) { + m := &MyMessage{Count: Int(123)} + if err := SetExtension(m, E_Ext_More, &Ext{MapField: map[int32]int32{1: 1, 2: 2, 3: 3, 4: 4}}); err != nil { + t.Fatalf("SetExtension: %v", err) + } + marshal := func(m Message) []byte { + var b Buffer + b.SetDeterministic(true) + if err := b.Marshal(m); err != nil { + t.Fatalf("Marshal failed: %v", err) + } + return b.Bytes() + } + + want := marshal(m) + for i := 0; i < 100; i++ { + if got := marshal(m); !bytes.Equal(got, want) { + t.Errorf("Marshal produced inconsistent output with determinism enabled (pass %d).\n got %v\nwant %v", i, got, want) + } + } +} + // Many extensions, because small maps might not iterate differently on each iteration. var exts = []*ExtensionDesc{ E_X201, @@ -1802,6 +1777,43 @@ func TestUnmarshalMergesMessages(t *testing.T) { } } +func TestUnmarshalMergesGroups(t *testing.T) { + // If a nested group occurs twice in the input, + // the fields should be merged when decoding. + a := &GroupNew{ + G: &GroupNew_G{ + X: Int32(7), + Y: Int32(8), + }, + } + aData, err := Marshal(a) + if err != nil { + t.Fatalf("Marshal(a): %v", err) + } + b := &GroupNew{ + G: &GroupNew_G{ + X: Int32(9), + }, + } + bData, err := Marshal(b) + if err != nil { + t.Fatalf("Marshal(b): %v", err) + } + want := &GroupNew{ + G: &GroupNew_G{ + X: Int32(9), + Y: Int32(8), + }, + } + got := new(GroupNew) + if err := Unmarshal(append(aData, bData...), got); err != nil { + t.Fatalf("Unmarshal: %v", err) + } + if !Equal(got, want) { + t.Errorf("\n got %v\nwant %v", got, want) + } +} + func TestEncodingSizes(t *testing.T) { tests := []struct { m Message @@ -1845,7 +1857,9 @@ func TestRequiredNotSetError(t *testing.T) { "b404" + // field 70, encoding 4, end group "aa0605" + "6279746573" + // field 101, encoding 2, string "bytes" "b0063f" + // field 102, encoding 0, 0x3f zigzag32 - "b8067f" // field 103, encoding 0, 0x7f zigzag64 + "b8067f" + // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff" + // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff" // field 105, encoding 1, -64 fixed64 o := old() bytes, err := Marshal(pb) @@ -1854,7 +1868,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("expected = %s", expected) } - if strings.Index(err.Error(), "RequiredField.Label") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") { t.Errorf("marshal-1 wrong err msg: %v", err) } if !equal(bytes, expected, t) { @@ -1870,7 +1884,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("string = %s", expected) } - if strings.Index(err.Error(), "RequiredField.{Unknown}") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") && !strings.Contains(err.Error(), "RequiredField.{Unknown}") { t.Errorf("unmarshal wrong err msg: %v", err) } bytes, err = Marshal(pbd) @@ -1879,7 +1893,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("string = %s", expected) } - if strings.Index(err.Error(), "RequiredField.Label") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") { t.Errorf("marshal-2 wrong err msg: %v", err) } if 
!equal(bytes, expected, t) { @@ -1888,6 +1902,25 @@ func TestRequiredNotSetError(t *testing.T) { } } +func TestRequiredNotSetErrorWithBadWireTypes(t *testing.T) { + // Required field expects a varint, and properly found a varint. + if err := Unmarshal([]byte{0x08, 0x00}, new(GoEnum)); err != nil { + t.Errorf("Unmarshal = %v, want nil", err) + } + // Required field expects a varint, but found a fixed32 instead. + if err := Unmarshal([]byte{0x0d, 0x00, 0x00, 0x00, 0x00}, new(GoEnum)); err == nil { + t.Errorf("Unmarshal = nil, want RequiredNotSetError") + } + // Required field expects a varint, and found both a varint and fixed32 (ignored). + m := new(GoEnum) + if err := Unmarshal([]byte{0x08, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x00}, m); err != nil { + t.Errorf("Unmarshal = %v, want nil", err) + } + if !bytes.Equal(m.XXX_unrecognized, []byte{0x0d, 0x00, 0x00, 0x00, 0x00}) { + t.Errorf("expected fixed32 to appear as unknown bytes: %x", m.XXX_unrecognized) + } +} + func fuzzUnmarshal(t *testing.T, data []byte) { defer func() { if e := recover(); e != nil { @@ -1946,6 +1979,32 @@ func TestMapFieldMarshal(t *testing.T) { (new(Buffer)).DebugPrint("Dump of b", b) } +func TestMapFieldDeterministicMarshal(t *testing.T) { + m := &MessageWithMap{ + NameMapping: map[int32]string{ + 1: "Rob", + 4: "Ian", + 8: "Dave", + }, + } + + marshal := func(m Message) []byte { + var b Buffer + b.SetDeterministic(true) + if err := b.Marshal(m); err != nil { + t.Fatalf("Marshal failed: %v", err) + } + return b.Bytes() + } + + want := marshal(m) + for i := 0; i < 10; i++ { + if got := marshal(m); !bytes.Equal(got, want) { + t.Errorf("Marshal produced inconsistent output with determinism enabled (pass %d).\n got %v\nwant %v", i, got, want) + } + } +} + func TestMapFieldRoundTrips(t *testing.T) { m := &MessageWithMap{ NameMapping: map[int32]string{ @@ -1954,7 +2013,7 @@ func TestMapFieldRoundTrips(t *testing.T) { 8: "Dave", }, MsgMapping: map[int64]*FloatingPoint{ - 0x7001: &FloatingPoint{F: Float64(2.0)}, + 0x7001: {F: Float64(2.0)}, }, ByteMapping: map[bool][]byte{ false: []byte("that's not right!"), @@ -1970,14 +2029,8 @@ func TestMapFieldRoundTrips(t *testing.T) { if err := Unmarshal(b, m2); err != nil { t.Fatalf("Unmarshal: %v", err) } - for _, pair := range [][2]interface{}{ - {m.NameMapping, m2.NameMapping}, - {m.MsgMapping, m2.MsgMapping}, - {m.ByteMapping, m2.ByteMapping}, - } { - if !reflect.DeepEqual(pair[0], pair[1]) { - t.Errorf("Map did not survive a round trip.\ninitial: %v\n final: %v", pair[0], pair[1]) - } + if !Equal(m, m2) { + t.Errorf("Map did not survive a round trip.\ninitial: %v\n final: %v", m, m2) } } @@ -2005,7 +2058,7 @@ func TestMapFieldWithNil(t *testing.T) { func TestMapFieldWithNilBytes(t *testing.T) { m1 := &MessageWithMap{ ByteMapping: map[bool][]byte{ - false: []byte{}, + false: {}, true: nil, }, } @@ -2119,6 +2172,22 @@ func TestOneof(t *testing.T) { } } +func TestOneofNilBytes(t *testing.T) { + // A oneof with nil byte slice should marshal to tag + 0 (size), with no error. 
+ m := &Communique{Union: &Communique_Data{Data: nil}} + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal failed: %v", err) + } + want := []byte{ + 7<<3 | 2, // tag 7, wire type 2 + 0, // size + } + if !bytes.Equal(b, want) { + t.Errorf("Wrong result of Marshal: got %x, want %x", b, want) + } +} + func TestInefficientPackedBool(t *testing.T) { // https://github.com/golang/protobuf/issues/76 inp := []byte{ @@ -2132,6 +2201,69 @@ func TestInefficientPackedBool(t *testing.T) { } } +// Make sure pure-reflect-based implementation handles +// []int32-[]enum conversion correctly. +func TestRepeatedEnum2(t *testing.T) { + pb := &RepeatedEnum{ + Color: []RepeatedEnum_Color{RepeatedEnum_RED}, + } + b, err := Marshal(pb) + if err != nil { + t.Fatalf("Marshal failed: %v", err) + } + x := new(RepeatedEnum) + err = Unmarshal(b, x) + if err != nil { + t.Fatalf("Unmarshal failed: %v", err) + } + if !Equal(pb, x) { + t.Errorf("Incorrect result: want: %v got: %v", pb, x) + } +} + +// TestConcurrentMarshal makes sure that it is safe to marshal +// same message in multiple goroutines concurrently. +func TestConcurrentMarshal(t *testing.T) { + pb := initGoTest(true) + const N = 100 + b := make([][]byte, N) + + var wg sync.WaitGroup + for i := 0; i < N; i++ { + wg.Add(1) + go func(i int) { + defer wg.Done() + var err error + b[i], err = Marshal(pb) + if err != nil { + t.Errorf("marshal error: %v", err) + } + }(i) + } + + wg.Wait() + for i := 1; i < N; i++ { + if !bytes.Equal(b[0], b[i]) { + t.Errorf("concurrent marshal result not same: b[0] = %v, b[%d] = %v", b[0], i, b[i]) + } + } +} + +func TestInvalidUTF8(t *testing.T) { + const wire = "\x12\x04\xde\xea\xca\xfe" + + var m GoTest + if err := Unmarshal([]byte(wire), &m); err == nil { + t.Errorf("Unmarshal error: got nil, want non-nil") + } + + m.Reset() + m.Table = String(wire[2:]) + if _, err := Marshal(&m); err == nil { + t.Errorf("Marshal error: got nil, want non-nil") + } +} + // Benchmarks func testMsg() *GoTest { diff --git a/vendor/github.com/golang/protobuf/proto/any_test.go b/vendor/github.com/golang/protobuf/proto/any_test.go index 1a3c22ed..56fc97c1 100644 --- a/vendor/github.com/golang/protobuf/proto/any_test.go +++ b/vendor/github.com/golang/protobuf/proto/any_test.go @@ -38,7 +38,7 @@ import ( "github.com/golang/protobuf/proto" pb "github.com/golang/protobuf/proto/proto3_proto" - testpb "github.com/golang/protobuf/proto/testdata" + testpb "github.com/golang/protobuf/proto/test_proto" anypb "github.com/golang/protobuf/ptypes/any" ) @@ -166,33 +166,33 @@ anything: < name: "David" result_count: 47 anything: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 47 name: "David" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "foo" > - [testdata.Ext.text]: "bar" + [test_proto.Ext.text]: "bar" > > many_things: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 42 bikeshed: GREEN rep_bytes: "roboto" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "baz" > > > many_things: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 47 name: "David" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "foo" > - [testdata.Ext.text]: "bar" + [test_proto.Ext.text]: "bar" > > ` diff --git a/vendor/github.com/golang/protobuf/proto/clone.go b/vendor/github.com/golang/protobuf/proto/clone.go index e392575b..3cd3249f 100644 --- a/vendor/github.com/golang/protobuf/proto/clone.go +++ 
b/vendor/github.com/golang/protobuf/proto/clone.go @@ -35,22 +35,39 @@ package proto import ( + "fmt" "log" "reflect" "strings" ) // Clone returns a deep copy of a protocol buffer. -func Clone(pb Message) Message { - in := reflect.ValueOf(pb) +func Clone(src Message) Message { + in := reflect.ValueOf(src) if in.IsNil() { - return pb + return src } - out := reflect.New(in.Type().Elem()) - // out is empty so a merge is a deep copy. - mergeStruct(out.Elem(), in.Elem()) - return out.Interface().(Message) + dst := out.Interface().(Message) + Merge(dst, src) + return dst +} + +// Merger is the interface representing objects that can merge messages of the same type. +type Merger interface { + // Merge merges src into this message. + // Required and optional fields that are set in src will be set to that value in dst. + // Elements of repeated fields will be appended. + // + // Merge may panic if called with a different argument type than the receiver. + Merge(src Message) +} + +// generatedMerger is the custom merge method that generated protos will have. +// We must add this method since a generate Merge method will conflict with +// many existing protos that have a Merge data field already defined. +type generatedMerger interface { + XXX_Merge(src Message) } // Merge merges src into dst. @@ -58,17 +75,24 @@ func Clone(pb Message) Message { // Elements of repeated fields will be appended. // Merge panics if src and dst are not the same type, or if dst is nil. func Merge(dst, src Message) { + if m, ok := dst.(Merger); ok { + m.Merge(src) + return + } + in := reflect.ValueOf(src) out := reflect.ValueOf(dst) if out.IsNil() { panic("proto: nil destination") } if in.Type() != out.Type() { - // Explicit test prior to mergeStruct so that mistyped nils will fail - panic("proto: type mismatch") + panic(fmt.Sprintf("proto.Merge(%T, %T) type mismatch", dst, src)) } if in.IsNil() { - // Merging nil into non-nil is a quiet no-op + return // Merge from nil src is a noop + } + if m, ok := dst.(generatedMerger); ok { + m.XXX_Merge(src) return } mergeStruct(out.Elem(), in.Elem()) @@ -84,7 +108,7 @@ func mergeStruct(out, in reflect.Value) { mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) } - if emIn, ok := extendable(in.Addr().Interface()); ok { + if emIn, err := extendable(in.Addr().Interface()); err == nil { emOut, _ := extendable(out.Addr().Interface()) mIn, muIn := emIn.extensionsRead() if mIn != nil { diff --git a/vendor/github.com/golang/protobuf/proto/clone_test.go b/vendor/github.com/golang/protobuf/proto/clone_test.go index f607ff49..0d3b1273 100644 --- a/vendor/github.com/golang/protobuf/proto/clone_test.go +++ b/vendor/github.com/golang/protobuf/proto/clone_test.go @@ -37,7 +37,7 @@ import ( "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) var cloneTestMessage = &pb.MyMessage{ @@ -72,7 +72,7 @@ func init() { func TestClone(t *testing.T) { m := proto.Clone(cloneTestMessage).(*pb.MyMessage) if !proto.Equal(m, cloneTestMessage) { - t.Errorf("Clone(%v) = %v", cloneTestMessage, m) + t.Fatalf("Clone(%v) = %v", cloneTestMessage, m) } // Verify it was a deep copy. @@ -244,27 +244,45 @@ var mergeTests = []struct { Data: []byte("texas!"), }, }, - // Oneof fields should merge by assignment. 
- { - src: &pb.Communique{ - Union: &pb.Communique_Number{41}, - }, - dst: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, - want: &pb.Communique{ - Union: &pb.Communique_Number{41}, - }, + { // Oneof fields should merge by assignment. + src: &pb.Communique{Union: &pb.Communique_Number{41}}, + dst: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, + want: &pb.Communique{Union: &pb.Communique_Number{41}}, + }, + { // Oneof nil is the same as not set. + src: &pb.Communique{}, + dst: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, + want: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, }, - // Oneof nil is the same as not set. { - src: &pb.Communique{}, - dst: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, - want: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, + src: &pb.Communique{Union: &pb.Communique_Number{1337}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Number{1337}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Col{pb.MyMessage_RED}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Col{pb.MyMessage_RED}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Data{[]byte("hello")}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Data{[]byte("hello")}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Msg{}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123")}}}, + dst: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + want: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123"), BytesField: []byte{1, 2, 3}}}}, }, { src: &proto3pb.Message{ @@ -287,14 +305,86 @@ var mergeTests = []struct { }, }, }, + { + src: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + dst: &pb.GoTest{}, + want: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + }, + { + src: &pb.GoTest{}, + dst: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + want: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + }, + { + src: &pb.GoTest{ + F_BytesRepeated: [][]byte{nil, []byte{}, []byte{0}}, + }, + dst: &pb.GoTest{}, + want: &pb.GoTest{ + F_BytesRepeated: [][]byte{nil, 
[]byte{}, []byte{0}}, + }, + }, + { + src: &pb.MyMessage{ + Others: []*pb.OtherMessage{}, + }, + dst: &pb.MyMessage{}, + want: &pb.MyMessage{ + Others: []*pb.OtherMessage{}, + }, + }, } func TestMerge(t *testing.T) { for _, m := range mergeTests { got := proto.Clone(m.dst) + if !proto.Equal(got, m.dst) { + t.Errorf("Clone()\ngot %v\nwant %v", got, m.dst) + continue + } proto.Merge(got, m.src) if !proto.Equal(got, m.want) { - t.Errorf("Merge(%v, %v)\n got %v\nwant %v\n", m.dst, m.src, got, m.want) + t.Errorf("Merge(%v, %v)\ngot %v\nwant %v", m.dst, m.src, got, m.want) } } } diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go index aa207298..d9aa3c42 100644 --- a/vendor/github.com/golang/protobuf/proto/decode.go +++ b/vendor/github.com/golang/protobuf/proto/decode.go @@ -39,8 +39,6 @@ import ( "errors" "fmt" "io" - "os" - "reflect" ) // errOverflow is returned when an integer is too large to be represented. @@ -50,10 +48,6 @@ var errOverflow = errors.New("proto: integer overflow") // wire type is encountered. It does not get returned to user code. var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") -// The fundamental decoders that interpret bytes on the wire. -// Those that take integer types all return uint64 and are -// therefore of type valueDecoder. - // DecodeVarint reads a varint-encoded integer from the slice. // It returns the integer and the number of bytes consumed, or // zero if there is not enough. @@ -267,9 +261,6 @@ func (p *Buffer) DecodeZigzag32() (x uint64, err error) { return } -// These are not ValueDecoders: they produce an array of bytes or a string. -// bytes, embedded messages - // DecodeRawBytes reads a count-delimited byte buffer from the Buffer. // This is the format used for the bytes protocol buffer // type and for embedded messages. @@ -311,81 +302,29 @@ func (p *Buffer) DecodeStringBytes() (s string, err error) { return string(buf), nil } -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. -// If the protocol buffer has extensions, and the field matches, add it as an extension. -// Otherwise, if the XXX_unrecognized field exists, append the skipped data there. -func (o *Buffer) skipAndSave(t reflect.Type, tag, wire int, base structPointer, unrecField field) error { - oi := o.index - - err := o.skip(t, tag, wire) - if err != nil { - return err - } - - if !unrecField.IsValid() { - return nil - } - - ptr := structPointer_Bytes(base, unrecField) - - // Add the skipped field to struct field - obuf := o.buf - - o.buf = *ptr - o.EncodeVarint(uint64(tag<<3 | wire)) - *ptr = append(o.buf, obuf[oi:o.index]...) - - o.buf = obuf - - return nil -} - -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. 
-func (o *Buffer) skip(t reflect.Type, tag, wire int) error { - - var u uint64 - var err error - - switch wire { - case WireVarint: - _, err = o.DecodeVarint() - case WireFixed64: - _, err = o.DecodeFixed64() - case WireBytes: - _, err = o.DecodeRawBytes(false) - case WireFixed32: - _, err = o.DecodeFixed32() - case WireStartGroup: - for { - u, err = o.DecodeVarint() - if err != nil { - break - } - fwire := int(u & 0x7) - if fwire == WireEndGroup { - break - } - ftag := int(u >> 3) - err = o.skip(t, ftag, fwire) - if err != nil { - break - } - } - default: - err = fmt.Errorf("proto: can't skip unknown wire type %d for %s", wire, t) - } - return err -} - // Unmarshaler is the interface representing objects that can -// unmarshal themselves. The method should reset the receiver before -// decoding starts. The argument points to data that may be +// unmarshal themselves. The argument points to data that may be // overwritten, so implementations should not keep references to the // buffer. +// Unmarshal implementations should not clear the receiver. +// Any unmarshaled data should be merged into the receiver. +// Callers of Unmarshal that do not want to retain existing data +// should Reset the receiver before calling Unmarshal. type Unmarshaler interface { Unmarshal([]byte) error } +// newUnmarshaler is the interface representing objects that can +// unmarshal themselves. The semantics are identical to Unmarshaler. +// +// This exists to support protoc-gen-go generated messages. +// The proto package will stop type-asserting to this interface in the future. +// +// DO NOT DEPEND ON THIS. +type newUnmarshaler interface { + XXX_Unmarshal([]byte) error +} + // Unmarshal parses the protocol buffer representation in buf and places the // decoded result in pb. If the struct underlying pb does not match // the data in buf, the results can be unpredictable. @@ -395,7 +334,13 @@ type Unmarshaler interface { // to preserve and append to existing data. func Unmarshal(buf []byte, pb Message) error { pb.Reset() - return UnmarshalMerge(buf, pb) + if u, ok := pb.(newUnmarshaler); ok { + return u.XXX_Unmarshal(buf) + } + if u, ok := pb.(Unmarshaler); ok { + return u.Unmarshal(buf) + } + return NewBuffer(buf).Unmarshal(pb) } // UnmarshalMerge parses the protocol buffer representation in buf and @@ -405,8 +350,16 @@ func Unmarshal(buf []byte, pb Message) error { // UnmarshalMerge merges into existing data in pb. // Most code should use Unmarshal instead. func UnmarshalMerge(buf []byte, pb Message) error { - // If the object can unmarshal itself, let it. + if u, ok := pb.(newUnmarshaler); ok { + return u.XXX_Unmarshal(buf) + } if u, ok := pb.(Unmarshaler); ok { + // NOTE: The history of proto have unfortunately been inconsistent + // whether Unmarshaler should or should not implicitly clear itself. + // Some implementations do, most do not. + // Thus, calling this here may or may not do what people want. + // + // See https://github.com/golang/protobuf/issues/424 return u.Unmarshal(buf) } return NewBuffer(buf).Unmarshal(pb) @@ -422,12 +375,17 @@ func (p *Buffer) DecodeMessage(pb Message) error { } // DecodeGroup reads a tag-delimited group from the Buffer. +// StartGroup tag is already consumed. This function consumes +// EndGroup tag. 
func (p *Buffer) DecodeGroup(pb Message) error { - typ, base, err := getbase(pb) - if err != nil { - return err + b := p.buf[p.index:] + x, y := findEndGroup(b) + if x < 0 { + return io.ErrUnexpectedEOF } - return p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), true, base) + err := Unmarshal(b[:x], pb) + p.index += y + return err } // Unmarshal parses the protocol buffer representation in the @@ -438,533 +396,33 @@ func (p *Buffer) DecodeGroup(pb Message) error { // Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. func (p *Buffer) Unmarshal(pb Message) error { // If the object can unmarshal itself, let it. + if u, ok := pb.(newUnmarshaler); ok { + err := u.XXX_Unmarshal(p.buf[p.index:]) + p.index = len(p.buf) + return err + } if u, ok := pb.(Unmarshaler); ok { + // NOTE: The history of proto have unfortunately been inconsistent + // whether Unmarshaler should or should not implicitly clear itself. + // Some implementations do, most do not. + // Thus, calling this here may or may not do what people want. + // + // See https://github.com/golang/protobuf/issues/424 err := u.Unmarshal(p.buf[p.index:]) p.index = len(p.buf) return err } - typ, base, err := getbase(pb) - if err != nil { - return err - } - - err = p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), false, base) - - if collectStats { - stats.Decode++ - } - - return err -} - -// unmarshalType does the work of unmarshaling a structure. -func (o *Buffer) unmarshalType(st reflect.Type, prop *StructProperties, is_group bool, base structPointer) error { - var state errorState - required, reqFields := prop.reqCount, uint64(0) - - var err error - for err == nil && o.index < len(o.buf) { - oi := o.index - var u uint64 - u, err = o.DecodeVarint() - if err != nil { - break - } - wire := int(u & 0x7) - if wire == WireEndGroup { - if is_group { - if required > 0 { - // Not enough information to determine the exact field. - // (See below.) - return &RequiredNotSetError{"{Unknown}"} - } - return nil // input is satisfied - } - return fmt.Errorf("proto: %s: wiretype end group for non-group", st) - } - tag := int(u >> 3) - if tag <= 0 { - return fmt.Errorf("proto: %s: illegal tag %d (wire type %d)", st, tag, wire) - } - fieldnum, ok := prop.decoderTags.get(tag) - if !ok { - // Maybe it's an extension? - if prop.extendable { - if e, _ := extendable(structPointer_Interface(base, st)); isExtensionField(e, int32(tag)) { - if err = o.skip(st, tag, wire); err == nil { - extmap := e.extensionsWrite() - ext := extmap[int32(tag)] // may be missing - ext.enc = append(ext.enc, o.buf[oi:o.index]...) - extmap[int32(tag)] = ext - } - continue - } - } - // Maybe it's a oneof? - if prop.oneofUnmarshaler != nil { - m := structPointer_Interface(base, st).(Message) - // First return value indicates whether tag is a oneof field. - ok, err = prop.oneofUnmarshaler(m, tag, wire, o) - if err == ErrInternalBadWireType { - // Map the error to something more descriptive. - // Do the formatting here to save generated code space. 
- err = fmt.Errorf("bad wiretype for oneof field in %T", m) - } - if ok { - continue - } - } - err = o.skipAndSave(st, tag, wire, base, prop.unrecField) - continue - } - p := prop.Prop[fieldnum] - - if p.dec == nil { - fmt.Fprintf(os.Stderr, "proto: no protobuf decoder for %s.%s\n", st, st.Field(fieldnum).Name) - continue - } - dec := p.dec - if wire != WireStartGroup && wire != p.WireType { - if wire == WireBytes && p.packedDec != nil { - // a packable field - dec = p.packedDec - } else { - err = fmt.Errorf("proto: bad wiretype for field %s.%s: got wiretype %d, want %d", st, st.Field(fieldnum).Name, wire, p.WireType) - continue - } - } - decErr := dec(o, p, base) - if decErr != nil && !state.shouldContinue(decErr, p) { - err = decErr - } - if err == nil && p.Required { - // Successfully decoded a required field. - if tag <= 64 { - // use bitmap for fields 1-64 to catch field reuse. - var mask uint64 = 1 << uint64(tag-1) - if reqFields&mask == 0 { - // new required field - reqFields |= mask - required-- - } - } else { - // This is imprecise. It can be fooled by a required field - // with a tag > 64 that is encoded twice; that's very rare. - // A fully correct implementation would require allocating - // a data structure, which we would like to avoid. - required-- - } - } - } - if err == nil { - if is_group { - return io.ErrUnexpectedEOF - } - if state.err != nil { - return state.err - } - if required > 0 { - // Not enough information to determine the exact field. If we use extra - // CPU, we could determine the field only if the missing required field - // has a tag <= 64 and we check reqFields. - return &RequiredNotSetError{"{Unknown}"} - } - } - return err -} - -// Individual type decoders -// For each, -// u is the decoded value, -// v is a pointer to the field (pointer) in the struct - -// Sizes of the pools to allocate inside the Buffer. -// The goal is modest amortization and allocation -// on at least 16-byte boundaries. -const ( - boolPoolSize = 16 - uint32PoolSize = 8 - uint64PoolSize = 4 -) - -// Decode a bool. -func (o *Buffer) dec_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - if len(o.bools) == 0 { - o.bools = make([]bool, boolPoolSize) - } - o.bools[0] = u != 0 - *structPointer_Bool(base, p.field) = &o.bools[0] - o.bools = o.bools[1:] - return nil -} - -func (o *Buffer) dec_proto3_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - *structPointer_BoolVal(base, p.field) = u != 0 - return nil -} - -// Decode an int32. -func (o *Buffer) dec_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32_Set(structPointer_Word32(base, p.field), o, uint32(u)) - return nil -} - -func (o *Buffer) dec_proto3_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32Val_Set(structPointer_Word32Val(base, p.field), uint32(u)) - return nil -} - -// Decode an int64. -func (o *Buffer) dec_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64_Set(structPointer_Word64(base, p.field), o, u) - return nil -} - -func (o *Buffer) dec_proto3_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64Val_Set(structPointer_Word64Val(base, p.field), o, u) - return nil -} - -// Decode a string. 
-func (o *Buffer) dec_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_String(base, p.field) = &s - return nil -} - -func (o *Buffer) dec_proto3_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_StringVal(base, p.field) = s - return nil -} - -// Decode a slice of bytes ([]byte). -func (o *Buffer) dec_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - *structPointer_Bytes(base, p.field) = b - return nil -} - -// Decode a slice of bools ([]bool). -func (o *Buffer) dec_slice_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - v := structPointer_BoolSlice(base, p.field) - *v = append(*v, u != 0) - return nil -} - -// Decode a slice of bools ([]bool) in packed format. -func (o *Buffer) dec_slice_packed_bool(p *Properties, base structPointer) error { - v := structPointer_BoolSlice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded bools - fin := o.index + nb - if fin < o.index { - return errOverflow - } - - y := *v - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - y = append(y, u != 0) - } - - *v = y - return nil -} - -// Decode a slice of int32s ([]int32). -func (o *Buffer) dec_slice_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - structPointer_Word32Slice(base, p.field).Append(uint32(u)) - return nil -} - -// Decode a slice of int32s ([]int32) in packed format. -func (o *Buffer) dec_slice_packed_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int32s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(uint32(u)) - } - return nil -} - -// Decode a slice of int64s ([]int64). -func (o *Buffer) dec_slice_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - - structPointer_Word64Slice(base, p.field).Append(u) - return nil -} - -// Decode a slice of int64s ([]int64) in packed format. -func (o *Buffer) dec_slice_packed_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int64s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(u) - } - return nil -} - -// Decode a slice of strings ([]string). -func (o *Buffer) dec_slice_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - v := structPointer_StringSlice(base, p.field) - *v = append(*v, s) - return nil -} - -// Decode a slice of slice of bytes ([][]byte). -func (o *Buffer) dec_slice_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - v := structPointer_BytesSlice(base, p.field) - *v = append(*v, b) - return nil -} - -// Decode a map field. 
-func (o *Buffer) dec_new_map(p *Properties, base structPointer) error { - raw, err := o.DecodeRawBytes(false) - if err != nil { - return err - } - oi := o.index // index at the end of this map entry - o.index -= len(raw) // move buffer back to start of map entry - - mptr := structPointer_NewAt(base, p.field, p.mtype) // *map[K]V - if mptr.Elem().IsNil() { - mptr.Elem().Set(reflect.MakeMap(mptr.Type().Elem())) - } - v := mptr.Elem() // map[K]V - - // Prepare addressable doubly-indirect placeholders for the key and value types. - // See enc_new_map for why. - keyptr := reflect.New(reflect.PtrTo(p.mtype.Key())).Elem() // addressable *K - keybase := toStructPointer(keyptr.Addr()) // **K - - var valbase structPointer - var valptr reflect.Value - switch p.mtype.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valptr = reflect.ValueOf(&dummy) // *[]byte - valbase = toStructPointer(valptr) // *[]byte - case reflect.Ptr: - // message; valptr is **Msg; need to allocate the intermediate pointer - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valptr.Set(reflect.New(valptr.Type().Elem())) - valbase = toStructPointer(valptr) - default: - // everything else - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valbase = toStructPointer(valptr.Addr()) // **V - } - - // Decode. - // This parses a restricted wire format, namely the encoding of a message - // with two fields. See enc_new_map for the format. - for o.index < oi { - // tagcode for key and value properties are always a single byte - // because they have tags 1 and 2. - tagcode := o.buf[o.index] - o.index++ - switch tagcode { - case p.mkeyprop.tagcode[0]: - if err := p.mkeyprop.dec(o, p.mkeyprop, keybase); err != nil { - return err - } - case p.mvalprop.tagcode[0]: - if err := p.mvalprop.dec(o, p.mvalprop, valbase); err != nil { - return err - } - default: - // TODO: Should we silently skip this instead? - return fmt.Errorf("proto: bad map data tag %d", raw[0]) - } - } - keyelem, valelem := keyptr.Elem(), valptr.Elem() - if !keyelem.IsValid() { - keyelem = reflect.Zero(p.mtype.Key()) - } - if !valelem.IsValid() { - valelem = reflect.Zero(p.mtype.Elem()) - } - - v.SetMapIndex(keyelem, valelem) - return nil -} - -// Decode a group. -func (o *Buffer) dec_struct_group(p *Properties, base structPointer) error { - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - return o.unmarshalType(p.stype, p.sprop, true, bas) -} - -// Decode an embedded message. -func (o *Buffer) dec_struct_message(p *Properties, base structPointer) (err error) { - raw, e := o.DecodeRawBytes(false) - if e != nil { - return e - } - - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := structPointer_Interface(bas, p.stype) - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, false, bas) - o.buf = obuf - o.index = oi - - return err -} - -// Decode a slice of embedded messages. 
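Editor's note: the dec_new_map removed above parses each map entry as a restricted two-field message (key = field 1, value = field 2) wrapped in a length-delimited field, as its comments describe. The standalone sketch below is not part of this change; it hand-builds one such entry for a hypothetical map<int32, string> field number 4 using Buffer primitives this package keeps, purely to make that wire layout concrete (field number and values are made up).

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// Inner "entry" message: key (field 1, varint) then value (field 2, bytes).
	entry := proto.NewBuffer(nil)
	entry.EncodeVarint(1<<3 | 0) // tag: field 1, wire type 0 (varint)
	entry.EncodeVarint(7)        // key = 7
	entry.EncodeVarint(2<<3 | 2) // tag: field 2, wire type 2 (length-delimited)
	entry.EncodeStringBytes("seven")

	// Outer message: the map field itself is a length-delimited field (here #4).
	msg := proto.NewBuffer(nil)
	msg.EncodeVarint(4<<3 | 2) // tag: field 4, wire type 2
	msg.EncodeRawBytes(entry.Bytes())

	fmt.Printf("map entry on the wire: % x\n", msg.Bytes())
}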
-func (o *Buffer) dec_slice_struct_message(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, false, base) -} - -// Decode a slice of embedded groups. -func (o *Buffer) dec_slice_struct_group(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, true, base) -} - -// Decode a slice of structs ([]*struct). -func (o *Buffer) dec_slice_struct(p *Properties, is_group bool, base structPointer) error { - v := reflect.New(p.stype) - bas := toStructPointer(v) - structPointer_StructPointerSlice(base, p.field).Append(bas) - - if is_group { - err := o.unmarshalType(p.stype, p.sprop, is_group, bas) - return err - } - - raw, err := o.DecodeRawBytes(false) - if err != nil { - return err - } - - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := v.Interface() - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, is_group, bas) - - o.buf = obuf - o.index = oi - + // Slow workaround for messages that aren't Unmarshalers. + // This includes some hand-coded .pb.go files and + // bootstrap protos. + // TODO: fix all of those and then add Unmarshal to + // the Message interface. Then: + // The cast above and code below can be deleted. + // The old unmarshaler can be deleted. + // Clients can call Unmarshal directly (can already do that, actually). + var info InternalMessageInfo + err := info.Unmarshal(pb, p.buf[p.index:]) + p.index = len(p.buf) return err } diff --git a/vendor/github.com/golang/protobuf/proto/decode_test.go b/vendor/github.com/golang/protobuf/proto/decode_test.go index 2c4c31d1..949be3ab 100644 --- a/vendor/github.com/golang/protobuf/proto/decode_test.go +++ b/vendor/github.com/golang/protobuf/proto/decode_test.go @@ -41,10 +41,7 @@ import ( tpb "github.com/golang/protobuf/proto/proto3_proto" ) -var ( - bytesBlackhole []byte - msgBlackhole = new(tpb.Message) -) +var msgBlackhole = new(tpb.Message) // BenchmarkVarint32ArraySmall shows the performance on an array of small int32 fields (1 and // 2 bytes long). diff --git a/vendor/github.com/golang/protobuf/proto/discard.go b/vendor/github.com/golang/protobuf/proto/discard.go new file mode 100644 index 00000000..dea2617c --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/discard.go @@ -0,0 +1,350 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "fmt" + "reflect" + "strings" + "sync" + "sync/atomic" +) + +type generatedDiscarder interface { + XXX_DiscardUnknown() +} + +// DiscardUnknown recursively discards all unknown fields from this message +// and all embedded messages. +// +// When unmarshaling a message with unrecognized fields, the tags and values +// of such fields are preserved in the Message. This allows a later call to +// marshal to be able to produce a message that continues to have those +// unrecognized fields. To avoid this, DiscardUnknown is used to +// explicitly clear the unknown fields after unmarshaling. +// +// For proto2 messages, the unknown fields of message extensions are only +// discarded from messages that have been accessed via GetExtension. +func DiscardUnknown(m Message) { + if m, ok := m.(generatedDiscarder); ok { + m.XXX_DiscardUnknown() + return + } + // TODO: Dynamically populate a InternalMessageInfo for legacy messages, + // but the master branch has no implementation for InternalMessageInfo, + // so it would be more work to replicate that approach. + discardLegacy(m) +} + +// DiscardUnknown recursively discards all unknown fields. +func (a *InternalMessageInfo) DiscardUnknown(m Message) { + di := atomicLoadDiscardInfo(&a.discard) + if di == nil { + di = getDiscardInfo(reflect.TypeOf(m).Elem()) + atomicStoreDiscardInfo(&a.discard, di) + } + di.discard(toPointer(&m)) +} + +type discardInfo struct { + typ reflect.Type + + initialized int32 // 0: only typ is valid, 1: everything is valid + lock sync.Mutex + + fields []discardFieldInfo + unrecognized field +} + +type discardFieldInfo struct { + field field // Offset of field, guaranteed to be valid + discard func(src pointer) +} + +var ( + discardInfoMap = map[reflect.Type]*discardInfo{} + discardInfoLock sync.Mutex +) + +func getDiscardInfo(t reflect.Type) *discardInfo { + discardInfoLock.Lock() + defer discardInfoLock.Unlock() + di := discardInfoMap[t] + if di == nil { + di = &discardInfo{typ: t} + discardInfoMap[t] = di + } + return di +} + +func (di *discardInfo) discard(src pointer) { + if src.isNil() { + return // Nothing to do. + } + + if atomic.LoadInt32(&di.initialized) == 0 { + di.computeDiscardInfo() + } + + for _, fi := range di.fields { + sfp := src.offset(fi.field) + fi.discard(sfp) + } + + // For proto2 messages, only discard unknown fields in message extensions + // that have been accessed via GetExtension. + if em, err := extendable(src.asPointerTo(di.typ).Interface()); err == nil { + // Ignore lock since DiscardUnknown is not concurrency safe. 
+ emm, _ := em.extensionsRead() + for _, mx := range emm { + if m, ok := mx.value.(Message); ok { + DiscardUnknown(m) + } + } + } + + if di.unrecognized.IsValid() { + *src.offset(di.unrecognized).toBytes() = nil + } +} + +func (di *discardInfo) computeDiscardInfo() { + di.lock.Lock() + defer di.lock.Unlock() + if di.initialized != 0 { + return + } + t := di.typ + n := t.NumField() + + for i := 0; i < n; i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + + dfi := discardFieldInfo{field: toField(&f)} + tf := f.Type + + // Unwrap tf to get its most basic type. + var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic(fmt.Sprintf("%v.%s cannot be a slice of pointers to primitive types", t, f.Name)) + } + + switch tf.Kind() { + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("%v.%s cannot be a direct struct value", t, f.Name)) + case isSlice: // E.g., []*pb.T + di := getDiscardInfo(tf) + dfi.discard = func(src pointer) { + sps := src.getPointerSlice() + for _, sp := range sps { + if !sp.isNil() { + di.discard(sp) + } + } + } + default: // E.g., *pb.T + di := getDiscardInfo(tf) + dfi.discard = func(src pointer) { + sp := src.getPointer() + if !sp.isNil() { + di.discard(sp) + } + } + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%v.%s cannot be a pointer to a map or a slice of map values", t, f.Name)) + default: // E.g., map[K]V + if tf.Elem().Kind() == reflect.Ptr { // Proto struct (e.g., *T) + dfi.discard = func(src pointer) { + sm := src.asPointerTo(tf).Elem() + if sm.Len() == 0 { + return + } + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + DiscardUnknown(val.Interface().(Message)) + } + } + } else { + dfi.discard = func(pointer) {} // Noop + } + } + case reflect.Interface: + // Must be oneof field. + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%v.%s cannot be a pointer to a interface or a slice of interface values", t, f.Name)) + default: // E.g., interface{} + // TODO: Make this faster? + dfi.discard = func(src pointer) { + su := src.asPointerTo(tf).Elem() + if !su.IsNil() { + sv := su.Elem().Elem().Field(0) + if sv.Kind() == reflect.Ptr && sv.IsNil() { + return + } + switch sv.Type().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + DiscardUnknown(sv.Interface().(Message)) + } + } + } + } + default: + continue + } + di.fields = append(di.fields, dfi) + } + + di.unrecognized = invalidField + if f, ok := t.FieldByName("XXX_unrecognized"); ok { + if f.Type != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + di.unrecognized = toField(&f) + } + + atomic.StoreInt32(&di.initialized, 1) +} + +func discardLegacy(m Message) { + v := reflect.ValueOf(m) + if v.Kind() != reflect.Ptr || v.IsNil() { + return + } + v = v.Elem() + if v.Kind() != reflect.Struct { + return + } + t := v.Type() + + for i := 0; i < v.NumField(); i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + vf := v.Field(i) + tf := f.Type + + // Unwrap tf to get its most basic type. 
+ var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic(fmt.Sprintf("%T.%s cannot be a slice of pointers to primitive types", m, f.Name)) + } + + switch tf.Kind() { + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("%T.%s cannot be a direct struct value", m, f.Name)) + case isSlice: // E.g., []*pb.T + for j := 0; j < vf.Len(); j++ { + discardLegacy(vf.Index(j).Interface().(Message)) + } + default: // E.g., *pb.T + discardLegacy(vf.Interface().(Message)) + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%T.%s cannot be a pointer to a map or a slice of map values", m, f.Name)) + default: // E.g., map[K]V + tv := vf.Type().Elem() + if tv.Kind() == reflect.Ptr && tv.Implements(protoMessageType) { // Proto struct (e.g., *T) + for _, key := range vf.MapKeys() { + val := vf.MapIndex(key) + discardLegacy(val.Interface().(Message)) + } + } + } + case reflect.Interface: + // Must be oneof field. + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%T.%s cannot be a pointer to a interface or a slice of interface values", m, f.Name)) + default: // E.g., test_proto.isCommunique_Union interface + if !vf.IsNil() && f.Tag.Get("protobuf_oneof") != "" { + vf = vf.Elem() // E.g., *test_proto.Communique_Msg + if !vf.IsNil() { + vf = vf.Elem() // E.g., test_proto.Communique_Msg + vf = vf.Field(0) // E.g., Proto struct (e.g., *T) or primitive value + if vf.Kind() == reflect.Ptr { + discardLegacy(vf.Interface().(Message)) + } + } + } + } + } + } + + if vf := v.FieldByName("XXX_unrecognized"); vf.IsValid() { + if vf.Type() != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + vf.Set(reflect.ValueOf([]byte(nil))) + } + + // For proto2 messages, only discard unknown fields in message extensions + // that have been accessed via GetExtension. + if em, err := extendable(m); err == nil { + // Ignore lock since discardLegacy is not concurrency safe. + emm, _ := em.extensionsRead() + for _, mx := range emm { + if m, ok := mx.value.(Message); ok { + discardLegacy(m) + } + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/discard_test.go b/vendor/github.com/golang/protobuf/proto/discard_test.go new file mode 100644 index 00000000..a2ff5509 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/discard_test.go @@ -0,0 +1,170 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "testing" + + "github.com/golang/protobuf/proto" + + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + pb "github.com/golang/protobuf/proto/test_proto" +) + +func TestDiscardUnknown(t *testing.T) { + tests := []struct { + desc string + in, want proto.Message + }{{ + desc: "Nil", + in: nil, want: nil, // Should not panic + }, { + desc: "NilPtr", + in: (*proto3pb.Message)(nil), want: (*proto3pb.Message)(nil), // Should not panic + }, { + desc: "Nested", + in: &proto3pb.Message{ + Name: "Aaron", + Nested: &proto3pb.Nested{Cute: true, XXX_unrecognized: []byte("blah")}, + XXX_unrecognized: []byte("blah"), + }, + want: &proto3pb.Message{ + Name: "Aaron", + Nested: &proto3pb.Nested{Cute: true}, + }, + }, { + desc: "Slice", + in: &proto3pb.Message{ + Name: "Aaron", + Children: []*proto3pb.Message{ + {Name: "Sarah", XXX_unrecognized: []byte("blah")}, + {Name: "Abraham", XXX_unrecognized: []byte("blah")}, + }, + XXX_unrecognized: []byte("blah"), + }, + want: &proto3pb.Message{ + Name: "Aaron", + Children: []*proto3pb.Message{ + {Name: "Sarah"}, + {Name: "Abraham"}, + }, + }, + }, { + desc: "OneOf", + in: &pb.Communique{ + Union: &pb.Communique_Msg{&pb.Strings{ + StringField: proto.String("123"), + XXX_unrecognized: []byte("blah"), + }}, + XXX_unrecognized: []byte("blah"), + }, + want: &pb.Communique{ + Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123")}}, + }, + }, { + desc: "Map", + in: &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4002: &pb.FloatingPoint{ + Exact: proto.Bool(true), + XXX_unrecognized: []byte("blah"), + }, + }}, + want: &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4002: &pb.FloatingPoint{Exact: proto.Bool(true)}, + }}, + }, { + desc: "Extension", + in: func() proto.Message { + m := &pb.MyMessage{ + Count: proto.Int32(42), + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(6), + XXX_unrecognized: []byte("blah"), + }, + XXX_unrecognized: []byte("blah"), + } + proto.SetExtension(m, pb.E_Ext_More, &pb.Ext{ + Data: proto.String("extension"), + XXX_unrecognized: []byte("blah"), + }) + return m + }(), + want: func() proto.Message { + m := &pb.MyMessage{ + Count: proto.Int32(42), + Somegroup: &pb.MyMessage_SomeGroup{GroupField: proto.Int32(6)}, + } + proto.SetExtension(m, pb.E_Ext_More, &pb.Ext{Data: proto.String("extension")}) + return m + }(), + }} + + // Test the legacy code path. + for _, tt := range tests { + // Clone the input so that we don't alter the original. 
+ in := tt.in + if in != nil { + in = proto.Clone(tt.in) + } + + var m LegacyMessage + m.Message, _ = in.(*proto3pb.Message) + m.Communique, _ = in.(*pb.Communique) + m.MessageWithMap, _ = in.(*pb.MessageWithMap) + m.MyMessage, _ = in.(*pb.MyMessage) + proto.DiscardUnknown(&m) + if !proto.Equal(in, tt.want) { + t.Errorf("test %s/Legacy, expected unknown fields to be discarded\ngot %v\nwant %v", tt.desc, in, tt.want) + } + } + + for _, tt := range tests { + proto.DiscardUnknown(tt.in) + if !proto.Equal(tt.in, tt.want) { + t.Errorf("test %s, expected unknown fields to be discarded\ngot %v\nwant %v", tt.desc, tt.in, tt.want) + } + } +} + +// LegacyMessage is a proto.Message that has several nested messages. +// This does not have the XXX_DiscardUnknown method and so forces DiscardUnknown +// to use the legacy fallback logic. +type LegacyMessage struct { + Message *proto3pb.Message + Communique *pb.Communique + MessageWithMap *pb.MessageWithMap + MyMessage *pb.MyMessage +} + +func (m *LegacyMessage) Reset() { *m = LegacyMessage{} } +func (m *LegacyMessage) String() string { return proto.CompactTextString(m) } +func (*LegacyMessage) ProtoMessage() {} diff --git a/vendor/github.com/golang/protobuf/proto/encode.go b/vendor/github.com/golang/protobuf/proto/encode.go index 2b30f846..c27d35f8 100644 --- a/vendor/github.com/golang/protobuf/proto/encode.go +++ b/vendor/github.com/golang/protobuf/proto/encode.go @@ -39,7 +39,6 @@ import ( "errors" "fmt" "reflect" - "sort" ) // RequiredNotSetError is the error returned if Marshal is called with @@ -82,10 +81,6 @@ var ( const maxVarintBytes = 10 // maximum length of a varint -// maxMarshalSize is the largest allowed size of an encoded protobuf, -// since C++ and Java use signed int32s for the size. -const maxMarshalSize = 1<<31 - 1 - // EncodeVarint returns the varint encoding of x. // This is the format for the // int32, int64, uint32, uint64, bool, and enum @@ -119,18 +114,27 @@ func (p *Buffer) EncodeVarint(x uint64) error { // SizeVarint returns the varint encoding size of an integer. func SizeVarint(x uint64) int { - return sizeVarint(x) -} - -func sizeVarint(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } + switch { + case x < 1<<7: + return 1 + case x < 1<<14: + return 2 + case x < 1<<21: + return 3 + case x < 1<<28: + return 4 + case x < 1<<35: + return 5 + case x < 1<<42: + return 6 + case x < 1<<49: + return 7 + case x < 1<<56: + return 8 + case x < 1<<63: + return 9 } - return n + return 10 } // EncodeFixed64 writes a 64-bit integer to the Buffer. @@ -149,10 +153,6 @@ func (p *Buffer) EncodeFixed64(x uint64) error { return nil } -func sizeFixed64(x uint64) int { - return 8 -} - // EncodeFixed32 writes a 32-bit integer to the Buffer. // This is the format for the // fixed32, sfixed32, and float protocol buffer types. @@ -165,10 +165,6 @@ func (p *Buffer) EncodeFixed32(x uint64) error { return nil } -func sizeFixed32(x uint64) int { - return 4 -} - // EncodeZigzag64 writes a zigzag-encoded 64-bit integer // to the Buffer. // This is the format used for the sint64 protocol buffer type. @@ -177,10 +173,6 @@ func (p *Buffer) EncodeZigzag64(x uint64) error { return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func sizeZigzag64(x uint64) int { - return sizeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} - // EncodeZigzag32 writes a zigzag-encoded 32-bit integer // to the Buffer. // This is the format used for the sint32 protocol buffer type. 
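Editor's note: the new SizeVarint above replaces the old shift loop with a switch over magnitude thresholds; each varint byte carries 7 payload bits, so values below 1<<7 take one byte, below 1<<14 two, and so on up to ten bytes for the largest uint64. A minimal sketch (not part of the diff; varintLen is a made-up name) expressing the same rule as a loop, useful for sanity-checking the cut-offs:

package main

import "fmt"

// varintLen counts one byte per 7 bits of payload, matching the
// thresholds in the switch-based SizeVarint.
func varintLen(x uint64) int {
	n := 1
	for x >= 1<<7 {
		x >>= 7
		n++
	}
	return n
}

func main() {
	for _, x := range []uint64{0, 127, 128, 16383, 16384, 1<<63 - 1, 1<<64 - 1} {
		fmt.Printf("varintLen(%d) = %d\n", x, varintLen(x))
	}
}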
@@ -189,10 +181,6 @@ func (p *Buffer) EncodeZigzag32(x uint64) error { return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) } -func sizeZigzag32(x uint64) int { - return sizeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - // EncodeRawBytes writes a count-delimited byte buffer to the Buffer. // This is the format used for the bytes protocol buffer // type and for embedded messages. @@ -202,11 +190,6 @@ func (p *Buffer) EncodeRawBytes(b []byte) error { return nil } -func sizeRawBytes(b []byte) int { - return sizeVarint(uint64(len(b))) + - len(b) -} - // EncodeStringBytes writes an encoded string to the Buffer. // This is the format used for the proto2 string type. func (p *Buffer) EncodeStringBytes(s string) error { @@ -215,319 +198,17 @@ func (p *Buffer) EncodeStringBytes(s string) error { return nil } -func sizeStringBytes(s string) int { - return sizeVarint(uint64(len(s))) + - len(s) -} - // Marshaler is the interface representing objects that can marshal themselves. type Marshaler interface { Marshal() ([]byte, error) } -// Marshal takes the protocol buffer -// and encodes it into the wire format, returning the data. -func Marshal(pb Message) ([]byte, error) { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - return m.Marshal() - } - p := NewBuffer(nil) - err := p.Marshal(pb) - if p.buf == nil && err == nil { - // Return a non-nil slice on success. - return []byte{}, nil - } - return p.buf, err -} - // EncodeMessage writes the protocol buffer to the Buffer, // prefixed by a varint-encoded length. func (p *Buffer) EncodeMessage(pb Message) error { - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - var state errorState - err = p.enc_len_struct(GetProperties(t.Elem()), base, &state) - } - return err -} - -// Marshal takes the protocol buffer -// and encodes it into the wire format, writing the result to the -// Buffer. -func (p *Buffer) Marshal(pb Message) error { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - data, err := m.Marshal() - p.buf = append(p.buf, data...) - return err - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - err = p.enc_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Encode++ // Parens are to work around a goimports bug. - } - - if len(p.buf) > maxMarshalSize { - return ErrTooLarge - } - return err -} - -// Size returns the encoded size of a protocol buffer. -func Size(pb Message) (n int) { - // Can the object marshal itself? If so, Size is slow. - // TODO: add Size to Marshaler, or add a Sizer interface. - if m, ok := pb.(Marshaler); ok { - b, _ := m.Marshal() - return len(b) - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return 0 - } - if err == nil { - n = size_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Size++ // Parens are to work around a goimports bug. - } - - return -} - -// Individual type encoders. - -// Encode a bool. -func (o *Buffer) enc_bool(p *Properties, base structPointer) error { - v := *structPointer_Bool(base, p.field) - if v == nil { - return ErrNil - } - x := 0 - if *v { - x = 1 - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_bool(p *Properties, base structPointer) error { - v := *structPointer_BoolVal(base, p.field) - if !v { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, 1) - return nil -} - -func size_bool(p *Properties, base structPointer) int { - v := *structPointer_Bool(base, p.field) - if v == nil { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -func size_proto3_bool(p *Properties, base structPointer) int { - v := *structPointer_BoolVal(base, p.field) - if !v && !p.oneof { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -// Encode an int32. -func (o *Buffer) enc_int32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode a uint32. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := word32_Get(v) - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := word32_Get(v) - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode an int64. -func (o *Buffer) enc_int64(p *Properties, base structPointer) error { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return ErrNil - } - x := word64_Get(v) - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, x) - return nil -} - -func (o *Buffer) enc_proto3_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, x) - return nil -} - -func size_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return 0 - } - x := word64_Get(v) - n += len(p.tagcode) - n += p.valSize(x) - return -} - -func size_proto3_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(x) - return -} - -// Encode a string. -func (o *Buffer) enc_string(p *Properties, base structPointer) error { - v := *structPointer_String(base, p.field) - if v == nil { - return ErrNil - } - x := *v - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(x) - return nil -} - -func (o *Buffer) enc_proto3_string(p *Properties, base structPointer) error { - v := *structPointer_StringVal(base, p.field) - if v == "" { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(v) - return nil -} - -func size_string(p *Properties, base structPointer) (n int) { - v := *structPointer_String(base, p.field) - if v == nil { - return 0 - } - x := *v - n += len(p.tagcode) - n += sizeStringBytes(x) - return -} - -func size_proto3_string(p *Properties, base structPointer) (n int) { - v := *structPointer_StringVal(base, p.field) - if v == "" && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeStringBytes(v) - return + siz := Size(pb) + p.EncodeVarint(uint64(siz)) + return p.Marshal(pb) } // All protocol buffer fields are nillable, but be careful. @@ -538,825 +219,3 @@ func isNil(v reflect.Value) bool { } return false } - -// Encode a message struct. -func (o *Buffer) enc_struct_message(p *Properties, base structPointer) error { - var state errorState - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return ErrNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - return state.err - } - - o.buf = append(o.buf, p.tagcode...) - return o.enc_len_struct(p.sprop, structp, &state) -} - -func size_struct_message(p *Properties, base structPointer) int { - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return 0 - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n0 := len(p.tagcode) - n1 := sizeRawBytes(data) - return n0 + n1 - } - - n0 := len(p.tagcode) - n1 := size_struct(p.sprop, structp) - n2 := sizeVarint(uint64(n1)) // size of encoded length - return n0 + n1 + n2 -} - -// Encode a group struct. 
-func (o *Buffer) enc_struct_group(p *Properties, base structPointer) error { - var state errorState - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return ErrNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - err := o.enc_struct(p.sprop, b) - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return state.err -} - -func size_struct_group(p *Properties, base structPointer) (n int) { - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return 0 - } - - n += sizeVarint(uint64((p.Tag << 3) | WireStartGroup)) - n += size_struct(p.sprop, b) - n += sizeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return -} - -// Encode a slice of bools ([]bool). -func (o *Buffer) enc_slice_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - for _, x := range s { - o.buf = append(o.buf, p.tagcode...) - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_bool(p *Properties, base structPointer) int { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - return l * (len(p.tagcode) + 1) // each bool takes exactly one byte -} - -// Encode a slice of bools ([]bool) in packed format. -func (o *Buffer) enc_slice_packed_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(l)) // each bool takes exactly one byte - for _, x := range s { - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_packed_bool(p *Properties, base structPointer) (n int) { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - n += len(p.tagcode) - n += sizeVarint(uint64(l)) - n += l // each bool takes exactly one byte - return -} - -// Encode a slice of bytes ([]byte). -func (o *Buffer) enc_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if s == nil { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func (o *Buffer) enc_proto3_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func size_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if s == nil && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -func size_proto3_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -// Encode a slice of int32s ([]int32). -func (o *Buffer) enc_slice_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of int32s ([]int32) in packed format. -func (o *Buffer) enc_slice_packed_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(buf, uint64(x)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - bufSize += p.valSize(uint64(x)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of uint32s ([]uint32). -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - x := s.Index(i) - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := s.Index(i) - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of uint32s ([]uint32) in packed format. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_packed_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, uint64(s.Index(i))) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(uint64(s.Index(i))) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of int64s ([]int64). -func (o *Buffer) enc_slice_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, s.Index(i)) - } - return nil -} - -func size_slice_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - n += p.valSize(s.Index(i)) - } - return -} - -// Encode a slice of int64s ([]int64) in packed format. -func (o *Buffer) enc_slice_packed_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, s.Index(i)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(s.Index(i)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of slice of bytes ([][]byte). -func (o *Buffer) enc_slice_slice_byte(p *Properties, base structPointer) error { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(ss[i]) - } - return nil -} - -func size_slice_slice_byte(p *Properties, base structPointer) (n int) { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return 0 - } - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeRawBytes(ss[i]) - } - return -} - -// Encode a slice of strings ([]string). -func (o *Buffer) enc_slice_string(p *Properties, base structPointer) error { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(ss[i]) - } - return nil -} - -func size_slice_string(p *Properties, base structPointer) (n int) { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeStringBytes(ss[i]) - } - return -} - -// Encode a slice of message structs ([]*struct). -func (o *Buffer) enc_slice_struct_message(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return errRepeatedHasNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - continue - } - - o.buf = append(o.buf, p.tagcode...) - err := o.enc_len_struct(p.sprop, structp, &state) - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - } - return state.err -} - -func size_slice_struct_message(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return // return the size up to this point - } - - // Can the object marshal itself? 
- if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n += sizeRawBytes(data) - continue - } - - n0 := size_struct(p.sprop, structp) - n1 := sizeVarint(uint64(n0)) // size of encoded length - n += n0 + n1 - } - return -} - -// Encode a slice of group structs ([]*struct). -func (o *Buffer) enc_slice_struct_group(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return errRepeatedHasNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - - err := o.enc_struct(p.sprop, b) - - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - } - return state.err -} - -func size_slice_struct_group(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - n += l * sizeVarint(uint64((p.Tag<<3)|WireStartGroup)) - n += l * sizeVarint(uint64((p.Tag<<3)|WireEndGroup)) - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return // return size up to this point - } - - n += size_struct(p.sprop, b) - } - return -} - -// Encode an extension map. -func (o *Buffer) enc_map(p *Properties, base structPointer) error { - exts := structPointer_ExtMap(base, p.field) - if err := encodeExtensionsMap(*exts); err != nil { - return err - } - - return o.enc_map_body(*exts) -} - -func (o *Buffer) enc_exts(p *Properties, base structPointer) error { - exts := structPointer_Extensions(base, p.field) - - v, mu := exts.extensionsRead() - if v == nil { - return nil - } - - mu.Lock() - defer mu.Unlock() - if err := encodeExtensionsMap(v); err != nil { - return err - } - - return o.enc_map_body(v) -} - -func (o *Buffer) enc_map_body(v map[int32]Extension) error { - // Fast-path for common cases: zero or one extensions. - if len(v) <= 1 { - for _, e := range v { - o.buf = append(o.buf, e.enc...) - } - return nil - } - - // Sort keys to provide a deterministic encoding. - keys := make([]int, 0, len(v)) - for k := range v { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, k := range keys { - o.buf = append(o.buf, v[int32(k)].enc...) - } - return nil -} - -func size_map(p *Properties, base structPointer) int { - v := structPointer_ExtMap(base, p.field) - return extensionsMapSize(*v) -} - -func size_exts(p *Properties, base structPointer) int { - v := structPointer_Extensions(base, p.field) - return extensionsSize(v) -} - -// Encode a map field. -func (o *Buffer) enc_new_map(p *Properties, base structPointer) error { - var state errorState // XXX: or do we need to plumb this through? - - /* - A map defined as - map map_field = N; - is encoded in the same way as - message MapFieldEntry { - key_type key = 1; - value_type value = 2; - } - repeated MapFieldEntry map_field = N; - */ - - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - if v.Len() == 0 { - return nil - } - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - enc := func() error { - if err := p.mkeyprop.enc(o, p.mkeyprop, keybase); err != nil { - return err - } - if err := p.mvalprop.enc(o, p.mvalprop, valbase); err != nil && err != ErrNil { - return err - } - return nil - } - - // Don't sort map keys. It is not required by the spec, and C++ doesn't do it. 
- for _, key := range v.MapKeys() { - val := v.MapIndex(key) - - keycopy.Set(key) - valcopy.Set(val) - - o.buf = append(o.buf, p.tagcode...) - if err := o.enc_len_thing(enc, &state); err != nil { - return err - } - } - return nil -} - -func size_new_map(p *Properties, base structPointer) int { - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - n := 0 - for _, key := range v.MapKeys() { - val := v.MapIndex(key) - keycopy.Set(key) - valcopy.Set(val) - - // Tag codes for key and val are the responsibility of the sub-sizer. - keysize := p.mkeyprop.size(p.mkeyprop, keybase) - valsize := p.mvalprop.size(p.mvalprop, valbase) - entry := keysize + valsize - // Add on tag code and length of map entry itself. - n += len(p.tagcode) + sizeVarint(uint64(entry)) + entry - } - return n -} - -// mapEncodeScratch returns a new reflect.Value matching the map's value type, -// and a structPointer suitable for passing to an encoder or sizer. -func mapEncodeScratch(mapType reflect.Type) (keycopy, valcopy reflect.Value, keybase, valbase structPointer) { - // Prepare addressable doubly-indirect placeholders for the key and value types. - // This is needed because the element-type encoders expect **T, but the map iteration produces T. - - keycopy = reflect.New(mapType.Key()).Elem() // addressable K - keyptr := reflect.New(reflect.PtrTo(keycopy.Type())).Elem() // addressable *K - keyptr.Set(keycopy.Addr()) // - keybase = toStructPointer(keyptr.Addr()) // **K - - // Value types are more varied and require special handling. - switch mapType.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valcopy = reflect.ValueOf(&dummy).Elem() // addressable []byte - valbase = toStructPointer(valcopy.Addr()) - case reflect.Ptr: - // message; the generated field type is map[K]*Msg (so V is *Msg), - // so we only need one level of indirection. - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valbase = toStructPointer(valcopy.Addr()) - default: - // everything else - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valptr := reflect.New(reflect.PtrTo(valcopy.Type())).Elem() // addressable *V - valptr.Set(valcopy.Addr()) // - valbase = toStructPointer(valptr.Addr()) // **V - } - return -} - -// Encode a struct. -func (o *Buffer) enc_struct(prop *StructProperties, base structPointer) error { - var state errorState - // Encode fields in tag order so that decoders may use optimizations - // that depend on the ordering. - // https://developers.google.com/protocol-buffers/docs/encoding#order - for _, i := range prop.order { - p := prop.Prop[i] - if p.enc != nil { - err := p.enc(o, p, base) - if err != nil { - if err == ErrNil { - if p.Required && state.err == nil { - state.err = &RequiredNotSetError{p.Name} - } - } else if err == errRepeatedHasNil { - // Give more context to nil values in repeated fields. - return errors.New("repeated field " + p.OrigName + " has nil element") - } else if !state.shouldContinue(err, p) { - return err - } - } - if len(o.buf) > maxMarshalSize { - return ErrTooLarge - } - } - } - - // Do oneof fields. - if prop.oneofMarshaler != nil { - m := structPointer_Interface(base, prop.stype).(Message) - if err := prop.oneofMarshaler(m, o); err == ErrNil { - return errOneofHasNil - } else if err != nil { - return err - } - } - - // Add unrecognized fields at the end. 
- if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - if len(o.buf)+len(v) > maxMarshalSize { - return ErrTooLarge - } - if len(v) > 0 { - o.buf = append(o.buf, v...) - } - } - - return state.err -} - -func size_struct(prop *StructProperties, base structPointer) (n int) { - for _, i := range prop.order { - p := prop.Prop[i] - if p.size != nil { - n += p.size(p, base) - } - } - - // Add unrecognized fields at the end. - if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - n += len(v) - } - - // Factor in any oneof fields. - if prop.oneofSizer != nil { - m := structPointer_Interface(base, prop.stype).(Message) - n += prop.oneofSizer(m) - } - - return -} - -var zeroes [20]byte // longer than any conceivable sizeVarint - -// Encode a struct, preceded by its encoded length (as a varint). -func (o *Buffer) enc_len_struct(prop *StructProperties, base structPointer, state *errorState) error { - return o.enc_len_thing(func() error { return o.enc_struct(prop, base) }, state) -} - -// Encode something, preceded by its encoded length (as a varint). -func (o *Buffer) enc_len_thing(enc func() error, state *errorState) error { - iLen := len(o.buf) - o.buf = append(o.buf, 0, 0, 0, 0) // reserve four bytes for length - iMsg := len(o.buf) - err := enc() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - lMsg := len(o.buf) - iMsg - lLen := sizeVarint(uint64(lMsg)) - switch x := lLen - (iMsg - iLen); { - case x > 0: // actual length is x bytes larger than the space we reserved - // Move msg x bytes right. - o.buf = append(o.buf, zeroes[:x]...) - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - case x < 0: // actual length is x bytes smaller than the space we reserved - // Move msg x bytes left. - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - o.buf = o.buf[:len(o.buf)+x] // x is negative - } - // Encode the length in the reserved space. - o.buf = o.buf[:iLen] - o.EncodeVarint(uint64(lMsg)) - o.buf = o.buf[:len(o.buf)+lMsg] - return state.err -} - -// errorState maintains the first error that occurs and updates that error -// with additional context. -type errorState struct { - err error -} - -// shouldContinue reports whether encoding should continue upon encountering the -// given error. If the error is RequiredNotSetError, shouldContinue returns true -// and, if this is the first appearance of that error, remembers it for future -// reporting. -// -// If prop is not nil, it may update any error with additional context about the -// field with the error. -func (s *errorState) shouldContinue(err error, prop *Properties) bool { - // Ignore unset required fields. - reqNotSet, ok := err.(*RequiredNotSetError) - if !ok { - return false - } - if s.err == nil { - if prop != nil { - err = &RequiredNotSetError{prop.Name + "." 
+ reqNotSet.field} - } - s.err = err - } - return true -} diff --git a/vendor/github.com/golang/protobuf/proto/equal.go b/vendor/github.com/golang/protobuf/proto/equal.go index 2ed1cf59..d4db5a1c 100644 --- a/vendor/github.com/golang/protobuf/proto/equal.go +++ b/vendor/github.com/golang/protobuf/proto/equal.go @@ -109,15 +109,6 @@ func equalStruct(v1, v2 reflect.Value) bool { // set/unset mismatch return false } - b1, ok := f1.Interface().(raw) - if ok { - b2 := f2.Interface().(raw) - // RawMessage - if !bytes.Equal(b1.Bytes(), b2.Bytes()) { - return false - } - continue - } f1, f2 = f1.Elem(), f2.Elem() } if !equalAny(f1, f2, sprop.Prop[i]) { @@ -146,11 +137,7 @@ func equalStruct(v1, v2 reflect.Value) bool { u1 := uf.Bytes() u2 := v2.FieldByName("XXX_unrecognized").Bytes() - if !bytes.Equal(u1, u2) { - return false - } - - return true + return bytes.Equal(u1, u2) } // v1 and v2 are known to have the same type. @@ -261,6 +248,15 @@ func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { m1, m2 := e1.value, e2.value + if m1 == nil && m2 == nil { + // Both have only encoded form. + if bytes.Equal(e1.enc, e2.enc) { + continue + } + // The bytes are different, but the extensions might still be + // equal. We need to decode them to compare. + } + if m1 != nil && m2 != nil { // Both are unencoded. if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { @@ -276,8 +272,12 @@ func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { desc = m[extNum] } if desc == nil { + // If both have only encoded form and the bytes are the same, + // it is handled above. We get here when the bytes are different. + // We don't know how to decode it, so just compare them as byte + // slices. log.Printf("proto: don't know how to compare extension %d of %v", extNum, base) - continue + return false } var err error if m1 == nil { diff --git a/vendor/github.com/golang/protobuf/proto/equal_test.go b/vendor/github.com/golang/protobuf/proto/equal_test.go index a2febb39..93ff88f3 100644 --- a/vendor/github.com/golang/protobuf/proto/equal_test.go +++ b/vendor/github.com/golang/protobuf/proto/equal_test.go @@ -36,7 +36,7 @@ import ( . "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) // Four identical base messages. @@ -45,6 +45,9 @@ var messageWithoutExtension = &pb.MyMessage{Count: Int32(7)} var messageWithExtension1a = &pb.MyMessage{Count: Int32(7)} var messageWithExtension1b = &pb.MyMessage{Count: Int32(7)} var messageWithExtension2 = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3a = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3b = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3c = &pb.MyMessage{Count: Int32(7)} // Two messages with non-message extensions. var messageWithInt32Extension1 = &pb.MyMessage{Count: Int32(8)} @@ -83,6 +86,20 @@ func init() { if err := SetExtension(messageWithInt32Extension1, pb.E_Ext_Number, Int32(24)); err != nil { panic("SetExtension on Int32-2 failed: " + err.Error()) } + + // messageWithExtension3{a,b,c} has unregistered extension. 
+ if RegisteredExtensions(messageWithExtension3a)[200] != nil { + panic("expect extension 200 unregistered") + } + bytes := []byte{ + 0xc0, 0x0c, 0x01, // id=200, wiretype=0 (varint), data=1 + } + bytes2 := []byte{ + 0xc0, 0x0c, 0x02, // id=200, wiretype=0 (varint), data=2 + } + SetRawExtension(messageWithExtension3a, 200, bytes) + SetRawExtension(messageWithExtension3b, 200, bytes) + SetRawExtension(messageWithExtension3c, 200, bytes2) } var EqualTests = []struct { @@ -142,6 +159,9 @@ var EqualTests = []struct { {"int32 extension vs. itself", messageWithInt32Extension1, messageWithInt32Extension1, true}, {"int32 extension vs. a different int32", messageWithInt32Extension1, messageWithInt32Extension2, false}, + {"unregistered extension same", messageWithExtension3a, messageWithExtension3b, true}, + {"unregistered extension different", messageWithExtension3a, messageWithExtension3c, false}, + { "message with group", &pb.MyMessage{ diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go index eaad2183..816a3b9d 100644 --- a/vendor/github.com/golang/protobuf/proto/extensions.go +++ b/vendor/github.com/golang/protobuf/proto/extensions.go @@ -38,6 +38,7 @@ package proto import ( "errors" "fmt" + "io" "reflect" "strconv" "sync" @@ -91,14 +92,29 @@ func (n notLocker) Unlock() {} // extendable returns the extendableProto interface for the given generated proto message. // If the proto message has the old extension format, it returns a wrapper that implements // the extendableProto interface. -func extendable(p interface{}) (extendableProto, bool) { - if ep, ok := p.(extendableProto); ok { - return ep, ok +func extendable(p interface{}) (extendableProto, error) { + switch p := p.(type) { + case extendableProto: + if isNilPtr(p) { + return nil, fmt.Errorf("proto: nil %T is not extendable", p) + } + return p, nil + case extendableProtoV1: + if isNilPtr(p) { + return nil, fmt.Errorf("proto: nil %T is not extendable", p) + } + return extensionAdapter{p}, nil } - if ep, ok := p.(extendableProtoV1); ok { - return extensionAdapter{ep}, ok - } - return nil, false + // Don't allocate a specific error containing %T: + // this is the hot path for Clone and MarshalText. + return nil, errNotExtendable +} + +var errNotExtendable = errors.New("proto: not an extendable proto.Message") + +func isNilPtr(x interface{}) bool { + v := reflect.ValueOf(x) + return v.Kind() == reflect.Ptr && v.IsNil() } // XXX_InternalExtensions is an internal representation of proto extensions. @@ -143,9 +159,6 @@ func (e *XXX_InternalExtensions) extensionsRead() (map[int32]Extension, sync.Loc return e.p.extensionMap, &e.p.mu } -var extendableProtoType = reflect.TypeOf((*extendableProto)(nil)).Elem() -var extendableProtoV1Type = reflect.TypeOf((*extendableProtoV1)(nil)).Elem() - // ExtensionDesc represents an extension specification. // Used in generated code from the protocol compiler. type ExtensionDesc struct { @@ -179,8 +192,8 @@ type Extension struct { // SetRawExtension is for testing only. 
func SetRawExtension(base Message, id int32, b []byte) { - epb, ok := extendable(base) - if !ok { + epb, err := extendable(base) + if err != nil { return } extmap := epb.extensionsWrite() @@ -205,7 +218,7 @@ func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error { pbi = ea.extendableProtoV1 } if a, b := reflect.TypeOf(pbi), reflect.TypeOf(extension.ExtendedType); a != b { - return errors.New("proto: bad extended type; " + b.String() + " does not extend " + a.String()) + return fmt.Errorf("proto: bad extended type; %v does not extend %v", b, a) } // Check the range. if !isExtensionField(pb, extension.Field) { @@ -250,85 +263,11 @@ func extensionProperties(ed *ExtensionDesc) *Properties { return prop } -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensions(e *XXX_InternalExtensions) error { - m, mu := e.extensionsRead() - if m == nil { - return nil // fast path - } - mu.Lock() - defer mu.Unlock() - return encodeExtensionsMap(m) -} - -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensionsMap(m map[int32]Extension) error { - for k, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - p := NewBuffer(nil) - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - if err := props.enc(p, props, toStructPointer(x)); err != nil { - return err - } - e.enc = p.buf - m[k] = e - } - return nil -} - -func extensionsSize(e *XXX_InternalExtensions) (n int) { - m, mu := e.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - defer mu.Unlock() - return extensionsMapSize(m) -} - -func extensionsMapSize(m map[int32]Extension) (n int) { - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - n += props.size(props, toStructPointer(x)) - } - return -} - // HasExtension returns whether the given extension is present in pb. func HasExtension(pb Message, extension *ExtensionDesc) bool { // TODO: Check types, field numbers, etc.? - epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return false } extmap, mu := epb.extensionsRead() @@ -336,15 +275,15 @@ func HasExtension(pb Message, extension *ExtensionDesc) bool { return false } mu.Lock() - _, ok = extmap[extension.Field] + _, ok := extmap[extension.Field] mu.Unlock() return ok } // ClearExtension removes the given extension from pb. func ClearExtension(pb Message, extension *ExtensionDesc) { - epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return } // TODO: Check types, field numbers, etc.? 
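The switch from a bool to an error return in extendable() mainly exists so that a typed nil message, e.g. (*MyMessage)(nil), is reported as an error instead of panicking later; a plain interface nil check does not catch that case. A minimal standalone sketch of the reflect-based guard (myMessage is a stand-in type, not from this package):

    package main

    import (
        "fmt"
        "reflect"
    )

    type myMessage struct{}

    func isNilPtr(x interface{}) bool {
        v := reflect.ValueOf(x)
        return v.Kind() == reflect.Ptr && v.IsNil()
    }

    func main() {
        var m *myMessage      // typed nil pointer
        var i interface{} = m // interface value wrapping a nil pointer
        fmt.Println(i == nil)    // false: the interface itself is non-nil
        fmt.Println(isNilPtr(i)) // true: the underlying pointer is nil
    }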
@@ -352,16 +291,26 @@ func ClearExtension(pb Message, extension *ExtensionDesc) { delete(extmap, extension.Field) } -// GetExtension parses and returns the given extension of pb. -// If the extension is not present and has no default value it returns ErrMissingExtension. +// GetExtension retrieves a proto2 extended field from pb. +// +// If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil), +// then GetExtension parses the encoded field and returns a Go value of the specified type. +// If the field is not present, then the default value is returned (if one is specified), +// otherwise ErrMissingExtension is reported. +// +// If the descriptor is not type complete (i.e., ExtensionDesc.ExtensionType is nil), +// then GetExtension returns the raw encoded bytes of the field extension. func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return nil, err } - if err := checkExtensionTypes(epb, extension); err != nil { - return nil, err + if extension.ExtendedType != nil { + // can only check type if this is a complete descriptor + if err := checkExtensionTypes(epb, extension); err != nil { + return nil, err + } } emap, mu := epb.extensionsRead() @@ -388,6 +337,11 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { return e.value, nil } + if extension.ExtensionType == nil { + // incomplete descriptor + return e.enc, nil + } + v, err := decodeExtension(e.enc, extension) if err != nil { return nil, err @@ -405,6 +359,11 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { // defaultExtensionValue returns the default value for extension. // If no default for an extension is defined ErrMissingExtension is returned. func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { + if extension.ExtensionType == nil { + // incomplete descriptor, so no default + return nil, ErrMissingExtension + } + t := reflect.TypeOf(extension.ExtensionType) props := extensionProperties(extension) @@ -439,31 +398,28 @@ func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { // decodeExtension decodes an extension encoded in b. func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { - o := NewBuffer(b) - t := reflect.TypeOf(extension.ExtensionType) - - props := extensionProperties(extension) + unmarshal := typeUnmarshaler(t, extension.Tag) // t is a pointer to a struct, pointer to basic type or a slice. - // Allocate a "field" to store the pointer/slice itself; the - // pointer/slice will be stored here. We pass - // the address of this field to props.dec. - // This passes a zero field and a *t and lets props.dec - // interpret it as a *struct{ x t }. + // Allocate space to store the pointer/slice. value := reflect.New(t).Elem() + var err error for { - // Discard wire type and field number varint. It isn't needed. 
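With the GetExtension behavior documented above, a caller that knows only the field number can still retrieve the raw wire bytes by passing an incomplete descriptor. A hedged usage sketch, assuming the vendored proto and test_proto packages at this revision (field 200 is an arbitrary unregistered number):

    package main

    import (
        "fmt"

        "github.com/golang/protobuf/proto"
        pb "github.com/golang/protobuf/proto/test_proto"
    )

    func main() {
        msg := &pb.MyMessage{Count: proto.Int32(1)}
        proto.SetRawExtension(msg, 200, []byte{0xc0, 0x0c, 0x01})

        // Incomplete descriptor: only the field number is known, so the
        // raw encoded bytes (tag included) are returned.
        raw, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: 200})
        if err != nil {
            panic(err)
        }
        fmt.Printf("% x\n", raw.([]byte)) // c0 0c 01
    }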
- if _, err := o.DecodeVarint(); err != nil { + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + wire := int(x) & 7 + + b, err = unmarshal(b, valToPointer(value.Addr()), wire) + if err != nil { return nil, err } - if err := props.dec(o, props, toStructPointer(value.Addr())); err != nil { - return nil, err - } - - if o.index >= len(o.buf) { + if len(b) == 0 { break } } @@ -473,9 +429,9 @@ func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { // GetExtensions returns a slice of the extensions present in pb that are also listed in es. // The returned slice has the same length as es; missing extensions will appear as nil elements. func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return nil, err } extensions = make([]interface{}, len(es)) for i, e := range es { @@ -494,9 +450,9 @@ func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, e // For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing // just the Field field, which defines the extension's field number. func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { - epb, ok := extendable(pb) - if !ok { - return nil, fmt.Errorf("proto: %T is not an extendable proto.Message", pb) + epb, err := extendable(pb) + if err != nil { + return nil, err } registeredExtensions := RegisteredExtensions(pb) @@ -523,9 +479,9 @@ func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { // SetExtension sets the specified extension of pb to the specified value. func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { - epb, ok := extendable(pb) - if !ok { - return errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return err } if err := checkExtensionTypes(epb, extension); err != nil { return err @@ -550,8 +506,8 @@ func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error // ClearAllExtensions clears all extensions from pb. func ClearAllExtensions(pb Message) { - epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return } m := epb.extensionsWrite() diff --git a/vendor/github.com/golang/protobuf/proto/extensions_test.go b/vendor/github.com/golang/protobuf/proto/extensions_test.go index b6d9114c..dc69fe97 100644 --- a/vendor/github.com/golang/protobuf/proto/extensions_test.go +++ b/vendor/github.com/golang/protobuf/proto/extensions_test.go @@ -34,12 +34,14 @@ package proto_test import ( "bytes" "fmt" + "io" "reflect" "sort" + "strings" "testing" "github.com/golang/protobuf/proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" "golang.org/x/sync/errgroup" ) @@ -64,7 +66,107 @@ func TestGetExtensionsWithMissingExtensions(t *testing.T) { } } -func TestExtensionDescsWithMissingExtensions(t *testing.T) { +func TestGetExtensionWithEmptyBuffer(t *testing.T) { + // Make sure that GetExtension returns an error if its + // undecoded buffer is empty. 
+ msg := &pb.MyMessage{} + proto.SetRawExtension(msg, pb.E_Ext_More.Field, []byte{}) + _, err := proto.GetExtension(msg, pb.E_Ext_More) + if want := io.ErrUnexpectedEOF; err != want { + t.Errorf("unexpected error in GetExtension from empty buffer: got %v, want %v", err, want) + } +} + +func TestGetExtensionForIncompleteDesc(t *testing.T) { + msg := &pb.MyMessage{Count: proto.Int32(0)} + extdesc1 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 123456789, + Name: "a.b", + Tag: "varint,123456789,opt", + } + ext1 := proto.Bool(true) + if err := proto.SetExtension(msg, extdesc1, ext1); err != nil { + t.Fatalf("Could not set ext1: %s", err) + } + extdesc2 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 123456790, + Name: "a.c", + Tag: "bytes,123456790,opt", + } + ext2 := []byte{0, 1, 2, 3, 4, 5, 6, 7} + if err := proto.SetExtension(msg, extdesc2, ext2); err != nil { + t.Fatalf("Could not set ext2: %s", err) + } + extdesc3 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*pb.Ext)(nil), + Field: 123456791, + Name: "a.d", + Tag: "bytes,123456791,opt", + } + ext3 := &pb.Ext{Data: proto.String("foo")} + if err := proto.SetExtension(msg, extdesc3, ext3); err != nil { + t.Fatalf("Could not set ext3: %s", err) + } + + b, err := proto.Marshal(msg) + if err != nil { + t.Fatalf("Could not marshal msg: %v", err) + } + if err := proto.Unmarshal(b, msg); err != nil { + t.Fatalf("Could not unmarshal into msg: %v", err) + } + + var expected proto.Buffer + if err := expected.EncodeVarint(uint64((extdesc1.Field << 3) | proto.WireVarint)); err != nil { + t.Fatalf("failed to compute expected prefix for ext1: %s", err) + } + if err := expected.EncodeVarint(1 /* bool true */); err != nil { + t.Fatalf("failed to compute expected value for ext1: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc1.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext1: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext1: got %v, want %v", b, expected.Bytes()) + } + + expected = proto.Buffer{} // reset + if err := expected.EncodeVarint(uint64((extdesc2.Field << 3) | proto.WireBytes)); err != nil { + t.Fatalf("failed to compute expected prefix for ext2: %s", err) + } + if err := expected.EncodeRawBytes(ext2); err != nil { + t.Fatalf("failed to compute expected value for ext2: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc2.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext2: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext2: got %v, want %v", b, expected.Bytes()) + } + + expected = proto.Buffer{} // reset + if err := expected.EncodeVarint(uint64((extdesc3.Field << 3) | proto.WireBytes)); err != nil { + t.Fatalf("failed to compute expected prefix for ext3: %s", err) + } + if b, err := proto.Marshal(ext3); err != nil { + t.Fatalf("failed to compute expected value for ext3: %s", err) + } else if err := expected.EncodeRawBytes(b); err != nil { + t.Fatalf("failed to compute expected value for ext3: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc3.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext3: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext3: got %v, want %v", b, expected.Bytes()) + } +} + +func 
TestExtensionDescsWithUnregisteredExtensions(t *testing.T) { msg := &pb.MyMessage{Count: proto.Int32(0)} extdesc1 := pb.E_Ext_More if descs, err := proto.ExtensionDescs(msg); len(descs) != 0 || err != nil { @@ -100,7 +202,7 @@ func TestExtensionDescsWithMissingExtensions(t *testing.T) { t.Fatalf("proto.ExtensionDescs: got error %v", err) } sortExtDescs(descs) - wantDescs := []*proto.ExtensionDesc{extdesc1, &proto.ExtensionDesc{Field: extdesc2.Field}} + wantDescs := []*proto.ExtensionDesc{extdesc1, {Field: extdesc2.Field}} if !reflect.DeepEqual(descs, wantDescs) { t.Errorf("proto.ExtensionDescs(msg) sorted extension ids: got %+v, want %+v", descs, wantDescs) } @@ -200,7 +302,7 @@ func TestGetExtensionDefaults(t *testing.T) { {pb.E_DefaultSfixed64, setInt64, int64(51)}, {pb.E_DefaultBool, setBool, true}, {pb.E_DefaultBool, setBool2, true}, - {pb.E_DefaultString, setString, "Hello, string"}, + {pb.E_DefaultString, setString, "Hello, string,def=foo"}, {pb.E_DefaultBytes, setBytes, []byte("Hello, bytes")}, {pb.E_DefaultEnum, setEnum, pb.DefaultsMessage_ONE}, } @@ -287,6 +389,44 @@ func TestGetExtensionDefaults(t *testing.T) { } } +func TestNilMessage(t *testing.T) { + name := "nil interface" + if got, err := proto.GetExtension(nil, pb.E_Ext_More); err == nil { + t.Errorf("%s: got %T %v, expected to fail", name, got, got) + } else if !strings.Contains(err.Error(), "extendable") { + t.Errorf("%s: got error %v, expected not-extendable error", name, err) + } + + // Regression tests: all functions of the Extension API + // used to panic when passed (*M)(nil), where M is a concrete message + // type. Now they handle this gracefully as a no-op or reported error. + var nilMsg *pb.MyMessage + desc := pb.E_Ext_More + + isNotExtendable := func(err error) bool { + return strings.Contains(fmt.Sprint(err), "not extendable") + } + + if proto.HasExtension(nilMsg, desc) { + t.Error("HasExtension(nil) = true") + } + + if _, err := proto.GetExtensions(nilMsg, []*proto.ExtensionDesc{desc}); !isNotExtendable(err) { + t.Errorf("GetExtensions(nil) = %q (wrong error)", err) + } + + if _, err := proto.ExtensionDescs(nilMsg); !isNotExtendable(err) { + t.Errorf("ExtensionDescs(nil) = %q (wrong error)", err) + } + + if err := proto.SetExtension(nilMsg, desc, nil); !isNotExtendable(err) { + t.Errorf("SetExtension(nil) = %q (wrong error)", err) + } + + proto.ClearExtension(nilMsg, desc) // no-op + proto.ClearAllExtensions(nilMsg) // no-op +} + func TestExtensionsRoundTrip(t *testing.T) { msg := &pb.MyMessage{} ext1 := &pb.Ext{ @@ -311,7 +451,7 @@ func TestExtensionsRoundTrip(t *testing.T) { } x, ok := e.(*pb.Ext) if !ok { - t.Errorf("e has type %T, expected testdata.Ext", e) + t.Errorf("e has type %T, expected test_proto.Ext", e) } else if *x.Data != "there" { t.Errorf("SetExtension failed to overwrite, got %+v, not 'there'", x) } @@ -339,7 +479,7 @@ func TestNilExtension(t *testing.T) { } if err := proto.SetExtension(msg, pb.E_Ext_More, (*pb.Ext)(nil)); err == nil { t.Error("expected SetExtension to fail due to a nil extension") - } else if want := "proto: SetExtension called with nil value of type *testdata.Ext"; err.Error() != want { + } else if want := fmt.Sprintf("proto: SetExtension called with nil value of type %T", new(pb.Ext)); err.Error() != want { t.Errorf("expected error %v, got %v", want, err) } // Note: if the behavior of Marshal is ever changed to ignore nil extensions, update @@ -402,8 +542,13 @@ func TestMarshalUnmarshalRepeatedExtension(t *testing.T) { if ext == nil { t.Fatalf("[%s] Invalid extension", 
test.name) } - if !reflect.DeepEqual(ext, test.ext) { - t.Errorf("[%s] Wrong value for ComplexExtension: got: %v want: %v\n", test.name, ext, test.ext) + if len(ext) != len(test.ext) { + t.Errorf("[%s] Wrong length of ComplexExtension: got: %v want: %v\n", test.name, len(ext), len(test.ext)) + } + for i := range test.ext { + if !proto.Equal(ext[i], test.ext[i]) { + t.Errorf("[%s] Wrong value for ComplexExtension[%d]: got: %v want: %v\n", test.name, i, ext[i], test.ext[i]) + } } } } @@ -477,8 +622,8 @@ func TestUnmarshalRepeatingNonRepeatedExtension(t *testing.T) { if ext == nil { t.Fatalf("[%s] Invalid extension", test.name) } - if !reflect.DeepEqual(*ext, want) { - t.Errorf("[%s] Wrong value for ComplexExtension: got: %s want: %s\n", test.name, ext, want) + if !proto.Equal(ext, &want) { + t.Errorf("[%s] Wrong value for ComplexExtension: got: %s want: %s\n", test.name, ext, &want) } } } @@ -509,19 +654,22 @@ func TestClearAllExtensions(t *testing.T) { } func TestMarshalRace(t *testing.T) { - // unregistered extension - desc := &proto.ExtensionDesc{ - ExtendedType: (*pb.MyMessage)(nil), - ExtensionType: (*bool)(nil), - Field: 101010100, - Name: "emptyextension", - Tag: "varint,0,opt", + ext := &pb.Ext{} + m := &pb.MyMessage{Count: proto.Int32(4)} + if err := proto.SetExtension(m, pb.E_Ext_More, ext); err != nil { + t.Fatalf("proto.SetExtension(m, desc, true): got error %q, want nil", err) } - m := &pb.MyMessage{Count: proto.Int32(4)} - if err := proto.SetExtension(m, desc, proto.Bool(true)); err != nil { - t.Errorf("proto.SetExtension(m, desc, true): got error %q, want nil", err) + b, err := proto.Marshal(m) + if err != nil { + t.Fatalf("Could not marshal message: %v", err) } + if err := proto.Unmarshal(b, m); err != nil { + t.Fatalf("Could not unmarshal message: %v", err) + } + // after Unmarshal, the extension is in undecoded form. + // GetExtension will decode it lazily. Make sure this does + // not race against Marshal. var g errgroup.Group for n := 3; n > 0; n-- { @@ -529,6 +677,10 @@ func TestMarshalRace(t *testing.T) { _, err := proto.Marshal(m) return err }) + g.Go(func() error { + _, err := proto.GetExtension(m, pb.E_Ext_More) + return err + }) } if err := g.Wait(); err != nil { t.Fatal(err) diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go index 1c225504..0e2191b8 100644 --- a/vendor/github.com/golang/protobuf/proto/lib.go +++ b/vendor/github.com/golang/protobuf/proto/lib.go @@ -265,6 +265,7 @@ package proto import ( "encoding/json" + "errors" "fmt" "log" "reflect" @@ -273,6 +274,8 @@ import ( "sync" ) +var errInvalidUTF8 = errors.New("proto: invalid UTF-8 string") + // Message is implemented by generated protocol buffer messages. type Message interface { Reset() @@ -309,16 +312,7 @@ type Buffer struct { buf []byte // encode/decode byte stream index int // read point - // pools of basic types to amortize allocation. - bools []bool - uint32s []uint32 - uint64s []uint64 - - // extra pools, only used with pointer_reflect.go - int32s []int32 - int64s []int64 - float32s []float32 - float64s []float64 + deterministic bool } // NewBuffer allocates a new Buffer and initializes its internal data to @@ -343,6 +337,30 @@ func (p *Buffer) SetBuf(s []byte) { // Bytes returns the contents of the Buffer. func (p *Buffer) Bytes() []byte { return p.buf } +// SetDeterministic sets whether to use deterministic serialization. 
+// +// Deterministic serialization guarantees that for a given binary, equal +// messages will always be serialized to the same bytes. This implies: +// +// - Repeated serialization of a message will return the same bytes. +// - Different processes of the same binary (which may be executing on +// different machines) will serialize equal messages to the same bytes. +// +// Note that the deterministic serialization is NOT canonical across +// languages. It is not guaranteed to remain stable over time. It is unstable +// across different builds with schema changes due to unknown fields. +// Users who need canonical serialization (e.g., persistent storage in a +// canonical form, fingerprinting, etc.) should define their own +// canonicalization specification and implement their own serializer rather +// than relying on this API. +// +// If deterministic serialization is requested, map entries will be sorted +// by keys in lexographical order. This is an implementation detail and +// subject to change. +func (p *Buffer) SetDeterministic(deterministic bool) { + p.deterministic = deterministic +} + /* * Helper routines for simplifying the creation of optional fields of basic type. */ @@ -831,22 +849,12 @@ func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMes return sf, false, nil } +// mapKeys returns a sort.Interface to be used for sorting the map keys. // Map fields may have key types of non-float scalars, strings and enums. -// The easiest way to sort them in some deterministic order is to use fmt. -// If this turns out to be inefficient we can always consider other options, -// such as doing a Schwartzian transform. - func mapKeys(vs []reflect.Value) sort.Interface { - s := mapKeySorter{ - vs: vs, - // default Less function: textual comparison - less: func(a, b reflect.Value) bool { - return fmt.Sprint(a.Interface()) < fmt.Sprint(b.Interface()) - }, - } + s := mapKeySorter{vs: vs} - // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps; - // numeric keys are sorted numerically. + // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps. if len(vs) == 0 { return s } @@ -855,6 +863,12 @@ func mapKeys(vs []reflect.Value) sort.Interface { s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } case reflect.Uint32, reflect.Uint64: s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } + case reflect.Bool: + s.less = func(a, b reflect.Value) bool { return !a.Bool() && b.Bool() } // false < true + case reflect.String: + s.less = func(a, b reflect.Value) bool { return a.String() < b.String() } + default: + panic(fmt.Sprintf("unsupported map key type: %v", vs[0].Kind())) } return s @@ -895,3 +909,13 @@ const ProtoPackageIsVersion2 = true // ProtoPackageIsVersion1 is referenced from generated protocol buffer files // to assert that that code is compatible with this version of the proto package. const ProtoPackageIsVersion1 = true + +// InternalMessageInfo is a type used internally by generated .pb.go files. +// This type is not intended to be used by non-generated code. +// This type is not subject to any compatibility guarantee. 
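Deterministic marshaling is opt-in per Buffer. A short usage sketch, assuming the proto3_proto.Message type used by map_test.go in this tree (any message with a map field would do):

    package main

    import (
        "fmt"

        "github.com/golang/protobuf/proto"
        ppb "github.com/golang/protobuf/proto/proto3_proto"
    )

    func main() {
        m := &ppb.Message{StringMap: map[string]string{"b": "2", "a": "1"}}

        buf := proto.NewBuffer(nil)
        buf.SetDeterministic(true)
        if err := buf.Marshal(m); err != nil {
            panic(err)
        }
        // With deterministic mode on, repeated runs of the same binary produce
        // identical bytes; map entries are emitted in sorted key order.
        fmt.Printf("% x\n", buf.Bytes())
    }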
+type InternalMessageInfo struct { + marshal *marshalInfo + unmarshal *unmarshalInfo + merge *mergeInfo + discard *discardInfo +} diff --git a/vendor/github.com/golang/protobuf/proto/map_test.go b/vendor/github.com/golang/protobuf/proto/map_test.go index 313e8792..b1e1529e 100644 --- a/vendor/github.com/golang/protobuf/proto/map_test.go +++ b/vendor/github.com/golang/protobuf/proto/map_test.go @@ -2,12 +2,36 @@ package proto_test import ( "fmt" + "reflect" "testing" "github.com/golang/protobuf/proto" ppb "github.com/golang/protobuf/proto/proto3_proto" ) +func TestMap(t *testing.T) { + var b []byte + fmt.Sscanf("a2010c0a044b657931120456616c31a201130a044b657932120556616c3261120456616c32a201240a044b6579330d05000000120556616c33621a0556616c3361120456616c331505000000a20100a201260a044b657934130a07536f6d6555524c1209536f6d655469746c651a08536e69707065743114", "%x", &b) + + var m ppb.Message + if err := proto.Unmarshal(b, &m); err != nil { + t.Fatalf("proto.Unmarshal error: %v", err) + } + + got := m.StringMap + want := map[string]string{ + "": "", + "Key1": "Val1", + "Key2": "Val2", + "Key3": "Val3", + "Key4": "", + } + + if !reflect.DeepEqual(got, want) { + t.Errorf("maps differ:\ngot %#v\nwant %#v", got, want) + } +} + func marshalled() []byte { m := &ppb.IntMaps{} for i := 0; i < 1000; i++ { diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go index fd982dec..3b6ca41d 100644 --- a/vendor/github.com/golang/protobuf/proto/message_set.go +++ b/vendor/github.com/golang/protobuf/proto/message_set.go @@ -42,6 +42,7 @@ import ( "fmt" "reflect" "sort" + "sync" ) // errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. @@ -94,10 +95,7 @@ func (ms *messageSet) find(pb Message) *_MessageSet_Item { } func (ms *messageSet) Has(pb Message) bool { - if ms.find(pb) != nil { - return true - } - return false + return ms.find(pb) != nil } func (ms *messageSet) Unmarshal(pb Message) error { @@ -150,46 +148,42 @@ func skipVarint(buf []byte) []byte { // MarshalMessageSet encodes the extension map represented by m in the message set wire format. // It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option. func MarshalMessageSet(exts interface{}) ([]byte, error) { - var m map[int32]Extension + return marshalMessageSet(exts, false) +} + +// marshaMessageSet implements above function, with the opt to turn on / off deterministic during Marshal. +func marshalMessageSet(exts interface{}, deterministic bool) ([]byte, error) { switch exts := exts.(type) { case *XXX_InternalExtensions: - if err := encodeExtensions(exts); err != nil { - return nil, err - } - m, _ = exts.extensionsRead() + var u marshalInfo + siz := u.sizeMessageSet(exts) + b := make([]byte, 0, siz) + return u.appendMessageSet(b, exts, deterministic) + case map[int32]Extension: - if err := encodeExtensionsMap(exts); err != nil { - return nil, err + // This is an old-style extension map. + // Wrap it in a new-style XXX_InternalExtensions. + ie := XXX_InternalExtensions{ + p: &struct { + mu sync.Mutex + extensionMap map[int32]Extension + }{ + extensionMap: exts, + }, } - m = exts + + var u marshalInfo + siz := u.sizeMessageSet(&ie) + b := make([]byte, 0, siz) + return u.appendMessageSet(b, &ie, deterministic) + default: return nil, errors.New("proto: not an extension map") } - - // Sort extension IDs to provide a deterministic encoding. - // See also enc_map in encode.go. 
- ids := make([]int, 0, len(m)) - for id := range m { - ids = append(ids, int(id)) - } - sort.Ints(ids) - - ms := &messageSet{Item: make([]*_MessageSet_Item, 0, len(m))} - for _, id := range ids { - e := m[int32(id)] - // Remove the wire type and field number varint, as well as the length varint. - msg := skipVarint(skipVarint(e.enc)) - - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: Int32(int32(id)), - Message: msg, - }) - } - return Marshal(ms) } // UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. -// It is called by generated Unmarshal methods on protocol buffer messages with the message_set_wire_format option. +// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option. func UnmarshalMessageSet(buf []byte, exts interface{}) error { var m map[int32]Extension switch exts := exts.(type) { @@ -235,7 +229,15 @@ func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { var m map[int32]Extension switch exts := exts.(type) { case *XXX_InternalExtensions: - m, _ = exts.extensionsRead() + var mu sync.Locker + m, mu = exts.extensionsRead() + if m != nil { + // Keep the extensions map locked until we're done marshaling to prevent + // races between marshaling and unmarshaling the lazily-{en,de}coded + // values. + mu.Lock() + defer mu.Unlock() + } case map[int32]Extension: m = exts default: @@ -253,15 +255,16 @@ func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { for i, id := range ids { ext := m[id] - if i > 0 { - b.WriteByte(',') - } - msd, ok := messageSetMap[id] if !ok { // Unknown type; we can't render it, so skip it. continue } + + if i > 0 && b.Len() > 1 { + b.WriteByte(',') + } + fmt.Fprintf(&b, `"[%s]":`, msd.name) x := ext.value diff --git a/vendor/github.com/golang/protobuf/proto/message_set_test.go b/vendor/github.com/golang/protobuf/proto/message_set_test.go index 353a3ea7..2c170c5f 100644 --- a/vendor/github.com/golang/protobuf/proto/message_set_test.go +++ b/vendor/github.com/golang/protobuf/proto/message_set_test.go @@ -64,3 +64,14 @@ func TestUnmarshalMessageSetWithDuplicate(t *testing.T) { t.Errorf("Combined extension is %q, want %q", got, want) } } + +func TestMarshalMessageSetJSON_UnknownType(t *testing.T) { + extMap := map[int32]Extension{12345: Extension{}} + got, err := MarshalMessageSetJSON(extMap) + if err != nil { + t.Fatalf("MarshalMessageSetJSON: %v", err) + } + if want := []byte("{}"); !bytes.Equal(got, want) { + t.Errorf("MarshalMessageSetJSON(%v) = %q, want %q", extMap, got, want) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go index fb512e2e..b6cad908 100644 --- a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go +++ b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go @@ -29,7 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// +build appengine js +// +build purego appengine js // This file contains an implementation of proto field accesses using package reflect. // It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can @@ -38,32 +38,13 @@ package proto import ( - "math" "reflect" + "sync" ) -// A structPointer is a pointer to a struct. -type structPointer struct { - v reflect.Value -} +const unsafeAllowed = false -// toStructPointer returns a structPointer equivalent to the given reflect value. 
-// The reflect value must itself be a pointer to a struct. -func toStructPointer(v reflect.Value) structPointer { - return structPointer{v} -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p.v.IsNil() -} - -// Interface returns the struct pointer as an interface value. -func structPointer_Interface(p structPointer, _ reflect.Type) interface{} { - return p.v.Interface() -} - -// A field identifies a field in a struct, accessible from a structPointer. +// A field identifies a field in a struct, accessible from a pointer. // In this implementation, a field is identified by the sequence of field indices // passed to reflect's FieldByIndex. type field []int @@ -76,409 +57,301 @@ func toField(f *reflect.StructField) field { // invalidField is an invalid field identifier. var invalidField = field(nil) +// zeroField is a noop when calling pointer.offset. +var zeroField = field([]int{}) + // IsValid reports whether the field identifier is valid. func (f field) IsValid() bool { return f != nil } -// field returns the given field in the struct as a reflect value. -func structPointer_field(p structPointer, f field) reflect.Value { - // Special case: an extension map entry with a value of type T - // passes a *T to the struct-handling code with a zero field, - // expecting that it will be treated as equivalent to *struct{ X T }, - // which has the same memory layout. We have to handle that case - // specially, because reflect will panic if we call FieldByIndex on a - // non-struct. - if f == nil { - return p.v.Elem() - } - - return p.v.Elem().FieldByIndex(f) -} - -// ifield returns the given field in the struct as an interface value. -func structPointer_ifield(p structPointer, f field) interface{} { - return structPointer_field(p, f).Addr().Interface() -} - -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return structPointer_ifield(p, f).(*[]byte) -} - -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return structPointer_ifield(p, f).(*[][]byte) -} - -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return structPointer_ifield(p, f).(**bool) -} - -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return structPointer_ifield(p, f).(*bool) -} - -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return structPointer_ifield(p, f).(*[]bool) -} - -// String returns the address of a *string field in the struct. -func structPointer_String(p structPointer, f field) **string { - return structPointer_ifield(p, f).(**string) -} - -// StringVal returns the address of a string field in the struct. -func structPointer_StringVal(p structPointer, f field) *string { - return structPointer_ifield(p, f).(*string) -} - -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return structPointer_ifield(p, f).(*[]string) -} - -// Extensions returns the address of an extension map field in the struct. 
-func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return structPointer_ifield(p, f).(*XXX_InternalExtensions) -} - -// ExtMap returns the address of an extension map field in the struct. -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return structPointer_ifield(p, f).(*map[int32]Extension) -} - -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return structPointer_field(p, f).Addr() -} - -// SetStructPointer writes a *struct field in the struct. -func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - structPointer_field(p, f).Set(q.v) -} - -// GetStructPointer reads a *struct field in the struct. -func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return structPointer{structPointer_field(p, f)} -} - -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) structPointerSlice { - return structPointerSlice{structPointer_field(p, f)} -} - -// A structPointerSlice represents the address of a slice of pointers to structs -// (themselves messages or groups). That is, v.Type() is *[]*struct{...}. -type structPointerSlice struct { +// The pointer type is for the table-driven decoder. +// The implementation here uses a reflect.Value of pointer type to +// create a generic pointer. In pointer_unsafe.go we use unsafe +// instead of reflect to implement the same (but faster) interface. +type pointer struct { v reflect.Value } -func (p structPointerSlice) Len() int { return p.v.Len() } -func (p structPointerSlice) Index(i int) structPointer { return structPointer{p.v.Index(i)} } -func (p structPointerSlice) Append(q structPointer) { - p.v.Set(reflect.Append(p.v, q.v)) +// toPointer converts an interface of pointer type to a pointer +// that points to the same target. +func toPointer(i *Message) pointer { + return pointer{v: reflect.ValueOf(*i)} } -var ( - int32Type = reflect.TypeOf(int32(0)) - uint32Type = reflect.TypeOf(uint32(0)) - float32Type = reflect.TypeOf(float32(0)) - int64Type = reflect.TypeOf(int64(0)) - uint64Type = reflect.TypeOf(uint64(0)) - float64Type = reflect.TypeOf(float64(0)) -) - -// A word32 represents a field of type *int32, *uint32, *float32, or *enum. -// That is, v.Type() is *int32, *uint32, *float32, or *enum and v is assignable. -type word32 struct { - v reflect.Value +// toAddrPointer converts an interface to a pointer that points to +// the interface data. +func toAddrPointer(i *interface{}, isptr bool) pointer { + v := reflect.ValueOf(*i) + u := reflect.New(v.Type()) + u.Elem().Set(v) + return pointer{v: u} } -// IsNil reports whether p is nil. -func word32_IsNil(p word32) bool { +// valToPointer converts v to a pointer. v must be of pointer type. +func valToPointer(v reflect.Value) pointer { + return pointer{v: v} +} + +// offset converts from a pointer to a structure to a pointer to +// one of its fields. +func (p pointer) offset(f field) pointer { + return pointer{v: p.v.Elem().FieldByIndex(f).Addr()} +} + +func (p pointer) isNil() bool { return p.v.IsNil() } -// Set sets p to point at a newly allocated word with bits set to x. 
-func word32_Set(p word32, o *Buffer, x uint32) { - t := p.v.Type().Elem() - switch t { - case int32Type: - if len(o.int32s) == 0 { - o.int32s = make([]int32, uint32PoolSize) - } - o.int32s[0] = int32(x) - p.v.Set(reflect.ValueOf(&o.int32s[0])) - o.int32s = o.int32s[1:] - return - case uint32Type: - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) - } - o.uint32s[0] = x - p.v.Set(reflect.ValueOf(&o.uint32s[0])) - o.uint32s = o.uint32s[1:] - return - case float32Type: - if len(o.float32s) == 0 { - o.float32s = make([]float32, uint32PoolSize) - } - o.float32s[0] = math.Float32frombits(x) - p.v.Set(reflect.ValueOf(&o.float32s[0])) - o.float32s = o.float32s[1:] - return - } - - // must be enum - p.v.Set(reflect.New(t)) - p.v.Elem().SetInt(int64(int32(x))) -} - -// Get gets the bits pointed at by p, as a uint32. -func word32_Get(p word32) uint32 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") -} - -// Word32 returns a reference to a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32(p structPointer, f field) word32 { - return word32{structPointer_field(p, f)} -} - -// A word32Val represents a field of type int32, uint32, float32, or enum. -// That is, v.Type() is int32, uint32, float32, or enum and v is assignable. -type word32Val struct { - v reflect.Value -} - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - switch p.v.Type() { - case int32Type: - p.v.SetInt(int64(x)) - return - case uint32Type: - p.v.SetUint(uint64(x)) - return - case float32Type: - p.v.SetFloat(float64(math.Float32frombits(x))) - return - } - - // must be enum - p.v.SetInt(int64(int32(x))) -} - -// Get gets the bits pointed at by p, as a uint32. -func word32Val_Get(p word32Val) uint32 { - elem := p.v - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") -} - -// Word32Val returns a reference to a int32, uint32, float32, or enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val{structPointer_field(p, f)} -} - -// A word32Slice is a slice of 32-bit values. -// That is, v.Type() is []int32, []uint32, []float32, or []enum. -type word32Slice struct { - v reflect.Value -} - -func (p word32Slice) Append(x uint32) { - n, m := p.v.Len(), p.v.Cap() +// grow updates the slice s in place to make it one element longer. +// s must be addressable. +// Returns the (addressable) new element. 
+func grow(s reflect.Value) reflect.Value { + n, m := s.Len(), s.Cap() if n < m { - p.v.SetLen(n + 1) + s.SetLen(n + 1) } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) + s.Set(reflect.Append(s, reflect.Zero(s.Type().Elem()))) } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int32: - elem.SetInt(int64(int32(x))) - case reflect.Uint32: - elem.SetUint(uint64(x)) - case reflect.Float32: - elem.SetFloat(float64(math.Float32frombits(x))) + return s.Index(n) +} + +func (p pointer) toInt64() *int64 { + return p.v.Interface().(*int64) +} +func (p pointer) toInt64Ptr() **int64 { + return p.v.Interface().(**int64) +} +func (p pointer) toInt64Slice() *[]int64 { + return p.v.Interface().(*[]int64) +} + +var int32ptr = reflect.TypeOf((*int32)(nil)) + +func (p pointer) toInt32() *int32 { + return p.v.Convert(int32ptr).Interface().(*int32) +} + +// The toInt32Ptr/Slice methods don't work because of enums. +// Instead, we must use set/get methods for the int32ptr/slice case. +/* + func (p pointer) toInt32Ptr() **int32 { + return p.v.Interface().(**int32) +} + func (p pointer) toInt32Slice() *[]int32 { + return p.v.Interface().(*[]int32) +} +*/ +func (p pointer) getInt32Ptr() *int32 { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + return p.v.Elem().Interface().(*int32) } + // an enum + return p.v.Elem().Convert(int32PtrType).Interface().(*int32) +} +func (p pointer) setInt32Ptr(v int32) { + // Allocate value in a *int32. Possibly convert that to a *enum. + // Then assign it to a **int32 or **enum. + // Note: we can convert *int32 to *enum, but we can't convert + // **int32 to **enum! + p.v.Elem().Set(reflect.ValueOf(&v).Convert(p.v.Type().Elem())) } -func (p word32Slice) Len() int { - return p.v.Len() -} - -func (p word32Slice) Index(i int) uint32 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) +// getInt32Slice copies []int32 from p as a new slice. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) getInt32Slice() []int32 { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + return p.v.Elem().Interface().([]int32) } - panic("unreachable") + // an enum + // Allocate a []int32, then assign []enum's values into it. + // Note: we can't convert []enum to []int32. + slice := p.v.Elem() + s := make([]int32, slice.Len()) + for i := 0; i < slice.Len(); i++ { + s[i] = int32(slice.Index(i).Int()) + } + return s } -// Word32Slice returns a reference to a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) word32Slice { - return word32Slice{structPointer_field(p, f)} -} - -// word64 is like word32 but for 64-bit values. 
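The grow helper above is the reflect-mode stand-in for appending to a typed slice. A standalone illustration of the same pattern on an ordinary slice, using only the standard library and a local copy of the helper:

    package main

    import (
        "fmt"
        "reflect"
    )

    // grow extends the addressable slice s by one element and returns that element.
    func grow(s reflect.Value) reflect.Value {
        n, m := s.Len(), s.Cap()
        if n < m {
            s.SetLen(n + 1)
        } else {
            s.Set(reflect.Append(s, reflect.Zero(s.Type().Elem())))
        }
        return s.Index(n)
    }

    func main() {
        xs := []int32{1, 2}
        v := reflect.ValueOf(&xs).Elem() // addressable slice value
        grow(v).SetInt(3)
        fmt.Println(xs) // [1 2 3]
    }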
-type word64 struct { - v reflect.Value -} - -func word64_Set(p word64, o *Buffer, x uint64) { - t := p.v.Type().Elem() - switch t { - case int64Type: - if len(o.int64s) == 0 { - o.int64s = make([]int64, uint64PoolSize) - } - o.int64s[0] = int64(x) - p.v.Set(reflect.ValueOf(&o.int64s[0])) - o.int64s = o.int64s[1:] - return - case uint64Type: - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) - } - o.uint64s[0] = x - p.v.Set(reflect.ValueOf(&o.uint64s[0])) - o.uint64s = o.uint64s[1:] - return - case float64Type: - if len(o.float64s) == 0 { - o.float64s = make([]float64, uint64PoolSize) - } - o.float64s[0] = math.Float64frombits(x) - p.v.Set(reflect.ValueOf(&o.float64s[0])) - o.float64s = o.float64s[1:] +// setInt32Slice copies []int32 into p as a new slice. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) setInt32Slice(v []int32) { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + p.v.Elem().Set(reflect.ValueOf(v)) return } - panic("unreachable") -} - -func word64_IsNil(p word64) bool { - return p.v.IsNil() -} - -func word64_Get(p word64) uint64 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) + // an enum + // Allocate a []enum, then assign []int32's values into it. + // Note: we can't convert []enum to []int32. + slice := reflect.MakeSlice(p.v.Type().Elem(), len(v), cap(v)) + for i, x := range v { + slice.Index(i).SetInt(int64(x)) } - panic("unreachable") + p.v.Elem().Set(slice) +} +func (p pointer) appendInt32Slice(v int32) { + grow(p.v.Elem()).SetInt(int64(v)) } -func structPointer_Word64(p structPointer, f field) word64 { - return word64{structPointer_field(p, f)} +func (p pointer) toUint64() *uint64 { + return p.v.Interface().(*uint64) +} +func (p pointer) toUint64Ptr() **uint64 { + return p.v.Interface().(**uint64) +} +func (p pointer) toUint64Slice() *[]uint64 { + return p.v.Interface().(*[]uint64) +} +func (p pointer) toUint32() *uint32 { + return p.v.Interface().(*uint32) +} +func (p pointer) toUint32Ptr() **uint32 { + return p.v.Interface().(**uint32) +} +func (p pointer) toUint32Slice() *[]uint32 { + return p.v.Interface().(*[]uint32) +} +func (p pointer) toBool() *bool { + return p.v.Interface().(*bool) +} +func (p pointer) toBoolPtr() **bool { + return p.v.Interface().(**bool) +} +func (p pointer) toBoolSlice() *[]bool { + return p.v.Interface().(*[]bool) +} +func (p pointer) toFloat64() *float64 { + return p.v.Interface().(*float64) +} +func (p pointer) toFloat64Ptr() **float64 { + return p.v.Interface().(**float64) +} +func (p pointer) toFloat64Slice() *[]float64 { + return p.v.Interface().(*[]float64) +} +func (p pointer) toFloat32() *float32 { + return p.v.Interface().(*float32) +} +func (p pointer) toFloat32Ptr() **float32 { + return p.v.Interface().(**float32) +} +func (p pointer) toFloat32Slice() *[]float32 { + return p.v.Interface().(*[]float32) +} +func (p pointer) toString() *string { + return p.v.Interface().(*string) +} +func (p pointer) toStringPtr() **string { + return p.v.Interface().(**string) +} +func (p pointer) toStringSlice() *[]string { + return p.v.Interface().(*[]string) +} +func (p pointer) toBytes() *[]byte { + return p.v.Interface().(*[]byte) +} +func (p pointer) toBytesSlice() *[][]byte { + return p.v.Interface().(*[][]byte) +} +func (p pointer) toExtensions() *XXX_InternalExtensions { + return 
p.v.Interface().(*XXX_InternalExtensions) +} +func (p pointer) toOldExtensions() *map[int32]Extension { + return p.v.Interface().(*map[int32]Extension) +} +func (p pointer) getPointer() pointer { + return pointer{v: p.v.Elem()} +} +func (p pointer) setPointer(q pointer) { + p.v.Elem().Set(q.v) +} +func (p pointer) appendPointer(q pointer) { + grow(p.v.Elem()).Set(q.v) } -// word64Val is like word32Val but for 64-bit values. -type word64Val struct { - v reflect.Value +// getPointerSlice copies []*T from p as a new []pointer. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) getPointerSlice() []pointer { + if p.v.IsNil() { + return nil + } + n := p.v.Elem().Len() + s := make([]pointer, n) + for i := 0; i < n; i++ { + s[i] = pointer{v: p.v.Elem().Index(i)} + } + return s } -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - switch p.v.Type() { - case int64Type: - p.v.SetInt(int64(x)) - return - case uint64Type: - p.v.SetUint(x) - return - case float64Type: - p.v.SetFloat(math.Float64frombits(x)) +// setPointerSlice copies []pointer into p as a new []*T. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) setPointerSlice(v []pointer) { + if v == nil { + p.v.Elem().Set(reflect.New(p.v.Elem().Type()).Elem()) return } - panic("unreachable") -} - -func word64Val_Get(p word64Val) uint64 { - elem := p.v - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) + s := reflect.MakeSlice(p.v.Elem().Type(), 0, len(v)) + for _, p := range v { + s = reflect.Append(s, p.v) } - panic("unreachable") + p.v.Elem().Set(s) } -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val{structPointer_field(p, f)} -} - -type word64Slice struct { - v reflect.Value -} - -func (p word64Slice) Append(x uint64) { - n, m := p.v.Len(), p.v.Cap() - if n < m { - p.v.SetLen(n + 1) - } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) - } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int64: - elem.SetInt(int64(int64(x))) - case reflect.Uint64: - elem.SetUint(uint64(x)) - case reflect.Float64: - elem.SetFloat(float64(math.Float64frombits(x))) +// getInterfacePointer returns a pointer that points to the +// interface data of the interface pointed by p. +func (p pointer) getInterfacePointer() pointer { + if p.v.Elem().IsNil() { + return pointer{v: p.v.Elem()} } + return pointer{v: p.v.Elem().Elem().Elem().Field(0).Addr()} // *interface -> interface -> *struct -> struct } -func (p word64Slice) Len() int { - return p.v.Len() +func (p pointer) asPointerTo(t reflect.Type) reflect.Value { + // TODO: check that p.v.Type().Elem() == t? 
+ return p.v } -func (p word64Slice) Index(i int) uint64 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return uint64(elem.Uint()) - case reflect.Float64: - return math.Float64bits(float64(elem.Float())) - } - panic("unreachable") +func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v +} +func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v +} +func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v +} +func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v } -func structPointer_Word64Slice(p structPointer, f field) word64Slice { - return word64Slice{structPointer_field(p, f)} -} +var atomicLock sync.Mutex diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go index 6b5567d4..d55a335d 100644 --- a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go +++ b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go @@ -29,7 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// +build !appengine,!js +// +build !purego,!appengine,!js // This file contains the implementation of the proto field accesses using package unsafe. @@ -37,38 +37,13 @@ package proto import ( "reflect" + "sync/atomic" "unsafe" ) -// NOTE: These type_Foo functions would more idiomatically be methods, -// but Go does not allow methods on pointer types, and we must preserve -// some pointer type for the garbage collector. We use these -// funcs with clunky names as our poor approximation to methods. -// -// An alternative would be -// type structPointer struct { p unsafe.Pointer } -// but that does not registerize as well. +const unsafeAllowed = true -// A structPointer is a pointer to a struct. -type structPointer unsafe.Pointer - -// toStructPointer returns a structPointer equivalent to the given reflect value. -func toStructPointer(v reflect.Value) structPointer { - return structPointer(unsafe.Pointer(v.Pointer())) -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p == nil -} - -// Interface returns the struct pointer, assumed to have element type t, -// as an interface value. -func structPointer_Interface(p structPointer, t reflect.Type) interface{} { - return reflect.NewAt(t, unsafe.Pointer(p)).Interface() -} - -// A field identifies a field in a struct, accessible from a structPointer. +// A field identifies a field in a struct, accessible from a pointer. // In this implementation, a field is identified by its byte offset from the start of the struct. 
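The two build modes differ mainly in how a field address is derived from a struct pointer: the purego/appengine/js build walks reflect field indices, while the default build adds the field's byte offset to an unsafe.Pointer. A standalone sketch of both strategies on a hypothetical struct S (not part of this package):

    package main

    import (
        "fmt"
        "reflect"
        "unsafe"
    )

    type S struct {
        A int32
        B string
    }

    func main() {
        s := &S{A: 7, B: "x"}

        // pointer_reflect.go style: field index path, then Addr().
        rb := reflect.ValueOf(s).Elem().FieldByIndex([]int{1}).Addr()
        fmt.Println(*rb.Interface().(*string)) // "x"

        // pointer_unsafe.go style: base pointer plus byte offset.
        off := reflect.TypeOf(S{}).Field(1).Offset
        ub := (*string)(unsafe.Pointer(uintptr(unsafe.Pointer(s)) + off))
        fmt.Println(*ub) // "x"
    }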
type field uintptr @@ -80,191 +55,254 @@ func toField(f *reflect.StructField) field { // invalidField is an invalid field identifier. const invalidField = ^field(0) +// zeroField is a noop when calling pointer.offset. +const zeroField = field(0) + // IsValid reports whether the field identifier is valid. func (f field) IsValid() bool { - return f != ^field(0) + return f != invalidField } -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return (*[]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// The pointer type below is for the new table-driven encoder/decoder. +// The implementation here uses unsafe.Pointer to create a generic pointer. +// In pointer_reflect.go we use reflect instead of unsafe to implement +// the same (but slower) interface. +type pointer struct { + p unsafe.Pointer } -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return (*[][]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// size of pointer +var ptrSize = unsafe.Sizeof(uintptr(0)) + +// toPointer converts an interface of pointer type to a pointer +// that points to the same target. +func toPointer(i *Message) pointer { + // Super-tricky - read pointer out of data word of interface value. + // Saves ~25ns over the equivalent: + // return valToPointer(reflect.ValueOf(*i)) + return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} } -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return (**bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return (*bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return (*[]bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// String returns the address of a *string field in the struct. -func structPointer_String(p structPointer, f field) **string { - return (**string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StringVal returns the address of a string field in the struct. -func structPointer_StringVal(p structPointer, f field) *string { - return (*string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return (*[]string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// ExtMap returns the address of an extension map field in the struct. -func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return (*XXX_InternalExtensions)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return (*map[int32]Extension)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return reflect.NewAt(typ, unsafe.Pointer(uintptr(p)+uintptr(f))) -} - -// SetStructPointer writes a *struct field in the struct. 
-func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) = q -} - -// GetStructPointer reads a *struct field in the struct. -func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) *structPointerSlice { - return (*structPointerSlice)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// A structPointerSlice represents a slice of pointers to structs (themselves submessages or groups). -type structPointerSlice []structPointer - -func (v *structPointerSlice) Len() int { return len(*v) } -func (v *structPointerSlice) Index(i int) structPointer { return (*v)[i] } -func (v *structPointerSlice) Append(p structPointer) { *v = append(*v, p) } - -// A word32 is the address of a "pointer to 32-bit value" field. -type word32 **uint32 - -// IsNil reports whether *v is nil. -func word32_IsNil(p word32) bool { - return *p == nil -} - -// Set sets *v to point at a newly allocated word set to x. -func word32_Set(p word32, o *Buffer, x uint32) { - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) +// toAddrPointer converts an interface to a pointer that points to +// the interface data. +func toAddrPointer(i *interface{}, isptr bool) pointer { + // Super-tricky - read or get the address of data word of interface value. + if isptr { + // The interface is of pointer type, thus it is a direct interface. + // The data word is the pointer data itself. We take its address. + return pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)} } - o.uint32s[0] = x - *p = &o.uint32s[0] - o.uint32s = o.uint32s[1:] + // The interface is not of pointer type. The data word is the pointer + // to the data. + return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} } -// Get gets the value pointed at by *v. -func word32_Get(p word32) uint32 { - return **p +// valToPointer converts v to a pointer. v must be of pointer type. +func valToPointer(v reflect.Value) pointer { + return pointer{p: unsafe.Pointer(v.Pointer())} } -// Word32 returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32(p structPointer, f field) word32 { - return word32((**uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +// offset converts from a pointer to a structure to a pointer to +// one of its fields. +func (p pointer) offset(f field) pointer { + // For safety, we should panic if !f.IsValid, however calling panic causes + // this to no longer be inlineable, which is a serious performance cost. + /* + if !f.IsValid() { + panic("invalid field") + } + */ + return pointer{p: unsafe.Pointer(uintptr(p.p) + uintptr(f))} } -// A word32Val is the address of a 32-bit value field. -type word32Val *uint32 - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - *p = x +func (p pointer) isNil() bool { + return p.p == nil } -// Get gets the value pointed at by p. 
-func word32Val_Get(p word32Val) uint32 { - return *p +func (p pointer) toInt64() *int64 { + return (*int64)(p.p) +} +func (p pointer) toInt64Ptr() **int64 { + return (**int64)(p.p) +} +func (p pointer) toInt64Slice() *[]int64 { + return (*[]int64)(p.p) +} +func (p pointer) toInt32() *int32 { + return (*int32)(p.p) } -// Word32Val returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val((*uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) -} - -// A word32Slice is a slice of 32-bit values. -type word32Slice []uint32 - -func (v *word32Slice) Append(x uint32) { *v = append(*v, x) } -func (v *word32Slice) Len() int { return len(*v) } -func (v *word32Slice) Index(i int) uint32 { return (*v)[i] } - -// Word32Slice returns the address of a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) *word32Slice { - return (*word32Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// word64 is like word32 but for 64-bit values. -type word64 **uint64 - -func word64_Set(p word64, o *Buffer, x uint64) { - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) +// See pointer_reflect.go for why toInt32Ptr/Slice doesn't exist. +/* + func (p pointer) toInt32Ptr() **int32 { + return (**int32)(p.p) } - o.uint64s[0] = x - *p = &o.uint64s[0] - o.uint64s = o.uint64s[1:] + func (p pointer) toInt32Slice() *[]int32 { + return (*[]int32)(p.p) + } +*/ +func (p pointer) getInt32Ptr() *int32 { + return *(**int32)(p.p) +} +func (p pointer) setInt32Ptr(v int32) { + *(**int32)(p.p) = &v } -func word64_IsNil(p word64) bool { - return *p == nil +// getInt32Slice loads a []int32 from p. +// The value returned is aliased with the original slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) getInt32Slice() []int32 { + return *(*[]int32)(p.p) } -func word64_Get(p word64) uint64 { - return **p +// setInt32Slice stores a []int32 to p. +// The value set is aliased with the input slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) setInt32Slice(v []int32) { + *(*[]int32)(p.p) = v } -func structPointer_Word64(p structPointer, f field) word64 { - return word64((**uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +// TODO: Can we get rid of appendInt32Slice and use setInt32Slice instead? +func (p pointer) appendInt32Slice(v int32) { + s := (*[]int32)(p.p) + *s = append(*s, v) } -// word64Val is like word32Val but for 64-bit values. 
-type word64Val *uint64 - -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - *p = x +func (p pointer) toUint64() *uint64 { + return (*uint64)(p.p) +} +func (p pointer) toUint64Ptr() **uint64 { + return (**uint64)(p.p) +} +func (p pointer) toUint64Slice() *[]uint64 { + return (*[]uint64)(p.p) +} +func (p pointer) toUint32() *uint32 { + return (*uint32)(p.p) +} +func (p pointer) toUint32Ptr() **uint32 { + return (**uint32)(p.p) +} +func (p pointer) toUint32Slice() *[]uint32 { + return (*[]uint32)(p.p) +} +func (p pointer) toBool() *bool { + return (*bool)(p.p) +} +func (p pointer) toBoolPtr() **bool { + return (**bool)(p.p) +} +func (p pointer) toBoolSlice() *[]bool { + return (*[]bool)(p.p) +} +func (p pointer) toFloat64() *float64 { + return (*float64)(p.p) +} +func (p pointer) toFloat64Ptr() **float64 { + return (**float64)(p.p) +} +func (p pointer) toFloat64Slice() *[]float64 { + return (*[]float64)(p.p) +} +func (p pointer) toFloat32() *float32 { + return (*float32)(p.p) +} +func (p pointer) toFloat32Ptr() **float32 { + return (**float32)(p.p) +} +func (p pointer) toFloat32Slice() *[]float32 { + return (*[]float32)(p.p) +} +func (p pointer) toString() *string { + return (*string)(p.p) +} +func (p pointer) toStringPtr() **string { + return (**string)(p.p) +} +func (p pointer) toStringSlice() *[]string { + return (*[]string)(p.p) +} +func (p pointer) toBytes() *[]byte { + return (*[]byte)(p.p) +} +func (p pointer) toBytesSlice() *[][]byte { + return (*[][]byte)(p.p) +} +func (p pointer) toExtensions() *XXX_InternalExtensions { + return (*XXX_InternalExtensions)(p.p) +} +func (p pointer) toOldExtensions() *map[int32]Extension { + return (*map[int32]Extension)(p.p) } -func word64Val_Get(p word64Val) uint64 { - return *p +// getPointerSlice loads []*T from p as a []pointer. +// The value returned is aliased with the original slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) getPointerSlice() []pointer { + // Super-tricky - p should point to a []*T where T is a + // message type. We load it as []pointer. + return *(*[]pointer)(p.p) } -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val((*uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +// setPointerSlice stores []pointer into p as a []*T. +// The value set is aliased with the input slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) setPointerSlice(v []pointer) { + // Super-tricky - p should point to a []*T where T is a + // message type. We store it as []pointer. + *(*[]pointer)(p.p) = v } -// word64Slice is like word32Slice but for 64-bit values. -type word64Slice []uint64 - -func (v *word64Slice) Append(x uint64) { *v = append(*v, x) } -func (v *word64Slice) Len() int { return len(*v) } -func (v *word64Slice) Index(i int) uint64 { return (*v)[i] } - -func structPointer_Word64Slice(p structPointer, f field) *word64Slice { - return (*word64Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// getPointer loads the pointer at p and returns it. +func (p pointer) getPointer() pointer { + return pointer{p: *(*unsafe.Pointer)(p.p)} +} + +// setPointer stores the pointer q at p. +func (p pointer) setPointer(q pointer) { + *(*unsafe.Pointer)(p.p) = q.p +} + +// append q to the slice pointed to by p. +func (p pointer) appendPointer(q pointer) { + s := (*[]unsafe.Pointer)(p.p) + *s = append(*s, q.p) +} + +// getInterfacePointer returns a pointer that points to the +// interface data of the interface pointed by p. 
+func (p pointer) getInterfacePointer() pointer { + // Super-tricky - read pointer out of data word of interface value. + return pointer{p: (*(*[2]unsafe.Pointer)(p.p))[1]} +} + +// asPointerTo returns a reflect.Value that is a pointer to an +// object of type t stored at p. +func (p pointer) asPointerTo(t reflect.Type) reflect.Value { + return reflect.NewAt(t, p.p) +} + +func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { + return (*unmarshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { + return (*marshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { + return (*mergeInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { + return (*discardInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) } diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go index ec2289c0..f710adab 100644 --- a/vendor/github.com/golang/protobuf/proto/properties.go +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -58,42 +58,6 @@ const ( WireFixed32 = 5 ) -const startSize = 10 // initial slice/string sizes - -// Encoders are defined in encode.go -// An encoder outputs the full representation of a field, including its -// tag and encoder type. -type encoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueEncoder encodes a single integer in a particular encoding. -type valueEncoder func(o *Buffer, x uint64) error - -// Sizers are defined in encode.go -// A sizer returns the encoded size of a field, including its tag and encoder -// type. -type sizer func(prop *Properties, base structPointer) int - -// A valueSizer returns the encoded size of a single integer in a particular -// encoding. -type valueSizer func(x uint64) int - -// Decoders are defined in decode.go -// A decoder creates a value from its wire representation. -// Unrecognized subelements are saved in unrec. -type decoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueDecoder decodes a single integer in a particular encoding. -type valueDecoder func(o *Buffer) (x uint64, err error) - -// A oneofMarshaler does the marshaling for all oneof fields in a message. -type oneofMarshaler func(Message, *Buffer) error - -// A oneofUnmarshaler does the unmarshaling for a oneof field in a message. -type oneofUnmarshaler func(Message, int, int, *Buffer) (bool, error) - -// A oneofSizer does the sizing for all oneof fields in a message. -type oneofSizer func(Message) int - // tagMap is an optimization over map[int]int for typical protocol buffer // use-cases. Encoded protocol buffers are often in tag order with small tag // numbers. 
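Editor's note: the hunk above removes the per-field encoder/decoder/sizer function pointers from properties.go; its trailing context is the comment for tagMap, whose body is unchanged and therefore not shown in this diff. The idea that comment describes — a dense slice for the small tag numbers that dominate typical messages, with a map fallback for the rare large ones — can be sketched roughly as below. This is an illustrative sketch only; the type name, field names, and the cutoff constant are assumptions for the example, not the vendored implementation.

// Illustrative sketch (not the vendored code): a tag -> field-index lookup
// that is a plain slice for small tags and a map only for large ones.
package main

import "fmt"

const fastTagLimit = 1024 // assumed cutoff, chosen for illustration only

type tagIndex struct {
	fast []int       // fast[tag] holds fieldIndex+1; 0 means "unset"
	slow map[int]int // allocated only if a tag >= fastTagLimit appears
}

func (t *tagIndex) put(tag, fieldIndex int) {
	if tag > 0 && tag < fastTagLimit {
		for len(t.fast) <= tag {
			t.fast = append(t.fast, 0)
		}
		t.fast[tag] = fieldIndex + 1
		return
	}
	if t.slow == nil {
		t.slow = make(map[int]int)
	}
	t.slow[tag] = fieldIndex
}

func (t *tagIndex) get(tag int) (int, bool) {
	if tag > 0 && tag < fastTagLimit {
		if tag < len(t.fast) && t.fast[tag] != 0 {
			return t.fast[tag] - 1, true
		}
		return 0, false
	}
	fi, ok := t.slow[tag] // reading a nil map is safe in Go
	return fi, ok
}

func main() {
	var idx tagIndex
	idx.put(1, 0)
	idx.put(200, 7)
	fmt.Println(idx.get(1))   // 0 true
	fmt.Println(idx.get(200)) // 7 true
	fmt.Println(idx.get(3))   // 0 false
}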
@@ -140,13 +104,6 @@ type StructProperties struct { decoderTags tagMap // map from proto tag to struct field number decoderOrigNames map[string]int // map from original name to struct field number order []int // list of struct field numbers in tag order - unrecField field // field id of the XXX_unrecognized []byte field - extendable bool // is this an extendable proto - - oneofMarshaler oneofMarshaler - oneofUnmarshaler oneofUnmarshaler - oneofSizer oneofSizer - stype reflect.Type // OneofTypes contains information about the oneof fields in this message. // It is keyed by the original name of a field. @@ -187,36 +144,19 @@ type Properties struct { Default string // default value HasDefault bool // whether an explicit default was provided - def_uint64 uint64 - enc encoder - valEnc valueEncoder // set for bool and numeric types only - field field - tagcode []byte // encoding of EncodeVarint((Tag<<3)|WireType) - tagbuf [8]byte - stype reflect.Type // set for struct types only - sprop *StructProperties // set for struct types only - isMarshaler bool - isUnmarshaler bool + stype reflect.Type // set for struct types only + sprop *StructProperties // set for struct types only mtype reflect.Type // set for map types only mkeyprop *Properties // set for map types only mvalprop *Properties // set for map types only - - size sizer - valSize valueSizer // set for bool and numeric types only - - dec decoder - valDec valueDecoder // set for bool and numeric types only - - // If this is a packable field, this will be the decoder for the packed version of the field. - packedDec decoder } // String formats the properties in the protobuf struct field tag style. func (p *Properties) String() string { s := p.Wire - s = "," + s += "," s += strconv.Itoa(p.Tag) if p.Required { s += ",req" @@ -262,29 +202,14 @@ func (p *Properties) Parse(s string) { switch p.Wire { case "varint": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeVarint - p.valDec = (*Buffer).DecodeVarint - p.valSize = sizeVarint case "fixed32": p.WireType = WireFixed32 - p.valEnc = (*Buffer).EncodeFixed32 - p.valDec = (*Buffer).DecodeFixed32 - p.valSize = sizeFixed32 case "fixed64": p.WireType = WireFixed64 - p.valEnc = (*Buffer).EncodeFixed64 - p.valDec = (*Buffer).DecodeFixed64 - p.valSize = sizeFixed64 case "zigzag32": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag32 - p.valDec = (*Buffer).DecodeZigzag32 - p.valSize = sizeZigzag32 case "zigzag64": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag64 - p.valDec = (*Buffer).DecodeZigzag64 - p.valSize = sizeZigzag64 case "bytes", "group": p.WireType = WireBytes // no numeric converter for non-numeric types @@ -299,6 +224,7 @@ func (p *Properties) Parse(s string) { return } +outer: for i := 2; i < len(fields); i++ { f := fields[i] switch { @@ -326,229 +252,28 @@ func (p *Properties) Parse(s string) { if i+1 < len(fields) { // Commas aren't escaped, and def is always last. p.Default += "," + strings.Join(fields[i+1:], ",") - break + break outer } } } } -func logNoSliceEnc(t1, t2 reflect.Type) { - fmt.Fprintf(os.Stderr, "proto: no slice oenc for %T = []%T\n", t1, t2) -} - var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() -// Initialize the fields for encoding and decoding. -func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { - p.enc = nil - p.dec = nil - p.size = nil - +// setFieldProps initializes the field properties for submessages and maps. 
+func (p *Properties) setFieldProps(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { switch t1 := typ; t1.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1) - - // proto3 scalar types - - case reflect.Bool: - p.enc = (*Buffer).enc_proto3_bool - p.dec = (*Buffer).dec_proto3_bool - p.size = size_proto3_bool - case reflect.Int32: - p.enc = (*Buffer).enc_proto3_int32 - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_proto3_uint32 - p.dec = (*Buffer).dec_proto3_int32 // can reuse - p.size = size_proto3_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_proto3_int64 - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_proto3_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_proto3_int64 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.String: - p.enc = (*Buffer).enc_proto3_string - p.dec = (*Buffer).dec_proto3_string - p.size = size_proto3_string - case reflect.Ptr: - switch t2 := t1.Elem(); t2.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no encoder function for %v -> %v\n", t1, t2) - break - case reflect.Bool: - p.enc = (*Buffer).enc_bool - p.dec = (*Buffer).dec_bool - p.size = size_bool - case reflect.Int32: - p.enc = (*Buffer).enc_int32 - p.dec = (*Buffer).dec_int32 - p.size = size_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_uint32 - p.dec = (*Buffer).dec_int32 // can reuse - p.size = size_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_int64 - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_int32 - p.size = size_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_int64 // can just treat them as bits - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.String: - p.enc = (*Buffer).enc_string - p.dec = (*Buffer).dec_string - p.size = size_string - case reflect.Struct: + if t1.Elem().Kind() == reflect.Struct { p.stype = t1.Elem() - p.isMarshaler = isMarshaler(t1) - p.isUnmarshaler = isUnmarshaler(t1) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_struct_message - p.dec = (*Buffer).dec_struct_message - p.size = size_struct_message - } else { - p.enc = (*Buffer).enc_struct_group - p.dec = (*Buffer).dec_struct_group - p.size = size_struct_group - } } case reflect.Slice: - switch t2 := t1.Elem(); t2.Kind() { - default: - logNoSliceEnc(t1, t2) - break - case reflect.Bool: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_bool - p.size = size_slice_packed_bool - } else { - p.enc = (*Buffer).enc_slice_bool - p.size = size_slice_bool - } - p.dec = (*Buffer).dec_slice_bool - p.packedDec = (*Buffer).dec_slice_packed_bool - case reflect.Int32: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int32 - p.size = size_slice_packed_int32 - } else { - p.enc = (*Buffer).enc_slice_int32 - p.size = size_slice_int32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case reflect.Uint32: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case 
reflect.Int64, reflect.Uint64: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - case reflect.Uint8: - p.dec = (*Buffer).dec_slice_byte - if p.proto3 { - p.enc = (*Buffer).enc_proto3_slice_byte - p.size = size_proto3_slice_byte - } else { - p.enc = (*Buffer).enc_slice_byte - p.size = size_slice_byte - } - case reflect.Float32, reflect.Float64: - switch t2.Bits() { - case 32: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case 64: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - default: - logNoSliceEnc(t1, t2) - break - } - case reflect.String: - p.enc = (*Buffer).enc_slice_string - p.dec = (*Buffer).dec_slice_string - p.size = size_slice_string - case reflect.Ptr: - switch t3 := t2.Elem(); t3.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no ptr oenc for %T -> %T -> %T\n", t1, t2, t3) - break - case reflect.Struct: - p.stype = t2.Elem() - p.isMarshaler = isMarshaler(t2) - p.isUnmarshaler = isUnmarshaler(t2) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_slice_struct_message - p.dec = (*Buffer).dec_slice_struct_message - p.size = size_slice_struct_message - } else { - p.enc = (*Buffer).enc_slice_struct_group - p.dec = (*Buffer).dec_slice_struct_group - p.size = size_slice_struct_group - } - } - case reflect.Slice: - switch t2.Elem().Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no slice elem oenc for %T -> %T -> %T\n", t1, t2, t2.Elem()) - break - case reflect.Uint8: - p.enc = (*Buffer).enc_slice_slice_byte - p.dec = (*Buffer).dec_slice_slice_byte - p.size = size_slice_slice_byte - } + if t2 := t1.Elem(); t2.Kind() == reflect.Ptr && t2.Elem().Kind() == reflect.Struct { + p.stype = t2.Elem() } case reflect.Map: - p.enc = (*Buffer).enc_new_map - p.dec = (*Buffer).dec_new_map - p.size = size_new_map - p.mtype = t1 p.mkeyprop = &Properties{} p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) @@ -562,20 +287,6 @@ func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lock p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) } - // precalculate tag code - wire := p.WireType - if p.Packed { - wire = WireBytes - } - x := uint32(p.Tag)<<3 | uint32(wire) - i := 0 - for i = 0; x > 127; i++ { - p.tagbuf[i] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - p.tagbuf[i] = uint8(x) - p.tagcode = p.tagbuf[0 : i+1] - if p.stype != nil { if lockGetProp { p.sprop = GetProperties(p.stype) @@ -586,32 +297,9 @@ func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lock } var ( - marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() - unmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem() + marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() ) -// isMarshaler reports whether type t implements Marshaler. 
-func isMarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isMarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isMarshaler") - } - return t.Implements(marshalerType) -} - -// isUnmarshaler reports whether type t implements Unmarshaler. -func isUnmarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isUnmarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isUnmarshaler") - } - return t.Implements(unmarshalerType) -} - // Init populates the properties from a protocol buffer struct tag. func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { p.init(typ, name, tag, f, true) @@ -621,14 +309,11 @@ func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructF // "bytes,49,opt,def=hello!" p.Name = name p.OrigName = name - if f != nil { - p.field = toField(f) - } if tag == "" { return } p.Parse(tag) - p.setEncAndDec(typ, f, lockGetProp) + p.setFieldProps(typ, f, lockGetProp) } var ( @@ -678,9 +363,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { propertiesMap[t] = prop // build properties - prop.extendable = reflect.PtrTo(t).Implements(extendableProtoType) || - reflect.PtrTo(t).Implements(extendableProtoV1Type) - prop.unrecField = invalidField prop.Prop = make([]*Properties, t.NumField()) prop.order = make([]int, t.NumField()) @@ -690,17 +372,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { name := f.Name p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) - if f.Name == "XXX_InternalExtensions" { // special case - p.enc = (*Buffer).enc_exts - p.dec = nil // not needed - p.size = size_exts - } else if f.Name == "XXX_extensions" { // special case - p.enc = (*Buffer).enc_map - p.dec = nil // not needed - p.size = size_map - } else if f.Name == "XXX_unrecognized" { // special case - prop.unrecField = toField(&f) - } oneof := f.Tag.Get("protobuf_oneof") // special case if oneof != "" { // Oneof fields don't use the traditional protobuf tag. @@ -715,9 +386,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { } print("\n") } - if p.enc == nil && !strings.HasPrefix(f.Name, "XXX_") && oneof == "" { - fmt.Fprintln(os.Stderr, "proto: no encoder for", f.Name, f.Type.String(), "[GetProperties]") - } } // Re-order prop.order. @@ -728,8 +396,7 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { } if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { var oots []interface{} - prop.oneofMarshaler, prop.oneofUnmarshaler, prop.oneofSizer, oots = om.XXX_OneofFuncs() - prop.stype = t + _, _, _, oots = om.XXX_OneofFuncs() // Interpret oneof metadata. prop.OneofTypes = make(map[string]*OneofProperties) @@ -779,30 +446,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { return prop } -// Return the Properties object for the x[0]'th field of the structure. -func propByIndex(t reflect.Type, x []int) *Properties { - if len(x) != 1 { - fmt.Fprintf(os.Stderr, "proto: field index dimension %d (not 1) for type %s\n", len(x), t) - return nil - } - prop := GetProperties(t) - return prop.Prop[x[0]] -} - -// Get the address and type of a pointer to a struct from an interface. 
-func getbase(pb Message) (t reflect.Type, b structPointer, err error) { - if pb == nil { - err = ErrNil - return - } - // get the reflect type of the pointer to the struct. - t = reflect.TypeOf(pb) - // get the address of the struct. - value := reflect.ValueOf(pb) - b = toStructPointer(value) - return -} - // A global registry of enum types. // The generated code will register the generated maps by calling RegisterEnum. @@ -826,20 +469,42 @@ func EnumValueMap(enumType string) map[string]int32 { // A registry of all linked message types. // The string is a fully-qualified proto name ("pkg.Message"). var ( - protoTypes = make(map[string]reflect.Type) - revProtoTypes = make(map[reflect.Type]string) + protoTypedNils = make(map[string]Message) // a map from proto names to typed nil pointers + protoMapTypes = make(map[string]reflect.Type) // a map from proto names to map types + revProtoTypes = make(map[reflect.Type]string) ) // RegisterType is called from generated code and maps from the fully qualified // proto name to the type (pointer to struct) of the protocol buffer. func RegisterType(x Message, name string) { - if _, ok := protoTypes[name]; ok { + if _, ok := protoTypedNils[name]; ok { // TODO: Some day, make this a panic. log.Printf("proto: duplicate proto type registered: %s", name) return } t := reflect.TypeOf(x) - protoTypes[name] = t + if v := reflect.ValueOf(x); v.Kind() == reflect.Ptr && v.Pointer() == 0 { + // Generated code always calls RegisterType with nil x. + // This check is just for extra safety. + protoTypedNils[name] = x + } else { + protoTypedNils[name] = reflect.Zero(t).Interface().(Message) + } + revProtoTypes[t] = name +} + +// RegisterMapType is called from generated code and maps from the fully qualified +// proto name to the native map type of the proto map definition. +func RegisterMapType(x interface{}, name string) { + if reflect.TypeOf(x).Kind() != reflect.Map { + panic(fmt.Sprintf("RegisterMapType(%T, %q); want map", x, name)) + } + if _, ok := protoMapTypes[name]; ok { + log.Printf("proto: duplicate proto type registered: %s", name) + return + } + t := reflect.TypeOf(x) + protoMapTypes[name] = t revProtoTypes[t] = name } @@ -855,7 +520,14 @@ func MessageName(x Message) string { } // MessageType returns the message type (pointer to struct) for a named message. -func MessageType(name string) reflect.Type { return protoTypes[name] } +// The type is not guaranteed to implement proto.Message if the name refers to a +// map entry. +func MessageType(name string) reflect.Type { + if t, ok := protoTypedNils[name]; ok { + return reflect.TypeOf(t) + } + return protoMapTypes[name] +} // A registry of all linked proto files. var ( diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go index cc4d0489..a80f0893 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go @@ -1,27 +1,13 @@ -// Code generated by protoc-gen-go. +// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto3_proto/proto3.proto -// DO NOT EDIT! -/* -Package proto3_proto is a generated protocol buffer package. 
- -It is generated from these files: - proto3_proto/proto3.proto - -It has these top-level messages: - Message - Nested - MessageWithMap - IntMap - IntMaps -*/ package proto3_proto import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/any" -import testdata "github.com/golang/protobuf/proto/testdata" +import test_proto "github.com/golang/protobuf/proto/test_proto" +import any "github.com/golang/protobuf/ptypes/any" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -59,33 +45,58 @@ var Message_Humour_value = map[string]int32{ func (x Message_Humour) String() string { return proto.EnumName(Message_Humour_name, int32(x)) } -func (Message_Humour) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } - -type Message struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"` - HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm" json:"height_in_cm,omitempty"` - Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` - ResultCount int64 `protobuf:"varint,7,opt,name=result_count,json=resultCount" json:"result_count,omitempty"` - TrueScotsman bool `protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman" json:"true_scotsman,omitempty"` - Score float32 `protobuf:"fixed32,9,opt,name=score" json:"score,omitempty"` - Key []uint64 `protobuf:"varint,5,rep,packed,name=key" json:"key,omitempty"` - ShortKey []int32 `protobuf:"varint,19,rep,packed,name=short_key,json=shortKey" json:"short_key,omitempty"` - Nested *Nested `protobuf:"bytes,6,opt,name=nested" json:"nested,omitempty"` - RFunny []Message_Humour `protobuf:"varint,16,rep,packed,name=r_funny,json=rFunny,enum=proto3_proto.Message_Humour" json:"r_funny,omitempty"` - Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Proto2Field *testdata.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field" json:"proto2_field,omitempty"` - Proto2Value map[string]*testdata.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Anything *google_protobuf.Any `protobuf:"bytes,14,opt,name=anything" json:"anything,omitempty"` - ManyThings []*google_protobuf.Any `protobuf:"bytes,15,rep,name=many_things,json=manyThings" json:"many_things,omitempty"` - Submessage *Message `protobuf:"bytes,17,opt,name=submessage" json:"submessage,omitempty"` - Children []*Message `protobuf:"bytes,18,rep,name=children" json:"children,omitempty"` +func (Message_Humour) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{0, 0} } -func (m *Message) Reset() { *m = Message{} } -func (m *Message) String() string { return proto.CompactTextString(m) } -func (*Message) ProtoMessage() {} -func (*Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +type Message struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"` + HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm" 
json:"height_in_cm,omitempty"` + Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` + ResultCount int64 `protobuf:"varint,7,opt,name=result_count,json=resultCount" json:"result_count,omitempty"` + TrueScotsman bool `protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman" json:"true_scotsman,omitempty"` + Score float32 `protobuf:"fixed32,9,opt,name=score" json:"score,omitempty"` + Key []uint64 `protobuf:"varint,5,rep,packed,name=key" json:"key,omitempty"` + ShortKey []int32 `protobuf:"varint,19,rep,packed,name=short_key,json=shortKey" json:"short_key,omitempty"` + Nested *Nested `protobuf:"bytes,6,opt,name=nested" json:"nested,omitempty"` + RFunny []Message_Humour `protobuf:"varint,16,rep,packed,name=r_funny,json=rFunny,enum=proto3_proto.Message_Humour" json:"r_funny,omitempty"` + Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Proto2Field *test_proto.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field" json:"proto2_field,omitempty"` + Proto2Value map[string]*test_proto.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Anything *any.Any `protobuf:"bytes,14,opt,name=anything" json:"anything,omitempty"` + ManyThings []*any.Any `protobuf:"bytes,15,rep,name=many_things,json=manyThings" json:"many_things,omitempty"` + Submessage *Message `protobuf:"bytes,17,opt,name=submessage" json:"submessage,omitempty"` + Children []*Message `protobuf:"bytes,18,rep,name=children" json:"children,omitempty"` + StringMap map[string]string `protobuf:"bytes,20,rep,name=string_map,json=stringMap" json:"string_map,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Message) Reset() { *m = Message{} } +func (m *Message) String() string { return proto.CompactTextString(m) } +func (*Message) ProtoMessage() {} +func (*Message) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{0} +} +func (m *Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Message.Unmarshal(m, b) +} +func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Message.Marshal(b, m, deterministic) +} +func (dst *Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Message.Merge(dst, src) +} +func (m *Message) XXX_Size() int { + return xxx_messageInfo_Message.Size(m) +} +func (m *Message) XXX_DiscardUnknown() { + xxx_messageInfo_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Message proto.InternalMessageInfo func (m *Message) GetName() string { if m != nil { @@ -171,28 +182,28 @@ func (m *Message) GetTerrain() map[string]*Nested { return nil } -func (m *Message) GetProto2Field() *testdata.SubDefaults { +func (m *Message) GetProto2Field() *test_proto.SubDefaults { if m != nil { return m.Proto2Field } return nil } -func (m *Message) GetProto2Value() map[string]*testdata.SubDefaults { +func (m *Message) GetProto2Value() map[string]*test_proto.SubDefaults { if m != nil { return m.Proto2Value } return nil } -func (m *Message) GetAnything() *google_protobuf.Any { +func (m *Message) GetAnything() *any.Any { if m != nil { return m.Anything } return nil } -func (m *Message) GetManyThings() 
[]*google_protobuf.Any { +func (m *Message) GetManyThings() []*any.Any { if m != nil { return m.ManyThings } @@ -213,15 +224,44 @@ func (m *Message) GetChildren() []*Message { return nil } -type Nested struct { - Bunny string `protobuf:"bytes,1,opt,name=bunny" json:"bunny,omitempty"` - Cute bool `protobuf:"varint,2,opt,name=cute" json:"cute,omitempty"` +func (m *Message) GetStringMap() map[string]string { + if m != nil { + return m.StringMap + } + return nil } -func (m *Nested) Reset() { *m = Nested{} } -func (m *Nested) String() string { return proto.CompactTextString(m) } -func (*Nested) ProtoMessage() {} -func (*Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +type Nested struct { + Bunny string `protobuf:"bytes,1,opt,name=bunny" json:"bunny,omitempty"` + Cute bool `protobuf:"varint,2,opt,name=cute" json:"cute,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Nested) Reset() { *m = Nested{} } +func (m *Nested) String() string { return proto.CompactTextString(m) } +func (*Nested) ProtoMessage() {} +func (*Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{1} +} +func (m *Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Nested.Unmarshal(m, b) +} +func (m *Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Nested.Marshal(b, m, deterministic) +} +func (dst *Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_Nested.Merge(dst, src) +} +func (m *Nested) XXX_Size() int { + return xxx_messageInfo_Nested.Size(m) +} +func (m *Nested) XXX_DiscardUnknown() { + xxx_messageInfo_Nested.DiscardUnknown(m) +} + +var xxx_messageInfo_Nested proto.InternalMessageInfo func (m *Nested) GetBunny() string { if m != nil { @@ -238,13 +278,35 @@ func (m *Nested) GetCute() bool { } type MessageWithMap struct { - ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"` + ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } -func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } -func (*MessageWithMap) ProtoMessage() {} -func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{2} +} +func (m *MessageWithMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageWithMap.Unmarshal(m, b) +} +func (m *MessageWithMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageWithMap.Marshal(b, m, deterministic) +} +func (dst *MessageWithMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageWithMap.Merge(dst, src) +} +func (m *MessageWithMap) XXX_Size() int { + return xxx_messageInfo_MessageWithMap.Size(m) +} +func (m *MessageWithMap) XXX_DiscardUnknown() { 
+ xxx_messageInfo_MessageWithMap.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageWithMap proto.InternalMessageInfo func (m *MessageWithMap) GetByteMapping() map[bool][]byte { if m != nil { @@ -254,13 +316,35 @@ func (m *MessageWithMap) GetByteMapping() map[bool][]byte { } type IntMap struct { - Rtt map[int32]int32 `protobuf:"bytes,1,rep,name=rtt" json:"rtt,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Rtt map[int32]int32 `protobuf:"bytes,1,rep,name=rtt" json:"rtt,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *IntMap) Reset() { *m = IntMap{} } -func (m *IntMap) String() string { return proto.CompactTextString(m) } -func (*IntMap) ProtoMessage() {} -func (*IntMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *IntMap) Reset() { *m = IntMap{} } +func (m *IntMap) String() string { return proto.CompactTextString(m) } +func (*IntMap) ProtoMessage() {} +func (*IntMap) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{3} +} +func (m *IntMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntMap.Unmarshal(m, b) +} +func (m *IntMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntMap.Marshal(b, m, deterministic) +} +func (dst *IntMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntMap.Merge(dst, src) +} +func (m *IntMap) XXX_Size() int { + return xxx_messageInfo_IntMap.Size(m) +} +func (m *IntMap) XXX_DiscardUnknown() { + xxx_messageInfo_IntMap.DiscardUnknown(m) +} + +var xxx_messageInfo_IntMap proto.InternalMessageInfo func (m *IntMap) GetRtt() map[int32]int32 { if m != nil { @@ -270,13 +354,35 @@ func (m *IntMap) GetRtt() map[int32]int32 { } type IntMaps struct { - Maps []*IntMap `protobuf:"bytes,1,rep,name=maps" json:"maps,omitempty"` + Maps []*IntMap `protobuf:"bytes,1,rep,name=maps" json:"maps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *IntMaps) Reset() { *m = IntMaps{} } -func (m *IntMaps) String() string { return proto.CompactTextString(m) } -func (*IntMaps) ProtoMessage() {} -func (*IntMaps) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (m *IntMaps) Reset() { *m = IntMaps{} } +func (m *IntMaps) String() string { return proto.CompactTextString(m) } +func (*IntMaps) ProtoMessage() {} +func (*IntMaps) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_e706e4ff19a5dbea, []int{4} +} +func (m *IntMaps) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntMaps.Unmarshal(m, b) +} +func (m *IntMaps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntMaps.Marshal(b, m, deterministic) +} +func (dst *IntMaps) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntMaps.Merge(dst, src) +} +func (m *IntMaps) XXX_Size() int { + return xxx_messageInfo_IntMaps.Size(m) +} +func (m *IntMaps) XXX_DiscardUnknown() { + xxx_messageInfo_IntMaps.DiscardUnknown(m) +} + +var xxx_messageInfo_IntMaps proto.InternalMessageInfo func (m *IntMaps) GetMaps() []*IntMap { if m != nil { @@ -287,61 +393,69 @@ func (m *IntMaps) GetMaps() []*IntMap { func init() { proto.RegisterType((*Message)(nil), "proto3_proto.Message") + proto.RegisterMapType((map[string]*test_proto.SubDefaults)(nil), "proto3_proto.Message.Proto2ValueEntry") + 
proto.RegisterMapType((map[string]string)(nil), "proto3_proto.Message.StringMapEntry") + proto.RegisterMapType((map[string]*Nested)(nil), "proto3_proto.Message.TerrainEntry") proto.RegisterType((*Nested)(nil), "proto3_proto.Nested") proto.RegisterType((*MessageWithMap)(nil), "proto3_proto.MessageWithMap") + proto.RegisterMapType((map[bool][]byte)(nil), "proto3_proto.MessageWithMap.ByteMappingEntry") proto.RegisterType((*IntMap)(nil), "proto3_proto.IntMap") + proto.RegisterMapType((map[int32]int32)(nil), "proto3_proto.IntMap.RttEntry") proto.RegisterType((*IntMaps)(nil), "proto3_proto.IntMaps") proto.RegisterEnum("proto3_proto.Message_Humour", Message_Humour_name, Message_Humour_value) } -func init() { proto.RegisterFile("proto3_proto/proto3.proto", fileDescriptor0) } +func init() { proto.RegisterFile("proto3_proto/proto3.proto", fileDescriptor_proto3_e706e4ff19a5dbea) } -var fileDescriptor0 = []byte{ - // 733 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x84, 0x53, 0x6d, 0x6f, 0xf3, 0x34, - 0x14, 0x25, 0x4d, 0x5f, 0xd2, 0x9b, 0x74, 0x0b, 0x5e, 0x91, 0xbc, 0x02, 0x52, 0x28, 0x12, 0x8a, - 0x78, 0x49, 0xa1, 0xd3, 0xd0, 0x84, 0x10, 0x68, 0x1b, 0x9b, 0xa8, 0xd6, 0x95, 0xca, 0xdd, 0x98, - 0xf8, 0x14, 0xa5, 0xad, 0xdb, 0x46, 0x34, 0x4e, 0x49, 0x1c, 0xa4, 0xfc, 0x1d, 0xfe, 0x28, 0x8f, - 0x6c, 0xa7, 0x5d, 0x36, 0x65, 0xcf, 0xf3, 0x29, 0xf6, 0xf1, 0xb9, 0xf7, 0x9c, 0x1c, 0x5f, 0xc3, - 0xe9, 0x2e, 0x89, 0x79, 0x7c, 0xe6, 0xcb, 0xcf, 0x40, 0x6d, 0x3c, 0xf9, 0x41, 0x56, 0xf9, 0xa8, - 0x77, 0xba, 0x8e, 0xe3, 0xf5, 0x96, 0x2a, 0xca, 0x3c, 0x5b, 0x0d, 0x02, 0x96, 0x2b, 0x62, 0xef, - 0x84, 0xd3, 0x94, 0x2f, 0x03, 0x1e, 0x0c, 0xc4, 0x42, 0x81, 0xfd, 0xff, 0x5b, 0xd0, 0xba, 0xa7, - 0x69, 0x1a, 0xac, 0x29, 0x42, 0x50, 0x67, 0x41, 0x44, 0xb1, 0xe6, 0x68, 0x6e, 0x9b, 0xc8, 0x35, - 0xba, 0x00, 0x63, 0x13, 0x6e, 0x83, 0x24, 0xe4, 0x39, 0xae, 0x39, 0x9a, 0x7b, 0x34, 0xfc, 0xcc, - 0x2b, 0x0b, 0x7a, 0x45, 0xb1, 0xf7, 0x7b, 0x16, 0xc5, 0x59, 0x42, 0x0e, 0x6c, 0xe4, 0x80, 0xb5, - 0xa1, 0xe1, 0x7a, 0xc3, 0xfd, 0x90, 0xf9, 0x8b, 0x08, 0xeb, 0x8e, 0xe6, 0x76, 0x08, 0x28, 0x6c, - 0xc4, 0xae, 0x23, 0xa1, 0x27, 0xec, 0xe0, 0xba, 0xa3, 0xb9, 0x16, 0x91, 0x6b, 0xf4, 0x05, 0x58, - 0x09, 0x4d, 0xb3, 0x2d, 0xf7, 0x17, 0x71, 0xc6, 0x38, 0x6e, 0x39, 0x9a, 0xab, 0x13, 0x53, 0x61, - 0xd7, 0x02, 0x42, 0x5f, 0x42, 0x87, 0x27, 0x19, 0xf5, 0xd3, 0x45, 0xcc, 0xd3, 0x28, 0x60, 0xd8, - 0x70, 0x34, 0xd7, 0x20, 0x96, 0x00, 0x67, 0x05, 0x86, 0xba, 0xd0, 0x48, 0x17, 0x71, 0x42, 0x71, - 0xdb, 0xd1, 0xdc, 0x1a, 0x51, 0x1b, 0x64, 0x83, 0xfe, 0x37, 0xcd, 0x71, 0xc3, 0xd1, 0xdd, 0x3a, - 0x11, 0x4b, 0xf4, 0x29, 0xb4, 0xd3, 0x4d, 0x9c, 0x70, 0x5f, 0xe0, 0x27, 0x8e, 0xee, 0x36, 0x88, - 0x21, 0x81, 0x3b, 0x9a, 0xa3, 0x6f, 0xa1, 0xc9, 0x68, 0xca, 0xe9, 0x12, 0x37, 0x1d, 0xcd, 0x35, - 0x87, 0xdd, 0x97, 0xbf, 0x3e, 0x91, 0x67, 0xa4, 0xe0, 0xa0, 0x73, 0x68, 0x25, 0xfe, 0x2a, 0x63, - 0x2c, 0xc7, 0xb6, 0xa3, 0x7f, 0x30, 0xa9, 0x66, 0x72, 0x2b, 0xb8, 0xe8, 0x67, 0x68, 0x71, 0x9a, - 0x24, 0x41, 0xc8, 0x30, 0x38, 0xba, 0x6b, 0x0e, 0xfb, 0xd5, 0x65, 0x0f, 0x8a, 0x74, 0xc3, 0x78, - 0x92, 0x93, 0x7d, 0x09, 0xba, 0x00, 0x75, 0xff, 0x43, 0x7f, 0x15, 0xd2, 0xed, 0x12, 0x9b, 0xd2, - 0xe8, 0x27, 0xde, 0xfe, 0xae, 0xbd, 0x59, 0x36, 0xff, 0x8d, 0xae, 0x82, 0x6c, 0xcb, 0x53, 0x62, - 0x2a, 0xea, 0xad, 0x60, 0xa2, 0xd1, 0xa1, 0xf2, 0xdf, 0x60, 0x9b, 0x51, 0xdc, 0x91, 0xe2, 0x5f, - 0x55, 0x8b, 0x4f, 0x25, 0xf3, 0x4f, 0x41, 0x54, 0x06, 0x8a, 0x56, 0x12, 0x41, 0xdf, 0x83, 0x11, - 0xb0, 0x9c, 0x6f, 0x42, 0xb6, 0xc6, 0x47, 
0x45, 0x52, 0x6a, 0x0e, 0xbd, 0xfd, 0x1c, 0x7a, 0x97, - 0x2c, 0x27, 0x07, 0x16, 0x3a, 0x07, 0x33, 0x0a, 0x58, 0xee, 0xcb, 0x5d, 0x8a, 0x8f, 0xa5, 0x76, - 0x75, 0x11, 0x08, 0xe2, 0x83, 0xe4, 0xa1, 0x73, 0x80, 0x34, 0x9b, 0x47, 0xca, 0x14, 0xfe, 0xb8, - 0xf8, 0xd7, 0x2a, 0xc7, 0xa4, 0x44, 0x44, 0x3f, 0x80, 0xb1, 0xd8, 0x84, 0xdb, 0x65, 0x42, 0x19, - 0x46, 0x52, 0xea, 0x8d, 0xa2, 0x03, 0xad, 0x37, 0x05, 0xab, 0x1c, 0xf8, 0x7e, 0x72, 0xd4, 0xd3, - 0x90, 0x93, 0xf3, 0x35, 0x34, 0x54, 0x70, 0xb5, 0xf7, 0xcc, 0x86, 0xa2, 0xfc, 0x54, 0xbb, 0xd0, - 0x7a, 0x8f, 0x60, 0xbf, 0x4e, 0xb1, 0xa2, 0xeb, 0x37, 0x2f, 0xbb, 0xbe, 0x71, 0x91, 0xcf, 0x6d, - 0xfb, 0xbf, 0x42, 0x53, 0x0d, 0x14, 0x32, 0xa1, 0xf5, 0x38, 0xb9, 0x9b, 0xfc, 0xf1, 0x34, 0xb1, - 0x3f, 0x42, 0x06, 0xd4, 0xa7, 0x8f, 0x93, 0x99, 0xad, 0xa1, 0x0e, 0xb4, 0x67, 0xe3, 0xcb, 0xe9, - 0xec, 0x61, 0x74, 0x7d, 0x67, 0xd7, 0xd0, 0x31, 0x98, 0x57, 0xa3, 0xf1, 0xd8, 0xbf, 0xba, 0x1c, - 0x8d, 0x6f, 0xfe, 0xb2, 0xf5, 0xfe, 0x10, 0x9a, 0xca, 0xac, 0x78, 0x33, 0x73, 0x39, 0xbe, 0xca, - 0x8f, 0xda, 0x88, 0x57, 0xba, 0xc8, 0xb8, 0x32, 0x64, 0x10, 0xb9, 0xee, 0xff, 0xa7, 0xc1, 0x51, - 0x91, 0xd9, 0x53, 0xc8, 0x37, 0xf7, 0xc1, 0x0e, 0x4d, 0xc1, 0x9a, 0xe7, 0x9c, 0xfa, 0x51, 0xb0, - 0xdb, 0x89, 0x39, 0xd0, 0x64, 0xce, 0xdf, 0x55, 0xe6, 0x5c, 0xd4, 0x78, 0x57, 0x39, 0xa7, 0xf7, - 0x8a, 0x5f, 0x4c, 0xd5, 0xfc, 0x19, 0xe9, 0xfd, 0x02, 0xf6, 0x6b, 0x42, 0x39, 0x30, 0x43, 0x05, - 0xd6, 0x2d, 0x07, 0x66, 0x95, 0x93, 0xf9, 0x07, 0x9a, 0x23, 0xc6, 0x85, 0xb7, 0x01, 0xe8, 0x09, - 0xe7, 0x85, 0xa5, 0xcf, 0x5f, 0x5a, 0x52, 0x14, 0x8f, 0x70, 0xae, 0x2c, 0x08, 0x66, 0xef, 0x47, - 0x30, 0xf6, 0x40, 0x59, 0xb2, 0x51, 0x21, 0xd9, 0x28, 0x4b, 0x9e, 0x41, 0x4b, 0xf5, 0x4b, 0x91, - 0x0b, 0xf5, 0x28, 0xd8, 0xa5, 0x85, 0x68, 0xb7, 0x4a, 0x94, 0x48, 0xc6, 0xbc, 0xa9, 0x8e, 0xde, - 0x05, 0x00, 0x00, 0xff, 0xff, 0x75, 0x38, 0xad, 0x84, 0xe4, 0x05, 0x00, 0x00, +var fileDescriptor_proto3_e706e4ff19a5dbea = []byte{ + // 774 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0x6f, 0x8f, 0xdb, 0x44, + 0x10, 0xc6, 0x71, 0x9c, 0x3f, 0xce, 0xd8, 0x77, 0x35, 0x4b, 0x2a, 0xb6, 0x01, 0x24, 0x13, 0x10, + 0xb2, 0x10, 0xf5, 0x41, 0xaa, 0x43, 0x55, 0x55, 0x81, 0xee, 0x8e, 0x56, 0x44, 0x77, 0x17, 0xa2, + 0xcd, 0x95, 0x13, 0xaf, 0xac, 0x4d, 0x6e, 0x93, 0x58, 0xc4, 0xeb, 0xe0, 0x5d, 0x23, 0xf9, 0x0b, + 0xf0, 0x41, 0xf8, 0xa4, 0x68, 0x77, 0x9d, 0xd4, 0xa9, 0x5c, 0xfa, 0x2a, 0xbb, 0x8f, 0x7f, 0x33, + 0xcf, 0x78, 0x66, 0x1c, 0x78, 0xb2, 0xcb, 0x33, 0x99, 0x3d, 0x8b, 0xf5, 0xcf, 0x99, 0xb9, 0x44, + 0xfa, 0x07, 0x79, 0xf5, 0x47, 0xc3, 0x27, 0xeb, 0x2c, 0x5b, 0x6f, 0x99, 0x41, 0x16, 0xc5, 0xea, + 0x8c, 0xf2, 0xd2, 0x80, 0xc3, 0xc7, 0x92, 0x09, 0x59, 0x65, 0x50, 0x47, 0x23, 0x8f, 0xfe, 0xe9, + 0x43, 0xef, 0x96, 0x09, 0x41, 0xd7, 0x0c, 0x21, 0x68, 0x73, 0x9a, 0x32, 0x6c, 0x05, 0x56, 0xd8, + 0x27, 0xfa, 0x8c, 0x9e, 0x83, 0xb3, 0x49, 0xb6, 0x34, 0x4f, 0x64, 0x89, 0x5b, 0x81, 0x15, 0x9e, + 0x8e, 0x3f, 0x8f, 0xea, 0x96, 0x51, 0x15, 0x1c, 0xfd, 0x5a, 0xa4, 0x59, 0x91, 0x93, 0x03, 0x8d, + 0x02, 0xf0, 0x36, 0x2c, 0x59, 0x6f, 0x64, 0x9c, 0xf0, 0x78, 0x99, 0x62, 0x3b, 0xb0, 0xc2, 0x13, + 0x02, 0x46, 0x9b, 0xf0, 0xab, 0x54, 0xf9, 0x3d, 0x50, 0x49, 0x71, 0x3b, 0xb0, 0x42, 0x8f, 0xe8, + 0x33, 0xfa, 0x12, 0xbc, 0x9c, 0x89, 0x62, 0x2b, 0xe3, 0x65, 0x56, 0x70, 0x89, 0x7b, 0x81, 0x15, + 0xda, 0xc4, 0x35, 0xda, 0x95, 0x92, 0xd0, 0x57, 0x70, 0x22, 0xf3, 0x82, 0xc5, 0x62, 0x99, 0x49, + 0x91, 0x52, 0x8e, 0x9d, 0xc0, 0x0a, 0x1d, 0xe2, 0x29, 0x71, 0x5e, 0x69, 0x68, 
0x00, 0x1d, 0xb1, + 0xcc, 0x72, 0x86, 0xfb, 0x81, 0x15, 0xb6, 0x88, 0xb9, 0x20, 0x1f, 0xec, 0x3f, 0x59, 0x89, 0x3b, + 0x81, 0x1d, 0xb6, 0x89, 0x3a, 0xa2, 0xcf, 0xa0, 0x2f, 0x36, 0x59, 0x2e, 0x63, 0xa5, 0x7f, 0x12, + 0xd8, 0x61, 0x87, 0x38, 0x5a, 0xb8, 0x66, 0x25, 0xfa, 0x0e, 0xba, 0x9c, 0x09, 0xc9, 0x1e, 0x70, + 0x37, 0xb0, 0x42, 0x77, 0x3c, 0x38, 0x7e, 0xf5, 0xa9, 0x7e, 0x46, 0x2a, 0x06, 0x9d, 0x43, 0x2f, + 0x8f, 0x57, 0x05, 0xe7, 0x25, 0xf6, 0x03, 0xfb, 0x83, 0x9d, 0xea, 0xe6, 0xaf, 0x15, 0x8b, 0x5e, + 0x42, 0x4f, 0xb2, 0x3c, 0xa7, 0x09, 0xc7, 0x10, 0xd8, 0xa1, 0x3b, 0x1e, 0x35, 0x87, 0xdd, 0x19, + 0xe8, 0x15, 0x97, 0x79, 0x49, 0xf6, 0x21, 0xe8, 0x05, 0x98, 0x0d, 0x18, 0xc7, 0xab, 0x84, 0x6d, + 0x1f, 0xb0, 0xab, 0x0b, 0xfd, 0x34, 0x7a, 0x3b, 0xed, 0x68, 0x5e, 0x2c, 0x7e, 0x61, 0x2b, 0x5a, + 0x6c, 0xa5, 0x20, 0xae, 0x81, 0x5f, 0x2b, 0x16, 0x4d, 0x0e, 0xb1, 0x7f, 0xd3, 0x6d, 0xc1, 0xf0, + 0x89, 0xb6, 0xff, 0xa6, 0xd9, 0x7e, 0xa6, 0xc9, 0xdf, 0x15, 0x68, 0x4a, 0xa8, 0x52, 0x69, 0x05, + 0x7d, 0x0f, 0x0e, 0xe5, 0xa5, 0xdc, 0x24, 0x7c, 0x8d, 0x4f, 0xab, 0x5e, 0x99, 0x5d, 0x8c, 0xf6, + 0xbb, 0x18, 0x5d, 0xf0, 0x92, 0x1c, 0x28, 0x74, 0x0e, 0x6e, 0x4a, 0x79, 0x19, 0xeb, 0x9b, 0xc0, + 0x8f, 0xb4, 0x77, 0x73, 0x10, 0x28, 0xf0, 0x4e, 0x73, 0xe8, 0x1c, 0x40, 0x14, 0x8b, 0xd4, 0x14, + 0x85, 0x3f, 0xd6, 0x56, 0x8f, 0x1b, 0x2b, 0x26, 0x35, 0x10, 0xfd, 0x00, 0xce, 0x72, 0x93, 0x6c, + 0x1f, 0x72, 0xc6, 0x31, 0xd2, 0x56, 0xef, 0x09, 0x3a, 0x60, 0xe8, 0x0a, 0x40, 0xc8, 0x3c, 0xe1, + 0xeb, 0x38, 0xa5, 0x3b, 0x3c, 0xd0, 0x41, 0x5f, 0x37, 0xf7, 0x66, 0xae, 0xb9, 0x5b, 0xba, 0x33, + 0x9d, 0xe9, 0x8b, 0xfd, 0x7d, 0x38, 0x03, 0xaf, 0x3e, 0xb7, 0xfd, 0x02, 0x9a, 0x2f, 0x4c, 0x2f, + 0xe0, 0xb7, 0xd0, 0x31, 0xdd, 0x6f, 0xfd, 0xcf, 0x8a, 0x19, 0xe4, 0x45, 0xeb, 0xb9, 0x35, 0xbc, + 0x07, 0xff, 0xdd, 0x51, 0x34, 0x64, 0x7d, 0x7a, 0x9c, 0xf5, 0xbd, 0xfb, 0x50, 0x4b, 0xfc, 0x12, + 0x4e, 0x8f, 0xdf, 0xa3, 0x21, 0xed, 0xa0, 0x9e, 0xb6, 0x5f, 0x8b, 0x1e, 0xfd, 0x0c, 0x5d, 0xb3, + 0xd7, 0xc8, 0x85, 0xde, 0x9b, 0xe9, 0xf5, 0xf4, 0xb7, 0xfb, 0xa9, 0xff, 0x11, 0x72, 0xa0, 0x3d, + 0x7b, 0x33, 0x9d, 0xfb, 0x16, 0x3a, 0x81, 0xfe, 0xfc, 0xe6, 0x62, 0x36, 0xbf, 0x9b, 0x5c, 0x5d, + 0xfb, 0x2d, 0xf4, 0x08, 0xdc, 0xcb, 0xc9, 0xcd, 0x4d, 0x7c, 0x79, 0x31, 0xb9, 0x79, 0xf5, 0x87, + 0x6f, 0x8f, 0xc6, 0xd0, 0x35, 0x2f, 0xab, 0x4c, 0x16, 0xfa, 0x2b, 0x32, 0xc6, 0xe6, 0xa2, 0xfe, + 0x2c, 0x96, 0x85, 0x34, 0xce, 0x0e, 0xd1, 0xe7, 0xd1, 0xbf, 0x16, 0x9c, 0x56, 0x33, 0xb8, 0x4f, + 0xe4, 0xe6, 0x96, 0xee, 0xd0, 0x0c, 0xbc, 0x45, 0x29, 0x99, 0x9a, 0xd9, 0x4e, 0x2d, 0xa3, 0xa5, + 0xe7, 0xf6, 0xb4, 0x71, 0x6e, 0x55, 0x4c, 0x74, 0x59, 0x4a, 0x76, 0x6b, 0xf8, 0x6a, 0xb5, 0x17, + 0x6f, 0x95, 0xe1, 0x4f, 0xe0, 0xbf, 0x0b, 0xd4, 0x3b, 0xe3, 0x34, 0x74, 0xc6, 0xab, 0x77, 0xe6, + 0x2f, 0xe8, 0x4e, 0xb8, 0x54, 0xb5, 0x9d, 0x81, 0x9d, 0x4b, 0x59, 0x95, 0xf4, 0xc5, 0x71, 0x49, + 0x06, 0x89, 0x88, 0x94, 0xa6, 0x04, 0x45, 0x0e, 0x7f, 0x04, 0x67, 0x2f, 0xd4, 0x2d, 0x3b, 0x0d, + 0x96, 0x9d, 0xba, 0xe5, 0x33, 0xe8, 0x99, 0x7c, 0x02, 0x85, 0xd0, 0x4e, 0xe9, 0x4e, 0x54, 0xa6, + 0x83, 0x26, 0x53, 0xa2, 0x89, 0x45, 0xd7, 0x3c, 0xfa, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x99, 0x24, + 0x6b, 0x12, 0x6d, 0x06, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto index 20486557..c81fe1e5 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto @@ -32,7 +32,7 @@ syntax = "proto3"; 
import "google/protobuf/any.proto"; -import "testdata/test.proto"; +import "test_proto/test.proto"; package proto3_proto; @@ -58,14 +58,16 @@ message Message { repeated Humour r_funny = 16; map terrain = 10; - testdata.SubDefaults proto2_field = 11; - map proto2_value = 13; + test_proto.SubDefaults proto2_field = 11; + map proto2_value = 13; google.protobuf.Any anything = 14; repeated google.protobuf.Any many_things = 15; Message submessage = 17; repeated Message children = 18; + + map string_map = 20; } message Nested { diff --git a/vendor/github.com/golang/protobuf/proto/proto3_test.go b/vendor/github.com/golang/protobuf/proto/proto3_test.go index 735837f2..73eed6c0 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_test.go +++ b/vendor/github.com/golang/protobuf/proto/proto3_test.go @@ -32,11 +32,12 @@ package proto_test import ( + "bytes" "testing" "github.com/golang/protobuf/proto" pb "github.com/golang/protobuf/proto/proto3_proto" - tpb "github.com/golang/protobuf/proto/testdata" + tpb "github.com/golang/protobuf/proto/test_proto" ) func TestProto3ZeroValues(t *testing.T) { @@ -133,3 +134,18 @@ func TestProto3SetDefaults(t *testing.T) { t.Errorf("with in = %v\nproto.SetDefaults(in) =>\ngot %v\nwant %v", in, got, want) } } + +func TestUnknownFieldPreservation(t *testing.T) { + b1 := "\x0a\x05David" // Known tag 1 + b2 := "\xc2\x0c\x06Google" // Unknown tag 200 + b := []byte(b1 + b2) + + m := new(pb.Message) + if err := proto.Unmarshal(b, m); err != nil { + t.Fatalf("proto.Unmarshal: %v", err) + } + + if !bytes.Equal(m.XXX_unrecognized, []byte(b2)) { + t.Fatalf("mismatching unknown fields:\ngot %q\nwant %q", m.XXX_unrecognized, b2) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/size2_test.go b/vendor/github.com/golang/protobuf/proto/size2_test.go index a2729c39..7846b061 100644 --- a/vendor/github.com/golang/protobuf/proto/size2_test.go +++ b/vendor/github.com/golang/protobuf/proto/size2_test.go @@ -55,7 +55,7 @@ func TestVarintSize(t *testing.T) { {1 << 63, 10}, } for _, tc := range testCases { - size := sizeVarint(tc.n) + size := SizeVarint(tc.n) if size != tc.size { t.Errorf("sizeVarint(%d) = %d, want %d", tc.n, size, tc.size) } diff --git a/vendor/github.com/golang/protobuf/proto/size_test.go b/vendor/github.com/golang/protobuf/proto/size_test.go index af1034dc..3abac418 100644 --- a/vendor/github.com/golang/protobuf/proto/size_test.go +++ b/vendor/github.com/golang/protobuf/proto/size_test.go @@ -38,7 +38,7 @@ import ( . 
"github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) var messageWithExtension1 = &pb.MyMessage{Count: Int32(7)} @@ -59,6 +59,30 @@ func init() { } +// non-pointer custom message +type nonptrMessage struct{} + +func (m nonptrMessage) ProtoMessage() {} +func (m nonptrMessage) Reset() {} +func (m nonptrMessage) String() string { return "" } + +func (m nonptrMessage) Marshal() ([]byte, error) { + return []byte{42}, nil +} + +// custom message embedding a proto.Message +type messageWithEmbedding struct { + *pb.OtherMessage +} + +func (m *messageWithEmbedding) ProtoMessage() {} +func (m *messageWithEmbedding) Reset() {} +func (m *messageWithEmbedding) String() string { return "" } + +func (m *messageWithEmbedding) Marshal() ([]byte, error) { + return []byte{42}, nil +} + var SizeTests = []struct { desc string pb Message @@ -146,6 +170,9 @@ var SizeTests = []struct { {"oneof group", &pb.Oneof{Union: &pb.Oneof_FGroup{&pb.Oneof_F_Group{X: Int32(52)}}}}, {"oneof largest tag", &pb.Oneof{Union: &pb.Oneof_F_Largest_Tag{1}}}, {"multiple oneofs", &pb.Oneof{Union: &pb.Oneof_F_Int32{1}, Tormato: &pb.Oneof_Value{2}}}, + + {"non-pointer message", nonptrMessage{}}, + {"custom message with embedding", &messageWithEmbedding{&pb.OtherMessage{}}}, } func TestSize(t *testing.T) { diff --git a/vendor/github.com/golang/protobuf/proto/table_marshal.go b/vendor/github.com/golang/protobuf/proto/table_marshal.go new file mode 100644 index 00000000..0f212b30 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_marshal.go @@ -0,0 +1,2681 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package proto + +import ( + "errors" + "fmt" + "math" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "sync/atomic" + "unicode/utf8" +) + +// a sizer takes a pointer to a field and the size of its tag, computes the size of +// the encoded data. +type sizer func(pointer, int) int + +// a marshaler takes a byte slice, a pointer to a field, and its tag (in wire format), +// marshals the field to the end of the slice, returns the slice and error (if any). +type marshaler func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) + +// marshalInfo is the information used for marshaling a message. +type marshalInfo struct { + typ reflect.Type + fields []*marshalFieldInfo + unrecognized field // offset of XXX_unrecognized + extensions field // offset of XXX_InternalExtensions + v1extensions field // offset of XXX_extensions + sizecache field // offset of XXX_sizecache + initialized int32 // 0 -- only typ is set, 1 -- fully initialized + messageset bool // uses message set wire format + hasmarshaler bool // has custom marshaler + sync.RWMutex // protect extElems map, also for initialization + extElems map[int32]*marshalElemInfo // info of extension elements +} + +// marshalFieldInfo is the information used for marshaling a field of a message. +type marshalFieldInfo struct { + field field + wiretag uint64 // tag in wire format + tagsize int // size of tag in wire format + sizer sizer + marshaler marshaler + isPointer bool + required bool // field is required + name string // name of the field, for error reporting + oneofElems map[reflect.Type]*marshalElemInfo // info of oneof elements +} + +// marshalElemInfo is the information used for marshaling an extension or oneof element. +type marshalElemInfo struct { + wiretag uint64 // tag in wire format + tagsize int // size of tag in wire format + sizer sizer + marshaler marshaler + isptr bool // elem is pointer typed, thus interface of this type is a direct interface (extension only) +} + +var ( + marshalInfoMap = map[reflect.Type]*marshalInfo{} + marshalInfoLock sync.Mutex +) + +// getMarshalInfo returns the information to marshal a given type of message. +// The info it returns may not necessarily initialized. +// t is the type of the message (NOT the pointer to it). +func getMarshalInfo(t reflect.Type) *marshalInfo { + marshalInfoLock.Lock() + u, ok := marshalInfoMap[t] + if !ok { + u = &marshalInfo{typ: t} + marshalInfoMap[t] = u + } + marshalInfoLock.Unlock() + return u +} + +// Size is the entry point from generated code, +// and should be ONLY called by generated code. +// It computes the size of encoded data of msg. +// a is a pointer to a place to store cached marshal info. +func (a *InternalMessageInfo) Size(msg Message) int { + u := getMessageMarshalInfo(msg, a) + ptr := toPointer(&msg) + if ptr.isNil() { + // We get here if msg is a typed nil ((*SomeMessage)(nil)), + // so it satisfies the interface, and msg == nil wouldn't + // catch it. We don't want crash in this case. + return 0 + } + return u.size(ptr) +} + +// Marshal is the entry point from generated code, +// and should be ONLY called by generated code. +// It marshals msg to the end of b. +// a is a pointer to a place to store cached marshal info. 
+func (a *InternalMessageInfo) Marshal(b []byte, msg Message, deterministic bool) ([]byte, error) { + u := getMessageMarshalInfo(msg, a) + ptr := toPointer(&msg) + if ptr.isNil() { + // We get here if msg is a typed nil ((*SomeMessage)(nil)), + // so it satisfies the interface, and msg == nil wouldn't + // catch it. We don't want crash in this case. + return b, ErrNil + } + return u.marshal(b, ptr, deterministic) +} + +func getMessageMarshalInfo(msg interface{}, a *InternalMessageInfo) *marshalInfo { + // u := a.marshal, but atomically. + // We use an atomic here to ensure memory consistency. + u := atomicLoadMarshalInfo(&a.marshal) + if u == nil { + // Get marshal information from type of message. + t := reflect.ValueOf(msg).Type() + if t.Kind() != reflect.Ptr { + panic(fmt.Sprintf("cannot handle non-pointer message type %v", t)) + } + u = getMarshalInfo(t.Elem()) + // Store it in the cache for later users. + // a.marshal = u, but atomically. + atomicStoreMarshalInfo(&a.marshal, u) + } + return u +} + +// size is the main function to compute the size of the encoded data of a message. +// ptr is the pointer to the message. +func (u *marshalInfo) size(ptr pointer) int { + if atomic.LoadInt32(&u.initialized) == 0 { + u.computeMarshalInfo() + } + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + if u.hasmarshaler { + m := ptr.asPointerTo(u.typ).Interface().(Marshaler) + b, _ := m.Marshal() + return len(b) + } + + n := 0 + for _, f := range u.fields { + if f.isPointer && ptr.offset(f.field).getPointer().isNil() { + // nil pointer always marshals to nothing + continue + } + n += f.sizer(ptr.offset(f.field), f.tagsize) + } + if u.extensions.IsValid() { + e := ptr.offset(u.extensions).toExtensions() + if u.messageset { + n += u.sizeMessageSet(e) + } else { + n += u.sizeExtensions(e) + } + } + if u.v1extensions.IsValid() { + m := *ptr.offset(u.v1extensions).toOldExtensions() + n += u.sizeV1Extensions(m) + } + if u.unrecognized.IsValid() { + s := *ptr.offset(u.unrecognized).toBytes() + n += len(s) + } + // cache the result for use in marshal + if u.sizecache.IsValid() { + atomic.StoreInt32(ptr.offset(u.sizecache).toInt32(), int32(n)) + } + return n +} + +// cachedsize gets the size from cache. If there is no cache (i.e. message is not generated), +// fall back to compute the size. +func (u *marshalInfo) cachedsize(ptr pointer) int { + if u.sizecache.IsValid() { + return int(atomic.LoadInt32(ptr.offset(u.sizecache).toInt32())) + } + return u.size(ptr) +} + +// marshal is the main function to marshal a message. It takes a byte slice and appends +// the encoded data to the end of the slice, returns the slice and error (if any). +// ptr is the pointer to the message. +// If deterministic is true, map is marshaled in deterministic order. +func (u *marshalInfo) marshal(b []byte, ptr pointer, deterministic bool) ([]byte, error) { + if atomic.LoadInt32(&u.initialized) == 0 { + u.computeMarshalInfo() + } + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + if u.hasmarshaler { + m := ptr.asPointerTo(u.typ).Interface().(Marshaler) + b1, err := m.Marshal() + b = append(b, b1...) + return b, err + } + + var err, errreq error + // The old marshaler encodes extensions at beginning. 
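+// Aside (illustrative sketch, not part of the vendored file): the ptr.isNil()
+// guards in the Size and Marshal entry points above exist because a typed nil
+// such as (*SomeMessage)(nil) still satisfies the Message interface, so a plain
+// `msg == nil` comparison would not catch it. A minimal self-contained example
+// of that Go behaviour:
+//
+//	package main
+//
+//	import "fmt"
+//
+//	type I interface{ M() }
+//
+//	type T struct{}
+//
+//	func (*T) M() {}
+//
+//	func main() {
+//		var p *T              // nil pointer
+//		var i I = p           // interface value holding a typed nil
+//		fmt.Println(i == nil) // false: the interface carries a type, so it is non-nil
+//		fmt.Println(p == nil) // true: the underlying pointer is nil
+//	}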
+ if u.extensions.IsValid() { + e := ptr.offset(u.extensions).toExtensions() + if u.messageset { + b, err = u.appendMessageSet(b, e, deterministic) + } else { + b, err = u.appendExtensions(b, e, deterministic) + } + if err != nil { + return b, err + } + } + if u.v1extensions.IsValid() { + m := *ptr.offset(u.v1extensions).toOldExtensions() + b, err = u.appendV1Extensions(b, m, deterministic) + if err != nil { + return b, err + } + } + for _, f := range u.fields { + if f.required && errreq == nil { + if ptr.offset(f.field).getPointer().isNil() { + // Required field is not set. + // We record the error but keep going, to give a complete marshaling. + errreq = &RequiredNotSetError{f.name} + continue + } + } + if f.isPointer && ptr.offset(f.field).getPointer().isNil() { + // nil pointer always marshals to nothing + continue + } + b, err = f.marshaler(b, ptr.offset(f.field), f.wiretag, deterministic) + if err != nil { + if err1, ok := err.(*RequiredNotSetError); ok { + // Required field in submessage is not set. + // We record the error but keep going, to give a complete marshaling. + if errreq == nil { + errreq = &RequiredNotSetError{f.name + "." + err1.field} + } + continue + } + if err == errRepeatedHasNil { + err = errors.New("proto: repeated field " + f.name + " has nil element") + } + return b, err + } + } + if u.unrecognized.IsValid() { + s := *ptr.offset(u.unrecognized).toBytes() + b = append(b, s...) + } + return b, errreq +} + +// computeMarshalInfo initializes the marshal info. +func (u *marshalInfo) computeMarshalInfo() { + u.Lock() + defer u.Unlock() + if u.initialized != 0 { // non-atomic read is ok as it is protected by the lock + return + } + + t := u.typ + u.unrecognized = invalidField + u.extensions = invalidField + u.v1extensions = invalidField + u.sizecache = invalidField + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + if reflect.PtrTo(t).Implements(marshalerType) { + u.hasmarshaler = true + atomic.StoreInt32(&u.initialized, 1) + return + } + + // get oneof implementers + var oneofImplementers []interface{} + if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { + _, _, _, oneofImplementers = m.XXX_OneofFuncs() + } + + n := t.NumField() + + // deal with XXX fields first + for i := 0; i < t.NumField(); i++ { + f := t.Field(i) + if !strings.HasPrefix(f.Name, "XXX_") { + continue + } + switch f.Name { + case "XXX_sizecache": + u.sizecache = toField(&f) + case "XXX_unrecognized": + u.unrecognized = toField(&f) + case "XXX_InternalExtensions": + u.extensions = toField(&f) + u.messageset = f.Tag.Get("protobuf_messageset") == "1" + case "XXX_extensions": + u.v1extensions = toField(&f) + case "XXX_NoUnkeyedLiteral": + // nothing to do + default: + panic("unknown XXX field: " + f.Name) + } + n-- + } + + // normal fields + fields := make([]marshalFieldInfo, n) // batch allocation + u.fields = make([]*marshalFieldInfo, 0, n) + for i, j := 0, 0; i < t.NumField(); i++ { + f := t.Field(i) + + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + field := &fields[j] + j++ + field.name = f.Name + u.fields = append(u.fields, field) + if f.Tag.Get("protobuf_oneof") != "" { + field.computeOneofFieldInfo(&f, oneofImplementers) + continue + } + if f.Tag.Get("protobuf") == "" { + // field has no tag (not in generated message), ignore it + u.fields = u.fields[:len(u.fields)-1] + j-- + continue + } + field.computeMarshalFieldInfo(&f) + } + + // fields are marshaled in tag order on the wire. 
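+// Aside (illustrative sketch, not part of the vendored file): each field's
+// wiretag is precomputed as uint64(tag)<<3 | wiretype, so sorting by wiretag
+// below is equivalent to sorting by field number. A minimal example, assuming
+// the standard wire-type values (varint=0, fixed32=5, bytes=2):
+//
+//	package main
+//
+//	import (
+//		"fmt"
+//		"sort"
+//	)
+//
+//	func main() {
+//		// (field number, wire type) pairs in arbitrary order.
+//		wiretags := []uint64{
+//			3<<3 | 0, // field 3, varint  -> 24
+//			1<<3 | 2, // field 1, bytes   -> 10
+//			2<<3 | 5, // field 2, fixed32 -> 21
+//		}
+//		sort.Slice(wiretags, func(i, j int) bool { return wiretags[i] < wiretags[j] })
+//		fmt.Println(wiretags) // [10 21 24], i.e. fields 1, 2, 3
+//	}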
+ sort.Sort(byTag(u.fields)) + + atomic.StoreInt32(&u.initialized, 1) +} + +// helper for sorting fields by tag +type byTag []*marshalFieldInfo + +func (a byTag) Len() int { return len(a) } +func (a byTag) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byTag) Less(i, j int) bool { return a[i].wiretag < a[j].wiretag } + +// getExtElemInfo returns the information to marshal an extension element. +// The info it returns is initialized. +func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo { + // get from cache first + u.RLock() + e, ok := u.extElems[desc.Field] + u.RUnlock() + if ok { + return e + } + + t := reflect.TypeOf(desc.ExtensionType) // pointer or slice to basic type or struct + tags := strings.Split(desc.Tag, ",") + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + sizer, marshaler := typeMarshaler(t, tags, false, false) + e = &marshalElemInfo{ + wiretag: uint64(tag)<<3 | wt, + tagsize: SizeVarint(uint64(tag) << 3), + sizer: sizer, + marshaler: marshaler, + isptr: t.Kind() == reflect.Ptr, + } + + // update cache + u.Lock() + if u.extElems == nil { + u.extElems = make(map[int32]*marshalElemInfo) + } + u.extElems[desc.Field] = e + u.Unlock() + return e +} + +// computeMarshalFieldInfo fills up the information to marshal a field. +func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) { + // parse protobuf tag of the field. + // tag has format of "bytes,49,opt,name=foo,def=hello!" + tags := strings.Split(f.Tag.Get("protobuf"), ",") + if tags[0] == "" { + return + } + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + if tags[2] == "req" { + fi.required = true + } + fi.setTag(f, tag, wt) + fi.setMarshaler(f, tags) +} + +func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) { + fi.field = toField(f) + fi.wiretag = 1<<31 - 1 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire. + fi.isPointer = true + fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f) + fi.oneofElems = make(map[reflect.Type]*marshalElemInfo) + + ityp := f.Type // interface type + for _, o := range oneofImplementers { + t := reflect.TypeOf(o) + if !t.Implements(ityp) { + continue + } + sf := t.Elem().Field(0) // oneof implementer is a struct with a single field + tags := strings.Split(sf.Tag.Get("protobuf"), ",") + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + sizer, marshaler := typeMarshaler(sf.Type, tags, false, true) // oneof should not omit any zero value + fi.oneofElems[t.Elem()] = &marshalElemInfo{ + wiretag: uint64(tag)<<3 | wt, + tagsize: SizeVarint(uint64(tag) << 3), + sizer: sizer, + marshaler: marshaler, + } + } +} + +type oneofMessage interface { + XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) +} + +// wiretype returns the wire encoding of the type. +func wiretype(encoding string) uint64 { + switch encoding { + case "fixed32": + return WireFixed32 + case "fixed64": + return WireFixed64 + case "varint", "zigzag32", "zigzag64": + return WireVarint + case "bytes": + return WireBytes + case "group": + return WireStartGroup + } + panic("unknown wire type " + encoding) +} + +// setTag fills up the tag (in wire format) and its size in the info of a field. 
+func (fi *marshalFieldInfo) setTag(f *reflect.StructField, tag int, wt uint64) { + fi.field = toField(f) + fi.wiretag = uint64(tag)<<3 | wt + fi.tagsize = SizeVarint(uint64(tag) << 3) +} + +// setMarshaler fills up the sizer and marshaler in the info of a field. +func (fi *marshalFieldInfo) setMarshaler(f *reflect.StructField, tags []string) { + switch f.Type.Kind() { + case reflect.Map: + // map field + fi.isPointer = true + fi.sizer, fi.marshaler = makeMapMarshaler(f) + return + case reflect.Ptr, reflect.Slice: + fi.isPointer = true + } + fi.sizer, fi.marshaler = typeMarshaler(f.Type, tags, true, false) +} + +// typeMarshaler returns the sizer and marshaler of a given field. +// t is the type of the field. +// tags is the generated "protobuf" tag of the field. +// If nozero is true, zero value is not marshaled to the wire. +// If oneof is true, it is a oneof field. +func typeMarshaler(t reflect.Type, tags []string, nozero, oneof bool) (sizer, marshaler) { + encoding := tags[0] + + pointer := false + slice := false + if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { + slice = true + t = t.Elem() + } + if t.Kind() == reflect.Ptr { + pointer = true + t = t.Elem() + } + + packed := false + proto3 := false + for i := 2; i < len(tags); i++ { + if tags[i] == "packed" { + packed = true + } + if tags[i] == "proto3" { + proto3 = true + } + } + + switch t.Kind() { + case reflect.Bool: + if pointer { + return sizeBoolPtr, appendBoolPtr + } + if slice { + if packed { + return sizeBoolPackedSlice, appendBoolPackedSlice + } + return sizeBoolSlice, appendBoolSlice + } + if nozero { + return sizeBoolValueNoZero, appendBoolValueNoZero + } + return sizeBoolValue, appendBoolValue + case reflect.Uint32: + switch encoding { + case "fixed32": + if pointer { + return sizeFixed32Ptr, appendFixed32Ptr + } + if slice { + if packed { + return sizeFixed32PackedSlice, appendFixed32PackedSlice + } + return sizeFixed32Slice, appendFixed32Slice + } + if nozero { + return sizeFixed32ValueNoZero, appendFixed32ValueNoZero + } + return sizeFixed32Value, appendFixed32Value + case "varint": + if pointer { + return sizeVarint32Ptr, appendVarint32Ptr + } + if slice { + if packed { + return sizeVarint32PackedSlice, appendVarint32PackedSlice + } + return sizeVarint32Slice, appendVarint32Slice + } + if nozero { + return sizeVarint32ValueNoZero, appendVarint32ValueNoZero + } + return sizeVarint32Value, appendVarint32Value + } + case reflect.Int32: + switch encoding { + case "fixed32": + if pointer { + return sizeFixedS32Ptr, appendFixedS32Ptr + } + if slice { + if packed { + return sizeFixedS32PackedSlice, appendFixedS32PackedSlice + } + return sizeFixedS32Slice, appendFixedS32Slice + } + if nozero { + return sizeFixedS32ValueNoZero, appendFixedS32ValueNoZero + } + return sizeFixedS32Value, appendFixedS32Value + case "varint": + if pointer { + return sizeVarintS32Ptr, appendVarintS32Ptr + } + if slice { + if packed { + return sizeVarintS32PackedSlice, appendVarintS32PackedSlice + } + return sizeVarintS32Slice, appendVarintS32Slice + } + if nozero { + return sizeVarintS32ValueNoZero, appendVarintS32ValueNoZero + } + return sizeVarintS32Value, appendVarintS32Value + case "zigzag32": + if pointer { + return sizeZigzag32Ptr, appendZigzag32Ptr + } + if slice { + if packed { + return sizeZigzag32PackedSlice, appendZigzag32PackedSlice + } + return sizeZigzag32Slice, appendZigzag32Slice + } + if nozero { + return sizeZigzag32ValueNoZero, appendZigzag32ValueNoZero + } + return sizeZigzag32Value, appendZigzag32Value + } + 
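+// Aside (illustrative sketch, not part of the vendored file): the zigzag32
+// sizers and appenders selected above use the mapping (uint32(v)<<1)^uint32(v>>31),
+// which folds small negative numbers into small unsigned values so they encode
+// as short varints. A minimal example of the mapping:
+//
+//	package main
+//
+//	import "fmt"
+//
+//	func zigzag32(v int32) uint32 {
+//		return (uint32(v) << 1) ^ uint32(v>>31)
+//	}
+//
+//	func main() {
+//		for _, v := range []int32{0, -1, 1, -2, 2} {
+//			fmt.Println(v, "->", zigzag32(v)) // 0->0, -1->1, 1->2, -2->3, 2->4
+//		}
+//	}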
case reflect.Uint64: + switch encoding { + case "fixed64": + if pointer { + return sizeFixed64Ptr, appendFixed64Ptr + } + if slice { + if packed { + return sizeFixed64PackedSlice, appendFixed64PackedSlice + } + return sizeFixed64Slice, appendFixed64Slice + } + if nozero { + return sizeFixed64ValueNoZero, appendFixed64ValueNoZero + } + return sizeFixed64Value, appendFixed64Value + case "varint": + if pointer { + return sizeVarint64Ptr, appendVarint64Ptr + } + if slice { + if packed { + return sizeVarint64PackedSlice, appendVarint64PackedSlice + } + return sizeVarint64Slice, appendVarint64Slice + } + if nozero { + return sizeVarint64ValueNoZero, appendVarint64ValueNoZero + } + return sizeVarint64Value, appendVarint64Value + } + case reflect.Int64: + switch encoding { + case "fixed64": + if pointer { + return sizeFixedS64Ptr, appendFixedS64Ptr + } + if slice { + if packed { + return sizeFixedS64PackedSlice, appendFixedS64PackedSlice + } + return sizeFixedS64Slice, appendFixedS64Slice + } + if nozero { + return sizeFixedS64ValueNoZero, appendFixedS64ValueNoZero + } + return sizeFixedS64Value, appendFixedS64Value + case "varint": + if pointer { + return sizeVarintS64Ptr, appendVarintS64Ptr + } + if slice { + if packed { + return sizeVarintS64PackedSlice, appendVarintS64PackedSlice + } + return sizeVarintS64Slice, appendVarintS64Slice + } + if nozero { + return sizeVarintS64ValueNoZero, appendVarintS64ValueNoZero + } + return sizeVarintS64Value, appendVarintS64Value + case "zigzag64": + if pointer { + return sizeZigzag64Ptr, appendZigzag64Ptr + } + if slice { + if packed { + return sizeZigzag64PackedSlice, appendZigzag64PackedSlice + } + return sizeZigzag64Slice, appendZigzag64Slice + } + if nozero { + return sizeZigzag64ValueNoZero, appendZigzag64ValueNoZero + } + return sizeZigzag64Value, appendZigzag64Value + } + case reflect.Float32: + if pointer { + return sizeFloat32Ptr, appendFloat32Ptr + } + if slice { + if packed { + return sizeFloat32PackedSlice, appendFloat32PackedSlice + } + return sizeFloat32Slice, appendFloat32Slice + } + if nozero { + return sizeFloat32ValueNoZero, appendFloat32ValueNoZero + } + return sizeFloat32Value, appendFloat32Value + case reflect.Float64: + if pointer { + return sizeFloat64Ptr, appendFloat64Ptr + } + if slice { + if packed { + return sizeFloat64PackedSlice, appendFloat64PackedSlice + } + return sizeFloat64Slice, appendFloat64Slice + } + if nozero { + return sizeFloat64ValueNoZero, appendFloat64ValueNoZero + } + return sizeFloat64Value, appendFloat64Value + case reflect.String: + if pointer { + return sizeStringPtr, appendStringPtr + } + if slice { + return sizeStringSlice, appendStringSlice + } + if nozero { + return sizeStringValueNoZero, appendStringValueNoZero + } + return sizeStringValue, appendStringValue + case reflect.Slice: + if slice { + return sizeBytesSlice, appendBytesSlice + } + if oneof { + // Oneof bytes field may also have "proto3" tag. + // We want to marshal it as a oneof field. Do this + // check before the proto3 check. 
+ return sizeBytesOneof, appendBytesOneof + } + if proto3 { + return sizeBytes3, appendBytes3 + } + return sizeBytes, appendBytes + case reflect.Struct: + switch encoding { + case "group": + if slice { + return makeGroupSliceMarshaler(getMarshalInfo(t)) + } + return makeGroupMarshaler(getMarshalInfo(t)) + case "bytes": + if slice { + return makeMessageSliceMarshaler(getMarshalInfo(t)) + } + return makeMessageMarshaler(getMarshalInfo(t)) + } + } + panic(fmt.Sprintf("unknown or mismatched type: type: %v, wire type: %v", t, encoding)) +} + +// Below are functions to size/marshal a specific type of a field. +// They are stored in the field's info, and called by function pointers. +// They have type sizer or marshaler. + +func sizeFixed32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFixed32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFixed32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFixed32Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + return (4 + tagsize) * len(s) +} +func sizeFixed32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFixedS32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFixedS32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFixedS32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFixedS32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + return (4 + tagsize) * len(s) +} +func sizeFixedS32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFloat32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFloat32ValueNoZero(ptr pointer, tagsize int) int { + v := math.Float32bits(*ptr.toFloat32()) + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFloat32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toFloat32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFloat32Slice(ptr pointer, tagsize int) int { + s := *ptr.toFloat32Slice() + return (4 + tagsize) * len(s) +} +func sizeFloat32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toFloat32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFixed64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFixed64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + if v == 0 { + return 0 + } + return 8 + tagsize +} +func sizeFixed64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFixed64Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + return (8 + tagsize) * len(s) +} +func sizeFixed64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeFixedS64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFixedS64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return 8 
+ tagsize +} +func sizeFixedS64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFixedS64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + return (8 + tagsize) * len(s) +} +func sizeFixedS64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeFloat64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFloat64ValueNoZero(ptr pointer, tagsize int) int { + v := math.Float64bits(*ptr.toFloat64()) + if v == 0 { + return 0 + } + return 8 + tagsize +} +func sizeFloat64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toFloat64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFloat64Slice(ptr pointer, tagsize int) int { + s := *ptr.toFloat64Slice() + return (8 + tagsize) * len(s) +} +func sizeFloat64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toFloat64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeVarint32Value(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarint32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarint32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint32Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarint32Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func sizeVarint32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarintS32Value(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarintS32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func sizeVarintS32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarint64Value(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + return SizeVarint(v) + tagsize +} +func sizeVarint64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + if v == 0 { + return 0 + } + return SizeVarint(v) + tagsize +} +func sizeVarint64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint64Ptr() + if p == nil { + return 0 + } + return SizeVarint(*p) + tagsize +} +func sizeVarint64Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(v) + tagsize + } + return n +} +func sizeVarint64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s 
{ + n += SizeVarint(v) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarintS64Value(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarintS64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func sizeVarintS64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeZigzag32Value(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + v := *p + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize + } + return n +} +func sizeZigzag32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeZigzag64Value(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + v := *p + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize + } + return n +} +func sizeZigzag64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeBoolValue(_ pointer, tagsize int) int { + return 1 + tagsize +} +func sizeBoolValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toBool() + if !v { + return 0 + } + return 1 + tagsize +} +func sizeBoolPtr(ptr pointer, tagsize int) int { + p := *ptr.toBoolPtr() + if p == nil { + return 0 + } + return 1 + tagsize +} +func sizeBoolSlice(ptr pointer, tagsize int) int { + s := *ptr.toBoolSlice() + return (1 + tagsize) * len(s) +} +func sizeBoolPackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toBoolSlice() + if len(s) == 0 { + return 0 + } + return len(s) + 
SizeVarint(uint64(len(s))) + tagsize +} +func sizeStringValue(ptr pointer, tagsize int) int { + v := *ptr.toString() + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toString() + if v == "" { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringPtr(ptr pointer, tagsize int) int { + p := *ptr.toStringPtr() + if p == nil { + return 0 + } + v := *p + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringSlice(ptr pointer, tagsize int) int { + s := *ptr.toStringSlice() + n := 0 + for _, v := range s { + n += len(v) + SizeVarint(uint64(len(v))) + tagsize + } + return n +} +func sizeBytes(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + if v == nil { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytes3(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + if len(v) == 0 { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytesOneof(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytesSlice(ptr pointer, tagsize int) int { + s := *ptr.toBytesSlice() + n := 0 + for _, v := range s { + n += len(v) + SizeVarint(uint64(len(v))) + tagsize + } + return n +} + +// appendFixed32 appends an encoded fixed32 to b. +func appendFixed32(b []byte, v uint32) []byte { + b = append(b, + byte(v), + byte(v>>8), + byte(v>>16), + byte(v>>24)) + return b +} + +// appendFixed64 appends an encoded fixed64 to b. +func appendFixed64(b []byte, v uint64) []byte { + b = append(b, + byte(v), + byte(v>>8), + byte(v>>16), + byte(v>>24), + byte(v>>32), + byte(v>>40), + byte(v>>48), + byte(v>>56)) + return b +} + +// appendVarint appends an encoded varint to b. +func appendVarint(b []byte, v uint64) []byte { + // TODO: make 1-byte (maybe 2-byte) case inline-able, once we + // have non-leaf inliner. 
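+// Aside (illustrative sketch, not part of the vendored file): the switch below
+// unrolls the varint encoding into one append per possible length. For example,
+// 300 is 0b1_0010_1100, which needs two 7-bit groups: the low seven bits with
+// the continuation bit set (0xAC), then the remaining bits (0x02). The same
+// result from the generic loop form:
+//
+//	package main
+//
+//	import "fmt"
+//
+//	func appendVarintSlow(b []byte, v uint64) []byte {
+//		for v >= 1<<7 {
+//			b = append(b, byte(v&0x7f|0x80)) // low 7 bits, continuation bit set
+//			v >>= 7
+//		}
+//		return append(b, byte(v)) // final group, continuation bit clear
+//	}
+//
+//	func main() {
+//		fmt.Printf("% x\n", appendVarintSlow(nil, 300)) // prints "ac 02"
+//	}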
+ switch { + case v < 1<<7: + b = append(b, byte(v)) + case v < 1<<14: + b = append(b, + byte(v&0x7f|0x80), + byte(v>>7)) + case v < 1<<21: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte(v>>14)) + case v < 1<<28: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte(v>>21)) + case v < 1<<35: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte(v>>28)) + case v < 1<<42: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte(v>>35)) + case v < 1<<49: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte(v>>42)) + case v < 1<<56: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte(v>>49)) + case v < 1<<63: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte(v>>56)) + default: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte((v>>56)&0x7f|0x80), + 1) + } + return b +} + +func appendFixed32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFixed32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFixed32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, *p) + return b, nil +} +func appendFixed32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + } + return b, nil +} +func appendFixed32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, v) + } + return b, nil +} +func appendFixedS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + return b, nil +} +func appendFixedS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + return b, nil +} +func appendFixedS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(*p)) + return b, nil +} +func appendFixedS32Slice(b []byte, ptr pointer, 
wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + } + return b, nil +} +func appendFixedS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, uint32(v)) + } + return b, nil +} +func appendFloat32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float32bits(*ptr.toFloat32()) + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFloat32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float32bits(*ptr.toFloat32()) + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFloat32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toFloat32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, math.Float32bits(*p)) + return b, nil +} +func appendFloat32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, math.Float32bits(v)) + } + return b, nil +} +func appendFloat32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, math.Float32bits(v)) + } + return b, nil +} +func appendFixed64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFixed64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFixed64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, *p) + return b, nil +} +func appendFixed64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + } + return b, nil +} +func appendFixed64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, v) + } + return b, nil +} +func appendFixedS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + return b, nil +} +func appendFixedS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + return b, nil +} +func appendFixedS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, 
nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(*p)) + return b, nil +} +func appendFixedS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + } + return b, nil +} +func appendFixedS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, uint64(v)) + } + return b, nil +} +func appendFloat64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float64bits(*ptr.toFloat64()) + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFloat64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float64bits(*ptr.toFloat64()) + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFloat64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toFloat64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, math.Float64bits(*p)) + return b, nil +} +func appendFloat64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, math.Float64bits(v)) + } + return b, nil +} +func appendFloat64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, math.Float64bits(v)) + } + return b, nil +} +func appendVarint32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarint32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarint32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarint32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarint32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() 
+ if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarintS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarint64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + return b, nil +} +func appendVarint64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + return b, nil +} +func appendVarint64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, *p) + return b, nil +} +func appendVarint64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + } + return b, nil +} +func appendVarint64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(v) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, v) + } + return b, nil +} +func appendVarintS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarintS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func 
appendZigzag32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + v := *p + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + } + return b, nil +} +func appendZigzag32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + } + return b, nil +} +func appendZigzag64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func appendZigzag64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func appendZigzag64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + v := *p + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func appendZigzag64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + } + return b, nil +} +func appendZigzag64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + } + return b, nil +} +func appendBoolValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBool() + b = appendVarint(b, wiretag) + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + return b, nil +} +func appendBoolValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBool() + if !v { + return b, nil + } + b = appendVarint(b, wiretag) + b = append(b, 1) + return b, nil +} + +func appendBoolPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toBoolPtr() + if p == nil { + 
return b, nil + } + b = appendVarint(b, wiretag) + if *p { + b = append(b, 1) + } else { + b = append(b, 0) + } + return b, nil +} +func appendBoolSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBoolSlice() + for _, v := range s { + b = appendVarint(b, wiretag) + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + } + return b, nil +} +func appendBoolPackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBoolSlice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(len(s))) + for _, v := range s { + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + } + return b, nil +} +func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toString() + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toString() + if v == "" { + return b, nil + } + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toStringPtr() + if p == nil { + return b, nil + } + v := *p + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toStringSlice() + for _, v := range s { + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + } + return b, nil +} +func appendBytes(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + if v == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytes3(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + if len(v) == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytesOneof(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytesSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBytesSlice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + } + return b, nil +} + +// makeGroupMarshaler returns the sizer and marshaler for a group. +// u is the marshal info of the underlying message. 
+func makeGroupMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + p := ptr.getPointer() + if p.isNil() { + return 0 + } + return u.size(p) + 2*tagsize + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + p := ptr.getPointer() + if p.isNil() { + return b, nil + } + var err error + b = appendVarint(b, wiretag) // start group + b, err = u.marshal(b, p, deterministic) + b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group + return b, err + } +} + +// makeGroupSliceMarshaler returns the sizer and marshaler for a group slice. +// u is the marshal info of the underlying message. +func makeGroupSliceMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + s := ptr.getPointerSlice() + n := 0 + for _, v := range s { + if v.isNil() { + continue + } + n += u.size(v) + 2*tagsize + } + return n + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + s := ptr.getPointerSlice() + var err, errreq error + for _, v := range s { + if v.isNil() { + return b, errRepeatedHasNil + } + b = appendVarint(b, wiretag) // start group + b, err = u.marshal(b, v, deterministic) + b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group + if err != nil { + if _, ok := err.(*RequiredNotSetError); ok { + // Required field in submessage is not set. + // We record the error but keep going, to give a complete marshaling. + if errreq == nil { + errreq = err + } + continue + } + if err == ErrNil { + err = errRepeatedHasNil + } + return b, err + } + } + return b, errreq + } +} + +// makeMessageMarshaler returns the sizer and marshaler for a message field. +// u is the marshal info of the message. +func makeMessageMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + p := ptr.getPointer() + if p.isNil() { + return 0 + } + siz := u.size(p) + return siz + SizeVarint(uint64(siz)) + tagsize + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + p := ptr.getPointer() + if p.isNil() { + return b, nil + } + b = appendVarint(b, wiretag) + siz := u.cachedsize(p) + b = appendVarint(b, uint64(siz)) + return u.marshal(b, p, deterministic) + } +} + +// makeMessageSliceMarshaler returns the sizer and marshaler for a message slice. +// u is the marshal info of the message. +func makeMessageSliceMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + s := ptr.getPointerSlice() + n := 0 + for _, v := range s { + if v.isNil() { + continue + } + siz := u.size(v) + n += siz + SizeVarint(uint64(siz)) + tagsize + } + return n + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + s := ptr.getPointerSlice() + var err, errreq error + for _, v := range s { + if v.isNil() { + return b, errRepeatedHasNil + } + b = appendVarint(b, wiretag) + siz := u.cachedsize(v) + b = appendVarint(b, uint64(siz)) + b, err = u.marshal(b, v, deterministic) + + if err != nil { + if _, ok := err.(*RequiredNotSetError); ok { + // Required field in submessage is not set. + // We record the error but keep going, to give a complete marshaling. + if errreq == nil { + errreq = err + } + continue + } + if err == ErrNil { + err = errRepeatedHasNil + } + return b, err + } + } + return b, errreq + } +} + +// makeMapMarshaler returns the sizer and marshaler for a map field. +// f is the pointer to the reflect data structure of the field. 
+func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) { + // figure out key and value type + t := f.Type + keyType := t.Key() + valType := t.Elem() + keyTags := strings.Split(f.Tag.Get("protobuf_key"), ",") + valTags := strings.Split(f.Tag.Get("protobuf_val"), ",") + keySizer, keyMarshaler := typeMarshaler(keyType, keyTags, false, false) // don't omit zero value in map + valSizer, valMarshaler := typeMarshaler(valType, valTags, false, false) // don't omit zero value in map + keyWireTag := 1<<3 | wiretype(keyTags[0]) + valWireTag := 2<<3 | wiretype(valTags[0]) + + // We create an interface to get the addresses of the map key and value. + // If value is pointer-typed, the interface is a direct interface, the + // idata itself is the value. Otherwise, the idata is the pointer to the + // value. + // Key cannot be pointer-typed. + valIsPtr := valType.Kind() == reflect.Ptr + return func(ptr pointer, tagsize int) int { + m := ptr.asPointerTo(t).Elem() // the map + n := 0 + for _, k := range m.MapKeys() { + ki := k.Interface() + vi := m.MapIndex(k).Interface() + kaddr := toAddrPointer(&ki, false) // pointer to key + vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value + siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) + n += siz + SizeVarint(uint64(siz)) + tagsize + } + return n + }, + func(b []byte, ptr pointer, tag uint64, deterministic bool) ([]byte, error) { + m := ptr.asPointerTo(t).Elem() // the map + var err error + keys := m.MapKeys() + if len(keys) > 1 && deterministic { + sort.Sort(mapKeys(keys)) + } + for _, k := range keys { + ki := k.Interface() + vi := m.MapIndex(k).Interface() + kaddr := toAddrPointer(&ki, false) // pointer to key + vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value + b = appendVarint(b, tag) + siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) + b = appendVarint(b, uint64(siz)) + b, err = keyMarshaler(b, kaddr, keyWireTag, deterministic) + if err != nil { + return b, err + } + b, err = valMarshaler(b, vaddr, valWireTag, deterministic) + if err != nil && err != ErrNil { // allow nil value in map + return b, err + } + } + return b, nil + } +} + +// makeOneOfMarshaler returns the sizer and marshaler for a oneof field. +// fi is the marshal info of the field. +// f is the pointer to the reflect data structure of the field. +func makeOneOfMarshaler(fi *marshalFieldInfo, f *reflect.StructField) (sizer, marshaler) { + // Oneof field is an interface. We need to get the actual data type on the fly. + t := f.Type + return func(ptr pointer, _ int) int { + p := ptr.getInterfacePointer() + if p.isNil() { + return 0 + } + v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct + telem := v.Type() + e := fi.oneofElems[telem] + return e.sizer(p, e.tagsize) + }, + func(b []byte, ptr pointer, _ uint64, deterministic bool) ([]byte, error) { + p := ptr.getInterfacePointer() + if p.isNil() { + return b, nil + } + v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct + telem := v.Type() + if telem.Field(0).Type.Kind() == reflect.Ptr && p.getPointer().isNil() { + return b, errOneofHasNil + } + e := fi.oneofElems[telem] + return e.marshaler(b, p, e.wiretag, deterministic) + } +} + +// sizeExtensions computes the size of encoded data for a XXX_InternalExtensions field. 
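makeMapMarshaler treats every map entry as a tiny two-field message (the key is always field 1, the value field 2) wrapped in a length-delimited record for the map's own field number, and sorts keys only when deterministic output is requested. A standalone sketch of one entry of a hypothetical map<uint32, string> field:

package main

import (
	"encoding/binary"
	"fmt"
)

// mapEntry encodes a single map<uint32, string> entry: an outer
// length-delimited record whose payload holds the key (field 1, varint)
// and the value (field 2, bytes).
func mapEntry(mapFieldNum int, key uint32, val string) []byte {
	inner := binary.AppendUvarint(nil, 1<<3|0) // key tag
	inner = binary.AppendUvarint(inner, uint64(key))
	inner = binary.AppendUvarint(inner, 2<<3|2) // value tag
	inner = binary.AppendUvarint(inner, uint64(len(val)))
	inner = append(inner, val...)

	out := binary.AppendUvarint(nil, uint64(mapFieldNum)<<3|2)
	out = binary.AppendUvarint(out, uint64(len(inner)))
	return append(out, inner...)
}

func main() {
	fmt.Printf("% x\n", mapEntry(4, 7, "go")) // one such record per key
}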
+func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int { + m, mu := ext.extensionsRead() + if m == nil { + return 0 + } + mu.Lock() + + n := 0 + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + n += len(e.enc) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + n += ei.sizer(p, ei.tagsize) + } + mu.Unlock() + return n +} + +// appendExtensions marshals a XXX_InternalExtensions field to the end of byte slice b. +func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { + m, mu := ext.extensionsRead() + if m == nil { + return b, nil + } + mu.Lock() + defer mu.Unlock() + + var err error + + // Fast-path for common cases: zero or one extensions. + // Don't bother sorting the keys. + if len(m) <= 1 { + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if err != nil { + return b, err + } + } + return b, nil + } + + // Sort the keys to provide a deterministic encoding. + // Not sure this is required, but the old code does it. + keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + for _, k := range keys { + e := m[int32(k)] + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if err != nil { + return b, err + } + } + return b, nil +} + +// message set format is: +// message MessageSet { +// repeated group Item = 1 { +// required int32 type_id = 2; +// required string message = 3; +// }; +// } + +// sizeMessageSet computes the size of encoded data for a XXX_InternalExtensions field +// in message set format (above). +func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int { + m, mu := ext.extensionsRead() + if m == nil { + return 0 + } + mu.Lock() + + n := 0 + for id, e := range m { + n += 2 // start group, end group. tag = 1 (size=1) + n += SizeVarint(uint64(id)) + 1 // type_id, tag = 2 (size=1) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + siz := len(msgWithLen) + n += siz + 1 // message, tag = 3 (size=1) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. 
+ + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + n += ei.sizer(p, 1) // message, tag = 3 (size=1) + } + mu.Unlock() + return n +} + +// appendMessageSet marshals a XXX_InternalExtensions field in message set format (above) +// to the end of byte slice b. +func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { + m, mu := ext.extensionsRead() + if m == nil { + return b, nil + } + mu.Lock() + defer mu.Unlock() + + var err error + + // Fast-path for common cases: zero or one extensions. + // Don't bother sorting the keys. + if len(m) <= 1 { + for id, e := range m { + b = append(b, 1<<3|WireStartGroup) + b = append(b, 2<<3|WireVarint) + b = appendVarint(b, uint64(id)) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + b = append(b, 3<<3|WireBytes) + b = append(b, msgWithLen...) + b = append(b, 1<<3|WireEndGroup) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) + if err != nil { + return b, err + } + b = append(b, 1<<3|WireEndGroup) + } + return b, nil + } + + // Sort the keys to provide a deterministic encoding. + keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + for _, id := range keys { + e := m[int32(id)] + b = append(b, 1<<3|WireStartGroup) + b = append(b, 2<<3|WireVarint) + b = appendVarint(b, uint64(id)) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + b = append(b, 3<<3|WireBytes) + b = append(b, msgWithLen...) + b = append(b, 1<<3|WireEndGroup) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) + b = append(b, 1<<3|WireEndGroup) + if err != nil { + return b, err + } + } + return b, nil +} + +// sizeV1Extensions computes the size of encoded data for a V1-API extension field. +func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int { + if m == nil { + return 0 + } + + n := 0 + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + n += len(e.enc) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + n += ei.sizer(p, ei.tagsize) + } + return n +} + +// appendV1Extensions marshals a V1-API extension field to the end of byte slice b. +func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, deterministic bool) ([]byte, error) { + if m == nil { + return b, nil + } + + // Sort the keys to provide a deterministic encoding. 
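The message-set helpers here follow the grouped layout spelled out in the format comment above: each extension becomes group 1 containing type_id as field 2 (varint) and the encoded extension as field 3 (bytes). A rough standalone sketch of one such item, with an invented type id and payload:

package main

import (
	"encoding/binary"
	"fmt"
)

// messageSetItem lays out one extension in message-set format:
// start group 1, type_id (field 2), message bytes (field 3), end group 1.
func messageSetItem(typeID int32, msg []byte) []byte {
	b := []byte{1<<3 | 3} // start group
	b = append(b, 2<<3|0) // type_id, varint
	b = binary.AppendUvarint(b, uint64(typeID))
	b = append(b, 3<<3|2) // message, length-delimited
	b = binary.AppendUvarint(b, uint64(len(msg)))
	b = append(b, msg...)
	return append(b, 1<<3|4) // end group
}

func main() {
	fmt.Printf("% x\n", messageSetItem(100, []byte{0x08, 0x01}))
}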
+ keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + var err error + for _, k := range keys { + e := m[int32(k)] + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if err != nil { + return b, err + } + } + return b, nil +} + +// newMarshaler is the interface representing objects that can marshal themselves. +// +// This exists to support protoc-gen-go generated messages. +// The proto package will stop type-asserting to this interface in the future. +// +// DO NOT DEPEND ON THIS. +type newMarshaler interface { + XXX_Size() int + XXX_Marshal(b []byte, deterministic bool) ([]byte, error) +} + +// Size returns the encoded size of a protocol buffer message. +// This is the main entry point. +func Size(pb Message) int { + if m, ok := pb.(newMarshaler); ok { + return m.XXX_Size() + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + b, _ := m.Marshal() + return len(b) + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return 0 + } + var info InternalMessageInfo + return info.Size(pb) +} + +// Marshal takes a protocol buffer message +// and encodes it into the wire format, returning the data. +// This is the main entry point. +func Marshal(pb Message) ([]byte, error) { + if m, ok := pb.(newMarshaler); ok { + siz := m.XXX_Size() + b := make([]byte, 0, siz) + return m.XXX_Marshal(b, false) + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + return m.Marshal() + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return nil, ErrNil + } + var info InternalMessageInfo + siz := info.Size(pb) + b := make([]byte, 0, siz) + return info.Marshal(b, pb, false) +} + +// Marshal takes a protocol buffer message +// and encodes it into the wire format, writing the result to the +// Buffer. +// This is an alternative entry point. It is not necessary to use +// a Buffer for most applications. +func (p *Buffer) Marshal(pb Message) error { + var err error + if m, ok := pb.(newMarshaler); ok { + siz := m.XXX_Size() + p.grow(siz) // make sure buf has enough capacity + p.buf, err = m.XXX_Marshal(p.buf, p.deterministic) + return err + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + b, err := m.Marshal() + p.buf = append(p.buf, b...) + return err + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return ErrNil + } + var info InternalMessageInfo + siz := info.Size(pb) + p.grow(siz) // make sure buf has enough capacity + p.buf, err = info.Marshal(p.buf, pb, p.deterministic) + return err +} + +// grow grows the buffer's capacity, if necessary, to guarantee space for +// another n bytes. After grow(n), at least n bytes can be written to the +// buffer without another allocation. 
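Size, Marshal and (*Buffer).Marshal above are the public entry points that dispatch into these tables. A brief usage sketch against a generated message already present in this vendor tree (Timestamp is chosen purely for convenience):

package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"
	tspb "github.com/golang/protobuf/ptypes/timestamp"
)

func main() {
	msg := &tspb.Timestamp{Seconds: 1500000000, Nanos: 42}

	// Plain entry point: sizes the message first, then takes the generated
	// fast path when one is available.
	out, err := proto.Marshal(msg)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("% x\n", out)

	// Buffer entry point; deterministic mode makes map fields, if any,
	// come out in sorted key order.
	var buf proto.Buffer
	buf.SetDeterministic(true)
	if err := buf.Marshal(msg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("% x\n", buf.Bytes())
}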
+func (p *Buffer) grow(n int) { + need := len(p.buf) + n + if need <= cap(p.buf) { + return + } + newCap := len(p.buf) * 2 + if newCap < need { + newCap = need + } + p.buf = append(make([]byte, 0, newCap), p.buf...) +} diff --git a/vendor/github.com/golang/protobuf/proto/table_merge.go b/vendor/github.com/golang/protobuf/proto/table_merge.go new file mode 100644 index 00000000..5525def6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_merge.go @@ -0,0 +1,654 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "fmt" + "reflect" + "strings" + "sync" + "sync/atomic" +) + +// Merge merges the src message into dst. +// This assumes that dst and src of the same type and are non-nil. +func (a *InternalMessageInfo) Merge(dst, src Message) { + mi := atomicLoadMergeInfo(&a.merge) + if mi == nil { + mi = getMergeInfo(reflect.TypeOf(dst).Elem()) + atomicStoreMergeInfo(&a.merge, mi) + } + mi.merge(toPointer(&dst), toPointer(&src)) +} + +type mergeInfo struct { + typ reflect.Type + + initialized int32 // 0: only typ is valid, 1: everything is valid + lock sync.Mutex + + fields []mergeFieldInfo + unrecognized field // Offset of XXX_unrecognized +} + +type mergeFieldInfo struct { + field field // Offset of field, guaranteed to be valid + + // isPointer reports whether the value in the field is a pointer. + // This is true for the following situations: + // * Pointer to struct + // * Pointer to basic type (proto2 only) + // * Slice (first value in slice header is a pointer) + // * String (first value in string header is a pointer) + isPointer bool + + // basicWidth reports the width of the field assuming that it is directly + // embedded in the struct (as is the case for basic types in proto3). 
+ // The possible values are: + // 0: invalid + // 1: bool + // 4: int32, uint32, float32 + // 8: int64, uint64, float64 + basicWidth int + + // Where dst and src are pointers to the types being merged. + merge func(dst, src pointer) +} + +var ( + mergeInfoMap = map[reflect.Type]*mergeInfo{} + mergeInfoLock sync.Mutex +) + +func getMergeInfo(t reflect.Type) *mergeInfo { + mergeInfoLock.Lock() + defer mergeInfoLock.Unlock() + mi := mergeInfoMap[t] + if mi == nil { + mi = &mergeInfo{typ: t} + mergeInfoMap[t] = mi + } + return mi +} + +// merge merges src into dst assuming they are both of type *mi.typ. +func (mi *mergeInfo) merge(dst, src pointer) { + if dst.isNil() { + panic("proto: nil destination") + } + if src.isNil() { + return // Nothing to do. + } + + if atomic.LoadInt32(&mi.initialized) == 0 { + mi.computeMergeInfo() + } + + for _, fi := range mi.fields { + sfp := src.offset(fi.field) + + // As an optimization, we can avoid the merge function call cost + // if we know for sure that the source will have no effect + // by checking if it is the zero value. + if unsafeAllowed { + if fi.isPointer && sfp.getPointer().isNil() { // Could be slice or string + continue + } + if fi.basicWidth > 0 { + switch { + case fi.basicWidth == 1 && !*sfp.toBool(): + continue + case fi.basicWidth == 4 && *sfp.toUint32() == 0: + continue + case fi.basicWidth == 8 && *sfp.toUint64() == 0: + continue + } + } + } + + dfp := dst.offset(fi.field) + fi.merge(dfp, sfp) + } + + // TODO: Make this faster? + out := dst.asPointerTo(mi.typ).Elem() + in := src.asPointerTo(mi.typ).Elem() + if emIn, err := extendable(in.Addr().Interface()); err == nil { + emOut, _ := extendable(out.Addr().Interface()) + mIn, muIn := emIn.extensionsRead() + if mIn != nil { + mOut := emOut.extensionsWrite() + muIn.Lock() + mergeExtension(mOut, mIn) + muIn.Unlock() + } + } + + if mi.unrecognized.IsValid() { + if b := *src.offset(mi.unrecognized).toBytes(); len(b) > 0 { + *dst.offset(mi.unrecognized).toBytes() = append([]byte(nil), b...) + } + } +} + +func (mi *mergeInfo) computeMergeInfo() { + mi.lock.Lock() + defer mi.lock.Unlock() + if mi.initialized != 0 { + return + } + t := mi.typ + n := t.NumField() + + props := GetProperties(t) + for i := 0; i < n; i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + + mfi := mergeFieldInfo{field: toField(&f)} + tf := f.Type + + // As an optimization, we can avoid the merge function call cost + // if we know for sure that the source will have no effect + // by checking if it is the zero value. + if unsafeAllowed { + switch tf.Kind() { + case reflect.Ptr, reflect.Slice, reflect.String: + // As a special case, we assume slices and strings are pointers + // since we know that the first field in the SliceSlice or + // StringHeader is a data pointer. + mfi.isPointer = true + case reflect.Bool: + mfi.basicWidth = 1 + case reflect.Int32, reflect.Uint32, reflect.Float32: + mfi.basicWidth = 4 + case reflect.Int64, reflect.Uint64, reflect.Float64: + mfi.basicWidth = 8 + } + } + + // Unwrap tf to get at its most basic type. 
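The per-kind merge functions computed below all share the behaviour that the zero-value check above optimizes for: scalar fields in src only overwrite dst when non-zero, slices append, and messages merge recursively. A quick sketch of what that means at the public proto.Merge level (Timestamp is just a convenient generated proto3 type):

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	tspb "github.com/golang/protobuf/ptypes/timestamp"
)

func main() {
	dst := &tspb.Timestamp{Seconds: 10}
	src := &tspb.Timestamp{Nanos: 500} // Seconds is zero in src, so it is skipped

	proto.Merge(dst, src)
	fmt.Println(dst.Seconds, dst.Nanos) // 10 500
}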
+ var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic("both pointer and slice for basic type in " + tf.Name()) + } + + switch tf.Kind() { + case reflect.Int32: + switch { + case isSlice: // E.g., []int32 + mfi.merge = func(dst, src pointer) { + // NOTE: toInt32Slice is not defined (see pointer_reflect.go). + /* + sfsp := src.toInt32Slice() + if *sfsp != nil { + dfsp := dst.toInt32Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []int64{} + } + } + */ + sfs := src.getInt32Slice() + if sfs != nil { + dfs := dst.getInt32Slice() + dfs = append(dfs, sfs...) + if dfs == nil { + dfs = []int32{} + } + dst.setInt32Slice(dfs) + } + } + case isPointer: // E.g., *int32 + mfi.merge = func(dst, src pointer) { + // NOTE: toInt32Ptr is not defined (see pointer_reflect.go). + /* + sfpp := src.toInt32Ptr() + if *sfpp != nil { + dfpp := dst.toInt32Ptr() + if *dfpp == nil { + *dfpp = Int32(**sfpp) + } else { + **dfpp = **sfpp + } + } + */ + sfp := src.getInt32Ptr() + if sfp != nil { + dfp := dst.getInt32Ptr() + if dfp == nil { + dst.setInt32Ptr(*sfp) + } else { + *dfp = *sfp + } + } + } + default: // E.g., int32 + mfi.merge = func(dst, src pointer) { + if v := *src.toInt32(); v != 0 { + *dst.toInt32() = v + } + } + } + case reflect.Int64: + switch { + case isSlice: // E.g., []int64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toInt64Slice() + if *sfsp != nil { + dfsp := dst.toInt64Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []int64{} + } + } + } + case isPointer: // E.g., *int64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toInt64Ptr() + if *sfpp != nil { + dfpp := dst.toInt64Ptr() + if *dfpp == nil { + *dfpp = Int64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., int64 + mfi.merge = func(dst, src pointer) { + if v := *src.toInt64(); v != 0 { + *dst.toInt64() = v + } + } + } + case reflect.Uint32: + switch { + case isSlice: // E.g., []uint32 + mfi.merge = func(dst, src pointer) { + sfsp := src.toUint32Slice() + if *sfsp != nil { + dfsp := dst.toUint32Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []uint32{} + } + } + } + case isPointer: // E.g., *uint32 + mfi.merge = func(dst, src pointer) { + sfpp := src.toUint32Ptr() + if *sfpp != nil { + dfpp := dst.toUint32Ptr() + if *dfpp == nil { + *dfpp = Uint32(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., uint32 + mfi.merge = func(dst, src pointer) { + if v := *src.toUint32(); v != 0 { + *dst.toUint32() = v + } + } + } + case reflect.Uint64: + switch { + case isSlice: // E.g., []uint64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toUint64Slice() + if *sfsp != nil { + dfsp := dst.toUint64Slice() + *dfsp = append(*dfsp, *sfsp...) 
+ if *dfsp == nil { + *dfsp = []uint64{} + } + } + } + case isPointer: // E.g., *uint64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toUint64Ptr() + if *sfpp != nil { + dfpp := dst.toUint64Ptr() + if *dfpp == nil { + *dfpp = Uint64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., uint64 + mfi.merge = func(dst, src pointer) { + if v := *src.toUint64(); v != 0 { + *dst.toUint64() = v + } + } + } + case reflect.Float32: + switch { + case isSlice: // E.g., []float32 + mfi.merge = func(dst, src pointer) { + sfsp := src.toFloat32Slice() + if *sfsp != nil { + dfsp := dst.toFloat32Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []float32{} + } + } + } + case isPointer: // E.g., *float32 + mfi.merge = func(dst, src pointer) { + sfpp := src.toFloat32Ptr() + if *sfpp != nil { + dfpp := dst.toFloat32Ptr() + if *dfpp == nil { + *dfpp = Float32(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., float32 + mfi.merge = func(dst, src pointer) { + if v := *src.toFloat32(); v != 0 { + *dst.toFloat32() = v + } + } + } + case reflect.Float64: + switch { + case isSlice: // E.g., []float64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toFloat64Slice() + if *sfsp != nil { + dfsp := dst.toFloat64Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []float64{} + } + } + } + case isPointer: // E.g., *float64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toFloat64Ptr() + if *sfpp != nil { + dfpp := dst.toFloat64Ptr() + if *dfpp == nil { + *dfpp = Float64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., float64 + mfi.merge = func(dst, src pointer) { + if v := *src.toFloat64(); v != 0 { + *dst.toFloat64() = v + } + } + } + case reflect.Bool: + switch { + case isSlice: // E.g., []bool + mfi.merge = func(dst, src pointer) { + sfsp := src.toBoolSlice() + if *sfsp != nil { + dfsp := dst.toBoolSlice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []bool{} + } + } + } + case isPointer: // E.g., *bool + mfi.merge = func(dst, src pointer) { + sfpp := src.toBoolPtr() + if *sfpp != nil { + dfpp := dst.toBoolPtr() + if *dfpp == nil { + *dfpp = Bool(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., bool + mfi.merge = func(dst, src pointer) { + if v := *src.toBool(); v { + *dst.toBool() = v + } + } + } + case reflect.String: + switch { + case isSlice: // E.g., []string + mfi.merge = func(dst, src pointer) { + sfsp := src.toStringSlice() + if *sfsp != nil { + dfsp := dst.toStringSlice() + *dfsp = append(*dfsp, *sfsp...) 
+ if *dfsp == nil { + *dfsp = []string{} + } + } + } + case isPointer: // E.g., *string + mfi.merge = func(dst, src pointer) { + sfpp := src.toStringPtr() + if *sfpp != nil { + dfpp := dst.toStringPtr() + if *dfpp == nil { + *dfpp = String(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., string + mfi.merge = func(dst, src pointer) { + if v := *src.toString(); v != "" { + *dst.toString() = v + } + } + } + case reflect.Slice: + isProto3 := props.Prop[i].proto3 + switch { + case isPointer: + panic("bad pointer in byte slice case in " + tf.Name()) + case tf.Elem().Kind() != reflect.Uint8: + panic("bad element kind in byte slice case in " + tf.Name()) + case isSlice: // E.g., [][]byte + mfi.merge = func(dst, src pointer) { + sbsp := src.toBytesSlice() + if *sbsp != nil { + dbsp := dst.toBytesSlice() + for _, sb := range *sbsp { + if sb == nil { + *dbsp = append(*dbsp, nil) + } else { + *dbsp = append(*dbsp, append([]byte{}, sb...)) + } + } + if *dbsp == nil { + *dbsp = [][]byte{} + } + } + } + default: // E.g., []byte + mfi.merge = func(dst, src pointer) { + sbp := src.toBytes() + if *sbp != nil { + dbp := dst.toBytes() + if !isProto3 || len(*sbp) > 0 { + *dbp = append([]byte{}, *sbp...) + } + } + } + } + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("message field %s without pointer", tf)) + case isSlice: // E.g., []*pb.T + mi := getMergeInfo(tf) + mfi.merge = func(dst, src pointer) { + sps := src.getPointerSlice() + if sps != nil { + dps := dst.getPointerSlice() + for _, sp := range sps { + var dp pointer + if !sp.isNil() { + dp = valToPointer(reflect.New(tf)) + mi.merge(dp, sp) + } + dps = append(dps, dp) + } + if dps == nil { + dps = []pointer{} + } + dst.setPointerSlice(dps) + } + } + default: // E.g., *pb.T + mi := getMergeInfo(tf) + mfi.merge = func(dst, src pointer) { + sp := src.getPointer() + if !sp.isNil() { + dp := dst.getPointer() + if dp.isNil() { + dp = valToPointer(reflect.New(tf)) + dst.setPointer(dp) + } + mi.merge(dp, sp) + } + } + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic("bad pointer or slice in map case in " + tf.Name()) + default: // E.g., map[K]V + mfi.merge = func(dst, src pointer) { + sm := src.asPointerTo(tf).Elem() + if sm.Len() == 0 { + return + } + dm := dst.asPointerTo(tf).Elem() + if dm.IsNil() { + dm.Set(reflect.MakeMap(tf)) + } + + switch tf.Elem().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + val = reflect.ValueOf(Clone(val.Interface().(Message))) + dm.SetMapIndex(key, val) + } + case reflect.Slice: // E.g. Bytes type (e.g., []byte) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) + dm.SetMapIndex(key, val) + } + default: // Basic type (e.g., string) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + dm.SetMapIndex(key, val) + } + } + } + } + case reflect.Interface: + // Must be oneof field. + switch { + case isPointer || isSlice: + panic("bad pointer or slice in interface case in " + tf.Name()) + default: // E.g., interface{} + // TODO: Make this faster? 
+ mfi.merge = func(dst, src pointer) { + su := src.asPointerTo(tf).Elem() + if !su.IsNil() { + du := dst.asPointerTo(tf).Elem() + typ := su.Elem().Type() + if du.IsNil() || du.Elem().Type() != typ { + du.Set(reflect.New(typ.Elem())) // Initialize interface if empty + } + sv := su.Elem().Elem().Field(0) + if sv.Kind() == reflect.Ptr && sv.IsNil() { + return + } + dv := du.Elem().Elem().Field(0) + if dv.Kind() == reflect.Ptr && dv.IsNil() { + dv.Set(reflect.New(sv.Type().Elem())) // Initialize proto message if empty + } + switch sv.Type().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + Merge(dv.Interface().(Message), sv.Interface().(Message)) + case reflect.Slice: // E.g. Bytes type (e.g., []byte) + dv.Set(reflect.ValueOf(append([]byte{}, sv.Bytes()...))) + default: // Basic type (e.g., string) + dv.Set(sv) + } + } + } + } + default: + panic(fmt.Sprintf("merger not found for type:%s", tf)) + } + mi.fields = append(mi.fields, mfi) + } + + mi.unrecognized = invalidField + if f, ok := t.FieldByName("XXX_unrecognized"); ok { + if f.Type != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + mi.unrecognized = toField(&f) + } + + atomic.StoreInt32(&mi.initialized, 1) +} diff --git a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go new file mode 100644 index 00000000..55f0340a --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go @@ -0,0 +1,1967 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "errors" + "fmt" + "io" + "math" + "reflect" + "strconv" + "strings" + "sync" + "sync/atomic" + "unicode/utf8" +) + +// Unmarshal is the entry point from the generated .pb.go files. +// This function is not intended to be used by non-generated code. +// This function is not subject to any compatibility guarantee. 
+// msg contains a pointer to a protocol buffer struct. +// b is the data to be unmarshaled into the protocol buffer. +// a is a pointer to a place to store cached unmarshal information. +func (a *InternalMessageInfo) Unmarshal(msg Message, b []byte) error { + // Load the unmarshal information for this message type. + // The atomic load ensures memory consistency. + u := atomicLoadUnmarshalInfo(&a.unmarshal) + if u == nil { + // Slow path: find unmarshal info for msg, update a with it. + u = getUnmarshalInfo(reflect.TypeOf(msg).Elem()) + atomicStoreUnmarshalInfo(&a.unmarshal, u) + } + // Then do the unmarshaling. + err := u.unmarshal(toPointer(&msg), b) + return err +} + +type unmarshalInfo struct { + typ reflect.Type // type of the protobuf struct + + // 0 = only typ field is initialized + // 1 = completely initialized + initialized int32 + lock sync.Mutex // prevents double initialization + dense []unmarshalFieldInfo // fields indexed by tag # + sparse map[uint64]unmarshalFieldInfo // fields indexed by tag # + reqFields []string // names of required fields + reqMask uint64 // 1< 0 { + // Read tag and wire type. + // Special case 1 and 2 byte varints. + var x uint64 + if b[0] < 128 { + x = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + x = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + x, n = decodeVarint(b) + if n == 0 { + return io.ErrUnexpectedEOF + } + b = b[n:] + } + tag := x >> 3 + wire := int(x) & 7 + + // Dispatch on the tag to one of the unmarshal* functions below. + var f unmarshalFieldInfo + if tag < uint64(len(u.dense)) { + f = u.dense[tag] + } else { + f = u.sparse[tag] + } + if fn := f.unmarshal; fn != nil { + var err error + b, err = fn(b, m.offset(f.field), wire) + if err == nil { + reqMask |= f.reqMask + continue + } + if r, ok := err.(*RequiredNotSetError); ok { + // Remember this error, but keep parsing. We need to produce + // a full parse even if a required field is missing. + rnse = r + reqMask |= f.reqMask + continue + } + if err != errInternalBadWireType { + return err + } + // Fragments with bad wire type are treated as unknown fields. + } + + // Unknown tag. + if !u.unrecognized.IsValid() { + // Don't keep unrecognized data; just skip it. + var err error + b, err = skipField(b, wire) + if err != nil { + return err + } + continue + } + // Keep unrecognized data around. + // maybe in extensions, maybe in the unrecognized field. + z := m.offset(u.unrecognized).toBytes() + var emap map[int32]Extension + var e Extension + for _, r := range u.extensionRanges { + if uint64(r.Start) <= tag && tag <= uint64(r.End) { + if u.extensions.IsValid() { + mp := m.offset(u.extensions).toExtensions() + emap = mp.extensionsWrite() + e = emap[int32(tag)] + z = &e.enc + break + } + if u.oldExtensions.IsValid() { + p := m.offset(u.oldExtensions).toOldExtensions() + emap = *p + if emap == nil { + emap = map[int32]Extension{} + *p = emap + } + e = emap[int32(tag)] + z = &e.enc + break + } + panic("no extensions field available") + } + } + + // Use wire type to skip data. + var err error + b0 := b + b, err = skipField(b, wire) + if err != nil { + return err + } + *z = encodeVarint(*z, tag<<3|uint64(wire)) + *z = append(*z, b0[:len(b0)-len(b)]...) + + if emap != nil { + emap[int32(tag)] = e + } + } + if rnse != nil { + // A required field of a submessage/group is missing. Return that error. + return rnse + } + if reqMask != u.reqMask { + // A required field of this message is missing. 
+ for _, n := range u.reqFields { + if reqMask&1 == 0 { + return &RequiredNotSetError{n} + } + reqMask >>= 1 + } + } + return nil +} + +// computeUnmarshalInfo fills in u with information for use +// in unmarshaling protocol buffers of type u.typ. +func (u *unmarshalInfo) computeUnmarshalInfo() { + u.lock.Lock() + defer u.lock.Unlock() + if u.initialized != 0 { + return + } + t := u.typ + n := t.NumField() + + // Set up the "not found" value for the unrecognized byte buffer. + // This is the default for proto3. + u.unrecognized = invalidField + u.extensions = invalidField + u.oldExtensions = invalidField + + // List of the generated type and offset for each oneof field. + type oneofField struct { + ityp reflect.Type // interface type of oneof field + field field // offset in containing message + } + var oneofFields []oneofField + + for i := 0; i < n; i++ { + f := t.Field(i) + if f.Name == "XXX_unrecognized" { + // The byte slice used to hold unrecognized input is special. + if f.Type != reflect.TypeOf(([]byte)(nil)) { + panic("bad type for XXX_unrecognized field: " + f.Type.Name()) + } + u.unrecognized = toField(&f) + continue + } + if f.Name == "XXX_InternalExtensions" { + // Ditto here. + if f.Type != reflect.TypeOf(XXX_InternalExtensions{}) { + panic("bad type for XXX_InternalExtensions field: " + f.Type.Name()) + } + u.extensions = toField(&f) + if f.Tag.Get("protobuf_messageset") == "1" { + u.isMessageSet = true + } + continue + } + if f.Name == "XXX_extensions" { + // An older form of the extensions field. + if f.Type != reflect.TypeOf((map[int32]Extension)(nil)) { + panic("bad type for XXX_extensions field: " + f.Type.Name()) + } + u.oldExtensions = toField(&f) + continue + } + if f.Name == "XXX_NoUnkeyedLiteral" || f.Name == "XXX_sizecache" { + continue + } + + oneof := f.Tag.Get("protobuf_oneof") + if oneof != "" { + oneofFields = append(oneofFields, oneofField{f.Type, toField(&f)}) + // The rest of oneof processing happens below. + continue + } + + tags := f.Tag.Get("protobuf") + tagArray := strings.Split(tags, ",") + if len(tagArray) < 2 { + panic("protobuf tag not enough fields in " + t.Name() + "." + f.Name + ": " + tags) + } + tag, err := strconv.Atoi(tagArray[1]) + if err != nil { + panic("protobuf tag field not an integer: " + tagArray[1]) + } + + name := "" + for _, tag := range tagArray[3:] { + if strings.HasPrefix(tag, "name=") { + name = tag[5:] + } + } + + // Extract unmarshaling function from the field (its type and tags). + unmarshal := fieldUnmarshaler(&f) + + // Required field? + var reqMask uint64 + if tagArray[2] == "req" { + bit := len(u.reqFields) + u.reqFields = append(u.reqFields, name) + reqMask = uint64(1) << uint(bit) + // TODO: if we have more than 64 required fields, we end up + // not verifying that all required fields are present. + // Fix this, perhaps using a count of required fields? + } + + // Store the info in the correct slot in the message. + u.setTag(tag, toField(&f), unmarshal, reqMask) + } + + // Find any types associated with oneof fields. + // TODO: XXX_OneofFuncs returns more info than we need. Get rid of some of it? 
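The reqMask bookkeeping gives every required field one bit; after a parse, any bit still clear in the accumulated mask identifies a missing field by its position in reqFields. A tiny standalone model of that check (the helper name and field names are invented for illustration):

package main

import "fmt"

// missingRequired reports which required fields never had their bit set.
func missingRequired(reqFields []string, seen uint64) []string {
	var missing []string
	for i, name := range reqFields {
		if seen&(1<<uint(i)) == 0 {
			missing = append(missing, name)
		}
	}
	return missing
}

func main() {
	fields := []string{"id", "name", "email"}
	fmt.Println(missingRequired(fields, 0b101)) // [name]
}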
+ fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("XXX_OneofFuncs") + if fn.IsValid() { + res := fn.Call(nil)[3] // last return value from XXX_OneofFuncs: []interface{} + for i := res.Len() - 1; i >= 0; i-- { + v := res.Index(i) // interface{} + tptr := reflect.ValueOf(v.Interface()).Type() // *Msg_X + typ := tptr.Elem() // Msg_X + + f := typ.Field(0) // oneof implementers have one field + baseUnmarshal := fieldUnmarshaler(&f) + tagstr := strings.Split(f.Tag.Get("protobuf"), ",")[1] + tag, err := strconv.Atoi(tagstr) + if err != nil { + panic("protobuf tag field not an integer: " + tagstr) + } + + // Find the oneof field that this struct implements. + // Might take O(n^2) to process all of the oneofs, but who cares. + for _, of := range oneofFields { + if tptr.Implements(of.ityp) { + // We have found the corresponding interface for this struct. + // That lets us know where this struct should be stored + // when we encounter it during unmarshaling. + unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal) + u.setTag(tag, of.field, unmarshal, 0) + } + } + } + } + + // Get extension ranges, if any. + fn = reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray") + if fn.IsValid() { + if !u.extensions.IsValid() && !u.oldExtensions.IsValid() { + panic("a message with extensions, but no extensions field in " + t.Name()) + } + u.extensionRanges = fn.Call(nil)[0].Interface().([]ExtensionRange) + } + + // Explicitly disallow tag 0. This will ensure we flag an error + // when decoding a buffer of all zeros. Without this code, we + // would decode and skip an all-zero buffer of even length. + // [0 0] is [tag=0/wiretype=varint varint-encoded-0]. + u.setTag(0, zeroField, func(b []byte, f pointer, w int) ([]byte, error) { + return nil, fmt.Errorf("proto: %s: illegal tag 0 (wire type %d)", t, w) + }, 0) + + // Set mask for required field check. + u.reqMask = uint64(1)<= 0 && (tag < 16 || tag < 2*n) { // TODO: what are the right numbers here? + for len(u.dense) <= tag { + u.dense = append(u.dense, unmarshalFieldInfo{}) + } + u.dense[tag] = i + return + } + if u.sparse == nil { + u.sparse = map[uint64]unmarshalFieldInfo{} + } + u.sparse[uint64(tag)] = i +} + +// fieldUnmarshaler returns an unmarshaler for the given field. +func fieldUnmarshaler(f *reflect.StructField) unmarshaler { + if f.Type.Kind() == reflect.Map { + return makeUnmarshalMap(f) + } + return typeUnmarshaler(f.Type, f.Tag.Get("protobuf")) +} + +// typeUnmarshaler returns an unmarshaler for the given field type / field tag pair. +func typeUnmarshaler(t reflect.Type, tags string) unmarshaler { + tagArray := strings.Split(tags, ",") + encoding := tagArray[0] + name := "unknown" + for _, tag := range tagArray[3:] { + if strings.HasPrefix(tag, "name=") { + name = tag[5:] + } + } + + // Figure out packaging (pointer, slice, or both) + slice := false + pointer := false + if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { + slice = true + t = t.Elem() + } + if t.Kind() == reflect.Ptr { + pointer = true + t = t.Elem() + } + + // We'll never have both pointer and slice for basic types. 
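typeUnmarshaler (and fieldUnmarshaler above) is driven entirely by the protobuf struct tag that protoc-gen-go emits: the first element is the encoding, the second the field number, the third req/opt/rep, and later elements carry name= and similar options. A small sketch that just surfaces such a tag on a hand-written struct (the struct and its tag are illustrative, not generated):

package main

import (
	"fmt"
	"reflect"
)

type demo struct {
	Score int32 `protobuf:"zigzag32,3,opt,name=score"`
}

func main() {
	f, _ := reflect.TypeOf(demo{}).FieldByName("Score")
	// zigzag32,3,opt,name=score: encoding, field number, label, options.
	fmt.Println(f.Tag.Get("protobuf"))
}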
+ if pointer && slice && t.Kind() != reflect.Struct { + panic("both pointer and slice for basic type in " + t.Name()) + } + + switch t.Kind() { + case reflect.Bool: + if pointer { + return unmarshalBoolPtr + } + if slice { + return unmarshalBoolSlice + } + return unmarshalBoolValue + case reflect.Int32: + switch encoding { + case "fixed32": + if pointer { + return unmarshalFixedS32Ptr + } + if slice { + return unmarshalFixedS32Slice + } + return unmarshalFixedS32Value + case "varint": + // this could be int32 or enum + if pointer { + return unmarshalInt32Ptr + } + if slice { + return unmarshalInt32Slice + } + return unmarshalInt32Value + case "zigzag32": + if pointer { + return unmarshalSint32Ptr + } + if slice { + return unmarshalSint32Slice + } + return unmarshalSint32Value + } + case reflect.Int64: + switch encoding { + case "fixed64": + if pointer { + return unmarshalFixedS64Ptr + } + if slice { + return unmarshalFixedS64Slice + } + return unmarshalFixedS64Value + case "varint": + if pointer { + return unmarshalInt64Ptr + } + if slice { + return unmarshalInt64Slice + } + return unmarshalInt64Value + case "zigzag64": + if pointer { + return unmarshalSint64Ptr + } + if slice { + return unmarshalSint64Slice + } + return unmarshalSint64Value + } + case reflect.Uint32: + switch encoding { + case "fixed32": + if pointer { + return unmarshalFixed32Ptr + } + if slice { + return unmarshalFixed32Slice + } + return unmarshalFixed32Value + case "varint": + if pointer { + return unmarshalUint32Ptr + } + if slice { + return unmarshalUint32Slice + } + return unmarshalUint32Value + } + case reflect.Uint64: + switch encoding { + case "fixed64": + if pointer { + return unmarshalFixed64Ptr + } + if slice { + return unmarshalFixed64Slice + } + return unmarshalFixed64Value + case "varint": + if pointer { + return unmarshalUint64Ptr + } + if slice { + return unmarshalUint64Slice + } + return unmarshalUint64Value + } + case reflect.Float32: + if pointer { + return unmarshalFloat32Ptr + } + if slice { + return unmarshalFloat32Slice + } + return unmarshalFloat32Value + case reflect.Float64: + if pointer { + return unmarshalFloat64Ptr + } + if slice { + return unmarshalFloat64Slice + } + return unmarshalFloat64Value + case reflect.Map: + panic("map type in typeUnmarshaler in " + t.Name()) + case reflect.Slice: + if pointer { + panic("bad pointer in slice case in " + t.Name()) + } + if slice { + return unmarshalBytesSlice + } + return unmarshalBytesValue + case reflect.String: + if pointer { + return unmarshalStringPtr + } + if slice { + return unmarshalStringSlice + } + return unmarshalStringValue + case reflect.Struct: + // message or group field + if !pointer { + panic(fmt.Sprintf("message/group field %s:%s without pointer", t, encoding)) + } + switch encoding { + case "bytes": + if slice { + return makeUnmarshalMessageSlicePtr(getUnmarshalInfo(t), name) + } + return makeUnmarshalMessagePtr(getUnmarshalInfo(t), name) + case "group": + if slice { + return makeUnmarshalGroupSlicePtr(getUnmarshalInfo(t), name) + } + return makeUnmarshalGroupPtr(getUnmarshalInfo(t), name) + } + } + panic(fmt.Sprintf("unmarshaler not found type:%s encoding:%s", t, encoding)) +} + +// Below are all the unmarshalers for individual fields of various types. 
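Several of the unmarshalers that follow undo zigzag encoding with the expression x>>1 XOR x<<63>>63 (or its 32-bit twin). A standalone sketch of the same decode using the standard library's varint reader, assuming nothing beyond encoding/binary:

package main

import (
	"encoding/binary"
	"fmt"
)

// decodeZigzag64 reads a varint and undoes zigzag encoding, mirroring
// what unmarshalSint64Value does after decodeVarint.
func decodeZigzag64(b []byte) (int64, int) {
	x, n := binary.Uvarint(b)
	return int64(x>>1) ^ int64(x)<<63>>63, n
}

func main() {
	enc := binary.AppendUvarint(nil, 5) // zigzag(-3) encodes as 5
	v, _ := decodeZigzag64(enc)
	fmt.Println(v) // -3
}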
+ +func unmarshalInt64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + *f.toInt64() = v + return b, nil +} + +func unmarshalInt64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + *f.toInt64Ptr() = &v + return b, nil +} + +func unmarshalInt64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + s := f.toInt64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + s := f.toInt64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalSint64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + *f.toInt64() = v + return b, nil +} + +func unmarshalSint64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + *f.toInt64Ptr() = &v + return b, nil +} + +func unmarshalSint64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + s := f.toInt64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + s := f.toInt64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalUint64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + *f.toUint64() = v + return b, nil +} + +func unmarshalUint64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + *f.toUint64Ptr() = &v + return b, nil +} + +func unmarshalUint64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 
{ + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + s := f.toUint64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + s := f.toUint64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalInt32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + *f.toInt32() = v + return b, nil +} + +func unmarshalInt32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.setInt32Ptr(v) + return b, nil +} + +func unmarshalInt32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.appendInt32Slice(v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.appendInt32Slice(v) + return b, nil +} + +func unmarshalSint32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + *f.toInt32() = v + return b, nil +} + +func unmarshalSint32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.setInt32Ptr(v) + return b, nil +} + +func unmarshalSint32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.appendInt32Slice(v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.appendInt32Slice(v) + return b, nil +} + +func unmarshalUint32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + *f.toUint32() = v + return b, nil +} + +func unmarshalUint32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + *f.toUint32Ptr() = &v + return b, nil +} + +func unmarshalUint32Slice(b []byte, f pointer, w int) 
([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + s := f.toUint32Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + s := f.toUint32Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalFixed64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + *f.toUint64() = v + return b[8:], nil +} + +func unmarshalFixed64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + *f.toUint64Ptr() = &v + return b[8:], nil +} + +func unmarshalFixed64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + s := f.toUint64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + s := f.toUint64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFixedS64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + *f.toInt64() = v + return b[8:], nil +} + +func unmarshalFixedS64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + *f.toInt64Ptr() = &v + return b[8:], nil +} + +func unmarshalFixedS64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | 
int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + s := f.toInt64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + s := f.toInt64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFixed32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + *f.toUint32() = v + return b[4:], nil +} + +func unmarshalFixed32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + *f.toUint32Ptr() = &v + return b[4:], nil +} + +func unmarshalFixed32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + s := f.toUint32Slice() + *s = append(*s, v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + s := f.toUint32Slice() + *s = append(*s, v) + return b[4:], nil +} + +func unmarshalFixedS32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + *f.toInt32() = v + return b[4:], nil +} + +func unmarshalFixedS32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.setInt32Ptr(v) + return b[4:], nil +} + +func unmarshalFixedS32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.appendInt32Slice(v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.appendInt32Slice(v) + return b[4:], nil +} + +func unmarshalBoolValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + // Note: any length varint is allowed, even though any sane + // encoder will use one byte. 
+ // See https://github.com/golang/protobuf/issues/76 + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + // TODO: check if x>1? Tests seem to indicate no. + v := x != 0 + *f.toBool() = v + return b[n:], nil +} + +func unmarshalBoolPtr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + *f.toBoolPtr() = &v + return b[n:], nil +} + +func unmarshalBoolSlice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + s := f.toBoolSlice() + *s = append(*s, v) + b = b[n:] + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + s := f.toBoolSlice() + *s = append(*s, v) + return b[n:], nil +} + +func unmarshalFloat64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + *f.toFloat64() = v + return b[8:], nil +} + +func unmarshalFloat64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + *f.toFloat64Ptr() = &v + return b[8:], nil +} + +func unmarshalFloat64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + s := f.toFloat64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + s := f.toFloat64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFloat32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + *f.toFloat32() = v + return b[4:], nil +} + +func unmarshalFloat32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + 
v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + *f.toFloat32Ptr() = &v + return b[4:], nil +} + +func unmarshalFloat32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + s := f.toFloat32Slice() + *s = append(*s, v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + s := f.toFloat32Slice() + *s = append(*s, v) + return b[4:], nil +} + +func unmarshalStringValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + *f.toString() = v + return b[x:], nil +} + +func unmarshalStringPtr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + *f.toStringPtr() = &v + return b[x:], nil +} + +func unmarshalStringSlice(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + if !utf8.ValidString(v) { + return nil, errInvalidUTF8 + } + s := f.toStringSlice() + *s = append(*s, v) + return b[x:], nil +} + +var emptyBuf [0]byte + +func unmarshalBytesValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + // The use of append here is a trick which avoids the zeroing + // that would be required if we used a make/copy pair. + // We append to emptyBuf instead of nil because we want + // a non-nil result even when the length is 0. + v := append(emptyBuf[:], b[:x]...) + *f.toBytes() = v + return b[x:], nil +} + +func unmarshalBytesSlice(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := append(emptyBuf[:], b[:x]...) 
+ s := f.toBytesSlice() + *s = append(*s, v) + return b[x:], nil +} + +func makeUnmarshalMessagePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + // First read the message field to see if something is there. + // The semantics of multiple submessages are weird. Instead of + // the last one winning (as it is for all other fields), multiple + // submessages are merged. + v := f.getPointer() + if v.isNil() { + v = valToPointer(reflect.New(sub.typ)) + f.setPointer(v) + } + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + return b[x:], err + } +} + +func makeUnmarshalMessageSlicePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := valToPointer(reflect.New(sub.typ)) + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + f.appendPointer(v) + return b[x:], err + } +} + +func makeUnmarshalGroupPtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireStartGroup { + return b, errInternalBadWireType + } + x, y := findEndGroup(b) + if x < 0 { + return nil, io.ErrUnexpectedEOF + } + v := f.getPointer() + if v.isNil() { + v = valToPointer(reflect.New(sub.typ)) + f.setPointer(v) + } + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + return b[y:], err + } +} + +func makeUnmarshalGroupSlicePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireStartGroup { + return b, errInternalBadWireType + } + x, y := findEndGroup(b) + if x < 0 { + return nil, io.ErrUnexpectedEOF + } + v := valToPointer(reflect.New(sub.typ)) + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + f.appendPointer(v) + return b[y:], err + } +} + +func makeUnmarshalMap(f *reflect.StructField) unmarshaler { + t := f.Type + kt := t.Key() + vt := t.Elem() + unmarshalKey := typeUnmarshaler(kt, f.Tag.Get("protobuf_key")) + unmarshalVal := typeUnmarshaler(vt, f.Tag.Get("protobuf_val")) + return func(b []byte, f pointer, w int) ([]byte, error) { + // The map entry is a submessage. Figure out how big it is. + if w != WireBytes { + return nil, fmt.Errorf("proto: bad wiretype for map field: got %d want %d", w, WireBytes) + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + r := b[x:] // unused data to return + b = b[:x] // data for map entry + + // Note: we could use #keys * #values ~= 200 functions + // to do map decoding without reflection. Probably not worth it. + // Maps will be somewhat slow. 
Oh well. + + // Read key and value from data. + k := reflect.New(kt) + v := reflect.New(vt) + for len(b) > 0 { + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + wire := int(x) & 7 + b = b[n:] + + var err error + switch x >> 3 { + case 1: + b, err = unmarshalKey(b, valToPointer(k), wire) + case 2: + b, err = unmarshalVal(b, valToPointer(v), wire) + default: + err = errInternalBadWireType // skip unknown tag + } + + if err == nil { + continue + } + if err != errInternalBadWireType { + return nil, err + } + + // Skip past unknown fields. + b, err = skipField(b, wire) + if err != nil { + return nil, err + } + } + + // Get map, allocate if needed. + m := f.asPointerTo(t).Elem() // an addressable map[K]T + if m.IsNil() { + m.Set(reflect.MakeMap(t)) + } + + // Insert into map. + m.SetMapIndex(k.Elem(), v.Elem()) + + return r, nil + } +} + +// makeUnmarshalOneof makes an unmarshaler for oneof fields. +// for: +// message Msg { +// oneof F { +// int64 X = 1; +// float64 Y = 2; +// } +// } +// typ is the type of the concrete entry for a oneof case (e.g. Msg_X). +// ityp is the interface type of the oneof field (e.g. isMsg_F). +// unmarshal is the unmarshaler for the base type of the oneof case (e.g. int64). +// Note that this function will be called once for each case in the oneof. +func makeUnmarshalOneof(typ, ityp reflect.Type, unmarshal unmarshaler) unmarshaler { + sf := typ.Field(0) + field0 := toField(&sf) + return func(b []byte, f pointer, w int) ([]byte, error) { + // Allocate holder for value. + v := reflect.New(typ) + + // Unmarshal data into holder. + // We unmarshal into the first field of the holder object. + var err error + b, err = unmarshal(b, valToPointer(v).offset(field0), w) + if err != nil { + return nil, err + } + + // Write pointer to holder into target field. + f.asPointerTo(ityp).Elem().Set(v) + + return b, nil + } +} + +// Error used by decode internally. +var errInternalBadWireType = errors.New("proto: internal error: bad wiretype") + +// skipField skips past a field of type wire and returns the remaining bytes. +func skipField(b []byte, wire int) ([]byte, error) { + switch wire { + case WireVarint: + _, k := decodeVarint(b) + if k == 0 { + return b, io.ErrUnexpectedEOF + } + b = b[k:] + case WireFixed32: + if len(b) < 4 { + return b, io.ErrUnexpectedEOF + } + b = b[4:] + case WireFixed64: + if len(b) < 8 { + return b, io.ErrUnexpectedEOF + } + b = b[8:] + case WireBytes: + m, k := decodeVarint(b) + if k == 0 || uint64(len(b)-k) < m { + return b, io.ErrUnexpectedEOF + } + b = b[uint64(k)+m:] + case WireStartGroup: + _, i := findEndGroup(b) + if i == -1 { + return b, io.ErrUnexpectedEOF + } + b = b[i:] + default: + return b, fmt.Errorf("proto: can't skip unknown wire type %d", wire) + } + return b, nil +} + +// findEndGroup finds the index of the next EndGroup tag. +// Groups may be nested, so the "next" EndGroup tag is the first +// unpaired EndGroup. +// findEndGroup returns the indexes of the start and end of the EndGroup tag. +// Returns (-1,-1) if it can't find one. 
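+// It works by walking the buffer one tag at a time and skipping each
+// field's payload according to its wire type; a nested StartGroup
+// increments the depth and an EndGroup decrements it, so the matching
+// EndGroup is the one that brings the depth back to zero.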
+func findEndGroup(b []byte) (int, int) { + depth := 1 + i := 0 + for { + x, n := decodeVarint(b[i:]) + if n == 0 { + return -1, -1 + } + j := i + i += n + switch x & 7 { + case WireVarint: + _, k := decodeVarint(b[i:]) + if k == 0 { + return -1, -1 + } + i += k + case WireFixed32: + if len(b)-4 < i { + return -1, -1 + } + i += 4 + case WireFixed64: + if len(b)-8 < i { + return -1, -1 + } + i += 8 + case WireBytes: + m, k := decodeVarint(b[i:]) + if k == 0 { + return -1, -1 + } + i += k + if uint64(len(b)-i) < m { + return -1, -1 + } + i += int(m) + case WireStartGroup: + depth++ + case WireEndGroup: + depth-- + if depth == 0 { + return j, i + } + default: + return -1, -1 + } + } +} + +// encodeVarint appends a varint-encoded integer to b and returns the result. +func encodeVarint(b []byte, x uint64) []byte { + for x >= 1<<7 { + b = append(b, byte(x&0x7f|0x80)) + x >>= 7 + } + return append(b, byte(x)) +} + +// decodeVarint reads a varint-encoded integer from b. +// Returns the decoded integer and the number of bytes read. +// If there is an error, it returns 0,0. +func decodeVarint(b []byte) (uint64, int) { + var x, y uint64 + if len(b) <= 0 { + goto bad + } + x = uint64(b[0]) + if x < 0x80 { + return x, 1 + } + x -= 0x80 + + if len(b) <= 1 { + goto bad + } + y = uint64(b[1]) + x += y << 7 + if y < 0x80 { + return x, 2 + } + x -= 0x80 << 7 + + if len(b) <= 2 { + goto bad + } + y = uint64(b[2]) + x += y << 14 + if y < 0x80 { + return x, 3 + } + x -= 0x80 << 14 + + if len(b) <= 3 { + goto bad + } + y = uint64(b[3]) + x += y << 21 + if y < 0x80 { + return x, 4 + } + x -= 0x80 << 21 + + if len(b) <= 4 { + goto bad + } + y = uint64(b[4]) + x += y << 28 + if y < 0x80 { + return x, 5 + } + x -= 0x80 << 28 + + if len(b) <= 5 { + goto bad + } + y = uint64(b[5]) + x += y << 35 + if y < 0x80 { + return x, 6 + } + x -= 0x80 << 35 + + if len(b) <= 6 { + goto bad + } + y = uint64(b[6]) + x += y << 42 + if y < 0x80 { + return x, 7 + } + x -= 0x80 << 42 + + if len(b) <= 7 { + goto bad + } + y = uint64(b[7]) + x += y << 49 + if y < 0x80 { + return x, 8 + } + x -= 0x80 << 49 + + if len(b) <= 8 { + goto bad + } + y = uint64(b[8]) + x += y << 56 + if y < 0x80 { + return x, 9 + } + x -= 0x80 << 56 + + if len(b) <= 9 { + goto bad + } + y = uint64(b[9]) + x += y << 63 + if y < 2 { + return x, 10 + } + +bad: + return 0, 0 +} diff --git a/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go b/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go new file mode 100644 index 00000000..049b5dd2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go @@ -0,0 +1,5118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: test_proto/test.proto + +package test_proto // import "github.com/golang/protobuf/proto/test_proto" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FOO int32 + +const ( + FOO_FOO1 FOO = 1 +) + +var FOO_name = map[int32]string{ + 1: "FOO1", +} +var FOO_value = map[string]int32{ + "FOO1": 1, +} + +func (x FOO) Enum() *FOO { + p := new(FOO) + *p = x + return p +} +func (x FOO) String() string { + return proto.EnumName(FOO_name, int32(x)) +} +func (x *FOO) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FOO_value, data, "FOO") + if err != nil { + return err + } + *x = FOO(value) + return nil +} +func (FOO) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{0} +} + +// An enum, for completeness. +type GoTest_KIND int32 + +const ( + GoTest_VOID GoTest_KIND = 0 + // Basic types + GoTest_BOOL GoTest_KIND = 1 + GoTest_BYTES GoTest_KIND = 2 + GoTest_FINGERPRINT GoTest_KIND = 3 + GoTest_FLOAT GoTest_KIND = 4 + GoTest_INT GoTest_KIND = 5 + GoTest_STRING GoTest_KIND = 6 + GoTest_TIME GoTest_KIND = 7 + // Groupings + GoTest_TUPLE GoTest_KIND = 8 + GoTest_ARRAY GoTest_KIND = 9 + GoTest_MAP GoTest_KIND = 10 + // Table types + GoTest_TABLE GoTest_KIND = 11 + // Functions + GoTest_FUNCTION GoTest_KIND = 12 +) + +var GoTest_KIND_name = map[int32]string{ + 0: "VOID", + 1: "BOOL", + 2: "BYTES", + 3: "FINGERPRINT", + 4: "FLOAT", + 5: "INT", + 6: "STRING", + 7: "TIME", + 8: "TUPLE", + 9: "ARRAY", + 10: "MAP", + 11: "TABLE", + 12: "FUNCTION", +} +var GoTest_KIND_value = map[string]int32{ + "VOID": 0, + "BOOL": 1, + "BYTES": 2, + "FINGERPRINT": 3, + "FLOAT": 4, + "INT": 5, + "STRING": 6, + "TIME": 7, + "TUPLE": 8, + "ARRAY": 9, + "MAP": 10, + "TABLE": 11, + "FUNCTION": 12, +} + +func (x GoTest_KIND) Enum() *GoTest_KIND { + p := new(GoTest_KIND) + *p = x + return p +} +func (x GoTest_KIND) String() string { + return proto.EnumName(GoTest_KIND_name, int32(x)) +} +func (x *GoTest_KIND) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(GoTest_KIND_value, data, "GoTest_KIND") + if err != nil { + return err + } + *x = GoTest_KIND(value) + return nil +} +func (GoTest_KIND) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{2, 0} +} + +type MyMessage_Color int32 + +const ( + MyMessage_RED MyMessage_Color = 0 + MyMessage_GREEN MyMessage_Color = 1 + MyMessage_BLUE MyMessage_Color = 2 +) + +var MyMessage_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var MyMessage_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x MyMessage_Color) Enum() *MyMessage_Color { + p := new(MyMessage_Color) + *p = x + return p +} +func (x MyMessage_Color) String() string { + return proto.EnumName(MyMessage_Color_name, int32(x)) +} +func (x *MyMessage_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(MyMessage_Color_value, data, "MyMessage_Color") + if err != nil { + return err + } + *x = MyMessage_Color(value) + return nil +} +func (MyMessage_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{13, 0} +} + +type DefaultsMessage_DefaultsEnum int32 + +const ( + DefaultsMessage_ZERO DefaultsMessage_DefaultsEnum = 0 + DefaultsMessage_ONE DefaultsMessage_DefaultsEnum = 1 + DefaultsMessage_TWO DefaultsMessage_DefaultsEnum = 2 +) + +var DefaultsMessage_DefaultsEnum_name = map[int32]string{ + 0: "ZERO", + 1: "ONE", + 2: "TWO", +} +var DefaultsMessage_DefaultsEnum_value = map[string]int32{ + "ZERO": 0, + "ONE": 1, + "TWO": 2, +} + +func (x 
DefaultsMessage_DefaultsEnum) Enum() *DefaultsMessage_DefaultsEnum { + p := new(DefaultsMessage_DefaultsEnum) + *p = x + return p +} +func (x DefaultsMessage_DefaultsEnum) String() string { + return proto.EnumName(DefaultsMessage_DefaultsEnum_name, int32(x)) +} +func (x *DefaultsMessage_DefaultsEnum) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(DefaultsMessage_DefaultsEnum_value, data, "DefaultsMessage_DefaultsEnum") + if err != nil { + return err + } + *x = DefaultsMessage_DefaultsEnum(value) + return nil +} +func (DefaultsMessage_DefaultsEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{16, 0} +} + +type Defaults_Color int32 + +const ( + Defaults_RED Defaults_Color = 0 + Defaults_GREEN Defaults_Color = 1 + Defaults_BLUE Defaults_Color = 2 +) + +var Defaults_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var Defaults_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Defaults_Color) Enum() *Defaults_Color { + p := new(Defaults_Color) + *p = x + return p +} +func (x Defaults_Color) String() string { + return proto.EnumName(Defaults_Color_name, int32(x)) +} +func (x *Defaults_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Defaults_Color_value, data, "Defaults_Color") + if err != nil { + return err + } + *x = Defaults_Color(value) + return nil +} +func (Defaults_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{21, 0} +} + +type RepeatedEnum_Color int32 + +const ( + RepeatedEnum_RED RepeatedEnum_Color = 1 +) + +var RepeatedEnum_Color_name = map[int32]string{ + 1: "RED", +} +var RepeatedEnum_Color_value = map[string]int32{ + "RED": 1, +} + +func (x RepeatedEnum_Color) Enum() *RepeatedEnum_Color { + p := new(RepeatedEnum_Color) + *p = x + return p +} +func (x RepeatedEnum_Color) String() string { + return proto.EnumName(RepeatedEnum_Color_name, int32(x)) +} +func (x *RepeatedEnum_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(RepeatedEnum_Color_value, data, "RepeatedEnum_Color") + if err != nil { + return err + } + *x = RepeatedEnum_Color(value) + return nil +} +func (RepeatedEnum_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{23, 0} +} + +type GoEnum struct { + Foo *FOO `protobuf:"varint,1,req,name=foo,enum=test_proto.FOO" json:"foo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoEnum) Reset() { *m = GoEnum{} } +func (m *GoEnum) String() string { return proto.CompactTextString(m) } +func (*GoEnum) ProtoMessage() {} +func (*GoEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{0} +} +func (m *GoEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoEnum.Unmarshal(m, b) +} +func (m *GoEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoEnum.Marshal(b, m, deterministic) +} +func (dst *GoEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoEnum.Merge(dst, src) +} +func (m *GoEnum) XXX_Size() int { + return xxx_messageInfo_GoEnum.Size(m) +} +func (m *GoEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GoEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GoEnum proto.InternalMessageInfo + +func (m *GoEnum) GetFoo() FOO { + if m != nil && m.Foo != nil { + return *m.Foo + } + return FOO_FOO1 +} + +type GoTestField struct { + 
Label *string `protobuf:"bytes,1,req,name=Label" json:"Label,omitempty"` + Type *string `protobuf:"bytes,2,req,name=Type" json:"Type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestField) Reset() { *m = GoTestField{} } +func (m *GoTestField) String() string { return proto.CompactTextString(m) } +func (*GoTestField) ProtoMessage() {} +func (*GoTestField) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{1} +} +func (m *GoTestField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestField.Unmarshal(m, b) +} +func (m *GoTestField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTestField.Marshal(b, m, deterministic) +} +func (dst *GoTestField) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestField.Merge(dst, src) +} +func (m *GoTestField) XXX_Size() int { + return xxx_messageInfo_GoTestField.Size(m) +} +func (m *GoTestField) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestField.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestField proto.InternalMessageInfo + +func (m *GoTestField) GetLabel() string { + if m != nil && m.Label != nil { + return *m.Label + } + return "" +} + +func (m *GoTestField) GetType() string { + if m != nil && m.Type != nil { + return *m.Type + } + return "" +} + +type GoTest struct { + // Some typical parameters + Kind *GoTest_KIND `protobuf:"varint,1,req,name=Kind,enum=test_proto.GoTest_KIND" json:"Kind,omitempty"` + Table *string `protobuf:"bytes,2,opt,name=Table" json:"Table,omitempty"` + Param *int32 `protobuf:"varint,3,opt,name=Param" json:"Param,omitempty"` + // Required, repeated and optional foreign fields. + RequiredField *GoTestField `protobuf:"bytes,4,req,name=RequiredField" json:"RequiredField,omitempty"` + RepeatedField []*GoTestField `protobuf:"bytes,5,rep,name=RepeatedField" json:"RepeatedField,omitempty"` + OptionalField *GoTestField `protobuf:"bytes,6,opt,name=OptionalField" json:"OptionalField,omitempty"` + // Required fields of all basic types + F_BoolRequired *bool `protobuf:"varint,10,req,name=F_Bool_required,json=FBoolRequired" json:"F_Bool_required,omitempty"` + F_Int32Required *int32 `protobuf:"varint,11,req,name=F_Int32_required,json=FInt32Required" json:"F_Int32_required,omitempty"` + F_Int64Required *int64 `protobuf:"varint,12,req,name=F_Int64_required,json=FInt64Required" json:"F_Int64_required,omitempty"` + F_Fixed32Required *uint32 `protobuf:"fixed32,13,req,name=F_Fixed32_required,json=FFixed32Required" json:"F_Fixed32_required,omitempty"` + F_Fixed64Required *uint64 `protobuf:"fixed64,14,req,name=F_Fixed64_required,json=FFixed64Required" json:"F_Fixed64_required,omitempty"` + F_Uint32Required *uint32 `protobuf:"varint,15,req,name=F_Uint32_required,json=FUint32Required" json:"F_Uint32_required,omitempty"` + F_Uint64Required *uint64 `protobuf:"varint,16,req,name=F_Uint64_required,json=FUint64Required" json:"F_Uint64_required,omitempty"` + F_FloatRequired *float32 `protobuf:"fixed32,17,req,name=F_Float_required,json=FFloatRequired" json:"F_Float_required,omitempty"` + F_DoubleRequired *float64 `protobuf:"fixed64,18,req,name=F_Double_required,json=FDoubleRequired" json:"F_Double_required,omitempty"` + F_StringRequired *string `protobuf:"bytes,19,req,name=F_String_required,json=FStringRequired" json:"F_String_required,omitempty"` + F_BytesRequired []byte `protobuf:"bytes,101,req,name=F_Bytes_required,json=FBytesRequired" json:"F_Bytes_required,omitempty"` + 
F_Sint32Required *int32 `protobuf:"zigzag32,102,req,name=F_Sint32_required,json=FSint32Required" json:"F_Sint32_required,omitempty"` + F_Sint64Required *int64 `protobuf:"zigzag64,103,req,name=F_Sint64_required,json=FSint64Required" json:"F_Sint64_required,omitempty"` + F_Sfixed32Required *int32 `protobuf:"fixed32,104,req,name=F_Sfixed32_required,json=FSfixed32Required" json:"F_Sfixed32_required,omitempty"` + F_Sfixed64Required *int64 `protobuf:"fixed64,105,req,name=F_Sfixed64_required,json=FSfixed64Required" json:"F_Sfixed64_required,omitempty"` + // Repeated fields of all basic types + F_BoolRepeated []bool `protobuf:"varint,20,rep,name=F_Bool_repeated,json=FBoolRepeated" json:"F_Bool_repeated,omitempty"` + F_Int32Repeated []int32 `protobuf:"varint,21,rep,name=F_Int32_repeated,json=FInt32Repeated" json:"F_Int32_repeated,omitempty"` + F_Int64Repeated []int64 `protobuf:"varint,22,rep,name=F_Int64_repeated,json=FInt64Repeated" json:"F_Int64_repeated,omitempty"` + F_Fixed32Repeated []uint32 `protobuf:"fixed32,23,rep,name=F_Fixed32_repeated,json=FFixed32Repeated" json:"F_Fixed32_repeated,omitempty"` + F_Fixed64Repeated []uint64 `protobuf:"fixed64,24,rep,name=F_Fixed64_repeated,json=FFixed64Repeated" json:"F_Fixed64_repeated,omitempty"` + F_Uint32Repeated []uint32 `protobuf:"varint,25,rep,name=F_Uint32_repeated,json=FUint32Repeated" json:"F_Uint32_repeated,omitempty"` + F_Uint64Repeated []uint64 `protobuf:"varint,26,rep,name=F_Uint64_repeated,json=FUint64Repeated" json:"F_Uint64_repeated,omitempty"` + F_FloatRepeated []float32 `protobuf:"fixed32,27,rep,name=F_Float_repeated,json=FFloatRepeated" json:"F_Float_repeated,omitempty"` + F_DoubleRepeated []float64 `protobuf:"fixed64,28,rep,name=F_Double_repeated,json=FDoubleRepeated" json:"F_Double_repeated,omitempty"` + F_StringRepeated []string `protobuf:"bytes,29,rep,name=F_String_repeated,json=FStringRepeated" json:"F_String_repeated,omitempty"` + F_BytesRepeated [][]byte `protobuf:"bytes,201,rep,name=F_Bytes_repeated,json=FBytesRepeated" json:"F_Bytes_repeated,omitempty"` + F_Sint32Repeated []int32 `protobuf:"zigzag32,202,rep,name=F_Sint32_repeated,json=FSint32Repeated" json:"F_Sint32_repeated,omitempty"` + F_Sint64Repeated []int64 `protobuf:"zigzag64,203,rep,name=F_Sint64_repeated,json=FSint64Repeated" json:"F_Sint64_repeated,omitempty"` + F_Sfixed32Repeated []int32 `protobuf:"fixed32,204,rep,name=F_Sfixed32_repeated,json=FSfixed32Repeated" json:"F_Sfixed32_repeated,omitempty"` + F_Sfixed64Repeated []int64 `protobuf:"fixed64,205,rep,name=F_Sfixed64_repeated,json=FSfixed64Repeated" json:"F_Sfixed64_repeated,omitempty"` + // Optional fields of all basic types + F_BoolOptional *bool `protobuf:"varint,30,opt,name=F_Bool_optional,json=FBoolOptional" json:"F_Bool_optional,omitempty"` + F_Int32Optional *int32 `protobuf:"varint,31,opt,name=F_Int32_optional,json=FInt32Optional" json:"F_Int32_optional,omitempty"` + F_Int64Optional *int64 `protobuf:"varint,32,opt,name=F_Int64_optional,json=FInt64Optional" json:"F_Int64_optional,omitempty"` + F_Fixed32Optional *uint32 `protobuf:"fixed32,33,opt,name=F_Fixed32_optional,json=FFixed32Optional" json:"F_Fixed32_optional,omitempty"` + F_Fixed64Optional *uint64 `protobuf:"fixed64,34,opt,name=F_Fixed64_optional,json=FFixed64Optional" json:"F_Fixed64_optional,omitempty"` + F_Uint32Optional *uint32 `protobuf:"varint,35,opt,name=F_Uint32_optional,json=FUint32Optional" json:"F_Uint32_optional,omitempty"` + F_Uint64Optional *uint64 `protobuf:"varint,36,opt,name=F_Uint64_optional,json=FUint64Optional" 
json:"F_Uint64_optional,omitempty"` + F_FloatOptional *float32 `protobuf:"fixed32,37,opt,name=F_Float_optional,json=FFloatOptional" json:"F_Float_optional,omitempty"` + F_DoubleOptional *float64 `protobuf:"fixed64,38,opt,name=F_Double_optional,json=FDoubleOptional" json:"F_Double_optional,omitempty"` + F_StringOptional *string `protobuf:"bytes,39,opt,name=F_String_optional,json=FStringOptional" json:"F_String_optional,omitempty"` + F_BytesOptional []byte `protobuf:"bytes,301,opt,name=F_Bytes_optional,json=FBytesOptional" json:"F_Bytes_optional,omitempty"` + F_Sint32Optional *int32 `protobuf:"zigzag32,302,opt,name=F_Sint32_optional,json=FSint32Optional" json:"F_Sint32_optional,omitempty"` + F_Sint64Optional *int64 `protobuf:"zigzag64,303,opt,name=F_Sint64_optional,json=FSint64Optional" json:"F_Sint64_optional,omitempty"` + F_Sfixed32Optional *int32 `protobuf:"fixed32,304,opt,name=F_Sfixed32_optional,json=FSfixed32Optional" json:"F_Sfixed32_optional,omitempty"` + F_Sfixed64Optional *int64 `protobuf:"fixed64,305,opt,name=F_Sfixed64_optional,json=FSfixed64Optional" json:"F_Sfixed64_optional,omitempty"` + // Default-valued fields of all basic types + F_BoolDefaulted *bool `protobuf:"varint,40,opt,name=F_Bool_defaulted,json=FBoolDefaulted,def=1" json:"F_Bool_defaulted,omitempty"` + F_Int32Defaulted *int32 `protobuf:"varint,41,opt,name=F_Int32_defaulted,json=FInt32Defaulted,def=32" json:"F_Int32_defaulted,omitempty"` + F_Int64Defaulted *int64 `protobuf:"varint,42,opt,name=F_Int64_defaulted,json=FInt64Defaulted,def=64" json:"F_Int64_defaulted,omitempty"` + F_Fixed32Defaulted *uint32 `protobuf:"fixed32,43,opt,name=F_Fixed32_defaulted,json=FFixed32Defaulted,def=320" json:"F_Fixed32_defaulted,omitempty"` + F_Fixed64Defaulted *uint64 `protobuf:"fixed64,44,opt,name=F_Fixed64_defaulted,json=FFixed64Defaulted,def=640" json:"F_Fixed64_defaulted,omitempty"` + F_Uint32Defaulted *uint32 `protobuf:"varint,45,opt,name=F_Uint32_defaulted,json=FUint32Defaulted,def=3200" json:"F_Uint32_defaulted,omitempty"` + F_Uint64Defaulted *uint64 `protobuf:"varint,46,opt,name=F_Uint64_defaulted,json=FUint64Defaulted,def=6400" json:"F_Uint64_defaulted,omitempty"` + F_FloatDefaulted *float32 `protobuf:"fixed32,47,opt,name=F_Float_defaulted,json=FFloatDefaulted,def=314159" json:"F_Float_defaulted,omitempty"` + F_DoubleDefaulted *float64 `protobuf:"fixed64,48,opt,name=F_Double_defaulted,json=FDoubleDefaulted,def=271828" json:"F_Double_defaulted,omitempty"` + F_StringDefaulted *string `protobuf:"bytes,49,opt,name=F_String_defaulted,json=FStringDefaulted,def=hello, \"world!\"\n" json:"F_String_defaulted,omitempty"` + F_BytesDefaulted []byte `protobuf:"bytes,401,opt,name=F_Bytes_defaulted,json=FBytesDefaulted,def=Bignose" json:"F_Bytes_defaulted,omitempty"` + F_Sint32Defaulted *int32 `protobuf:"zigzag32,402,opt,name=F_Sint32_defaulted,json=FSint32Defaulted,def=-32" json:"F_Sint32_defaulted,omitempty"` + F_Sint64Defaulted *int64 `protobuf:"zigzag64,403,opt,name=F_Sint64_defaulted,json=FSint64Defaulted,def=-64" json:"F_Sint64_defaulted,omitempty"` + F_Sfixed32Defaulted *int32 `protobuf:"fixed32,404,opt,name=F_Sfixed32_defaulted,json=FSfixed32Defaulted,def=-32" json:"F_Sfixed32_defaulted,omitempty"` + F_Sfixed64Defaulted *int64 `protobuf:"fixed64,405,opt,name=F_Sfixed64_defaulted,json=FSfixed64Defaulted,def=-64" json:"F_Sfixed64_defaulted,omitempty"` + // Packed repeated fields (no string or bytes). 
+ F_BoolRepeatedPacked []bool `protobuf:"varint,50,rep,packed,name=F_Bool_repeated_packed,json=FBoolRepeatedPacked" json:"F_Bool_repeated_packed,omitempty"` + F_Int32RepeatedPacked []int32 `protobuf:"varint,51,rep,packed,name=F_Int32_repeated_packed,json=FInt32RepeatedPacked" json:"F_Int32_repeated_packed,omitempty"` + F_Int64RepeatedPacked []int64 `protobuf:"varint,52,rep,packed,name=F_Int64_repeated_packed,json=FInt64RepeatedPacked" json:"F_Int64_repeated_packed,omitempty"` + F_Fixed32RepeatedPacked []uint32 `protobuf:"fixed32,53,rep,packed,name=F_Fixed32_repeated_packed,json=FFixed32RepeatedPacked" json:"F_Fixed32_repeated_packed,omitempty"` + F_Fixed64RepeatedPacked []uint64 `protobuf:"fixed64,54,rep,packed,name=F_Fixed64_repeated_packed,json=FFixed64RepeatedPacked" json:"F_Fixed64_repeated_packed,omitempty"` + F_Uint32RepeatedPacked []uint32 `protobuf:"varint,55,rep,packed,name=F_Uint32_repeated_packed,json=FUint32RepeatedPacked" json:"F_Uint32_repeated_packed,omitempty"` + F_Uint64RepeatedPacked []uint64 `protobuf:"varint,56,rep,packed,name=F_Uint64_repeated_packed,json=FUint64RepeatedPacked" json:"F_Uint64_repeated_packed,omitempty"` + F_FloatRepeatedPacked []float32 `protobuf:"fixed32,57,rep,packed,name=F_Float_repeated_packed,json=FFloatRepeatedPacked" json:"F_Float_repeated_packed,omitempty"` + F_DoubleRepeatedPacked []float64 `protobuf:"fixed64,58,rep,packed,name=F_Double_repeated_packed,json=FDoubleRepeatedPacked" json:"F_Double_repeated_packed,omitempty"` + F_Sint32RepeatedPacked []int32 `protobuf:"zigzag32,502,rep,packed,name=F_Sint32_repeated_packed,json=FSint32RepeatedPacked" json:"F_Sint32_repeated_packed,omitempty"` + F_Sint64RepeatedPacked []int64 `protobuf:"zigzag64,503,rep,packed,name=F_Sint64_repeated_packed,json=FSint64RepeatedPacked" json:"F_Sint64_repeated_packed,omitempty"` + F_Sfixed32RepeatedPacked []int32 `protobuf:"fixed32,504,rep,packed,name=F_Sfixed32_repeated_packed,json=FSfixed32RepeatedPacked" json:"F_Sfixed32_repeated_packed,omitempty"` + F_Sfixed64RepeatedPacked []int64 `protobuf:"fixed64,505,rep,packed,name=F_Sfixed64_repeated_packed,json=FSfixed64RepeatedPacked" json:"F_Sfixed64_repeated_packed,omitempty"` + Requiredgroup *GoTest_RequiredGroup `protobuf:"group,70,req,name=RequiredGroup,json=requiredgroup" json:"requiredgroup,omitempty"` + Repeatedgroup []*GoTest_RepeatedGroup `protobuf:"group,80,rep,name=RepeatedGroup,json=repeatedgroup" json:"repeatedgroup,omitempty"` + Optionalgroup *GoTest_OptionalGroup `protobuf:"group,90,opt,name=OptionalGroup,json=optionalgroup" json:"optionalgroup,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest) Reset() { *m = GoTest{} } +func (m *GoTest) String() string { return proto.CompactTextString(m) } +func (*GoTest) ProtoMessage() {} +func (*GoTest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{2} +} +func (m *GoTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest.Unmarshal(m, b) +} +func (m *GoTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest.Marshal(b, m, deterministic) +} +func (dst *GoTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest.Merge(dst, src) +} +func (m *GoTest) XXX_Size() int { + return xxx_messageInfo_GoTest.Size(m) +} +func (m *GoTest) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest proto.InternalMessageInfo + +const 
Default_GoTest_F_BoolDefaulted bool = true +const Default_GoTest_F_Int32Defaulted int32 = 32 +const Default_GoTest_F_Int64Defaulted int64 = 64 +const Default_GoTest_F_Fixed32Defaulted uint32 = 320 +const Default_GoTest_F_Fixed64Defaulted uint64 = 640 +const Default_GoTest_F_Uint32Defaulted uint32 = 3200 +const Default_GoTest_F_Uint64Defaulted uint64 = 6400 +const Default_GoTest_F_FloatDefaulted float32 = 314159 +const Default_GoTest_F_DoubleDefaulted float64 = 271828 +const Default_GoTest_F_StringDefaulted string = "hello, \"world!\"\n" + +var Default_GoTest_F_BytesDefaulted []byte = []byte("Bignose") + +const Default_GoTest_F_Sint32Defaulted int32 = -32 +const Default_GoTest_F_Sint64Defaulted int64 = -64 +const Default_GoTest_F_Sfixed32Defaulted int32 = -32 +const Default_GoTest_F_Sfixed64Defaulted int64 = -64 + +func (m *GoTest) GetKind() GoTest_KIND { + if m != nil && m.Kind != nil { + return *m.Kind + } + return GoTest_VOID +} + +func (m *GoTest) GetTable() string { + if m != nil && m.Table != nil { + return *m.Table + } + return "" +} + +func (m *GoTest) GetParam() int32 { + if m != nil && m.Param != nil { + return *m.Param + } + return 0 +} + +func (m *GoTest) GetRequiredField() *GoTestField { + if m != nil { + return m.RequiredField + } + return nil +} + +func (m *GoTest) GetRepeatedField() []*GoTestField { + if m != nil { + return m.RepeatedField + } + return nil +} + +func (m *GoTest) GetOptionalField() *GoTestField { + if m != nil { + return m.OptionalField + } + return nil +} + +func (m *GoTest) GetF_BoolRequired() bool { + if m != nil && m.F_BoolRequired != nil { + return *m.F_BoolRequired + } + return false +} + +func (m *GoTest) GetF_Int32Required() int32 { + if m != nil && m.F_Int32Required != nil { + return *m.F_Int32Required + } + return 0 +} + +func (m *GoTest) GetF_Int64Required() int64 { + if m != nil && m.F_Int64Required != nil { + return *m.F_Int64Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Required() uint32 { + if m != nil && m.F_Fixed32Required != nil { + return *m.F_Fixed32Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Required() uint64 { + if m != nil && m.F_Fixed64Required != nil { + return *m.F_Fixed64Required + } + return 0 +} + +func (m *GoTest) GetF_Uint32Required() uint32 { + if m != nil && m.F_Uint32Required != nil { + return *m.F_Uint32Required + } + return 0 +} + +func (m *GoTest) GetF_Uint64Required() uint64 { + if m != nil && m.F_Uint64Required != nil { + return *m.F_Uint64Required + } + return 0 +} + +func (m *GoTest) GetF_FloatRequired() float32 { + if m != nil && m.F_FloatRequired != nil { + return *m.F_FloatRequired + } + return 0 +} + +func (m *GoTest) GetF_DoubleRequired() float64 { + if m != nil && m.F_DoubleRequired != nil { + return *m.F_DoubleRequired + } + return 0 +} + +func (m *GoTest) GetF_StringRequired() string { + if m != nil && m.F_StringRequired != nil { + return *m.F_StringRequired + } + return "" +} + +func (m *GoTest) GetF_BytesRequired() []byte { + if m != nil { + return m.F_BytesRequired + } + return nil +} + +func (m *GoTest) GetF_Sint32Required() int32 { + if m != nil && m.F_Sint32Required != nil { + return *m.F_Sint32Required + } + return 0 +} + +func (m *GoTest) GetF_Sint64Required() int64 { + if m != nil && m.F_Sint64Required != nil { + return *m.F_Sint64Required + } + return 0 +} + +func (m *GoTest) GetF_Sfixed32Required() int32 { + if m != nil && m.F_Sfixed32Required != nil { + return *m.F_Sfixed32Required + } + return 0 +} + +func (m *GoTest) GetF_Sfixed64Required() int64 { + if m != nil && 
m.F_Sfixed64Required != nil { + return *m.F_Sfixed64Required + } + return 0 +} + +func (m *GoTest) GetF_BoolRepeated() []bool { + if m != nil { + return m.F_BoolRepeated + } + return nil +} + +func (m *GoTest) GetF_Int32Repeated() []int32 { + if m != nil { + return m.F_Int32Repeated + } + return nil +} + +func (m *GoTest) GetF_Int64Repeated() []int64 { + if m != nil { + return m.F_Int64Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed32Repeated() []uint32 { + if m != nil { + return m.F_Fixed32Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed64Repeated() []uint64 { + if m != nil { + return m.F_Fixed64Repeated + } + return nil +} + +func (m *GoTest) GetF_Uint32Repeated() []uint32 { + if m != nil { + return m.F_Uint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Uint64Repeated() []uint64 { + if m != nil { + return m.F_Uint64Repeated + } + return nil +} + +func (m *GoTest) GetF_FloatRepeated() []float32 { + if m != nil { + return m.F_FloatRepeated + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeated() []float64 { + if m != nil { + return m.F_DoubleRepeated + } + return nil +} + +func (m *GoTest) GetF_StringRepeated() []string { + if m != nil { + return m.F_StringRepeated + } + return nil +} + +func (m *GoTest) GetF_BytesRepeated() [][]byte { + if m != nil { + return m.F_BytesRepeated + } + return nil +} + +func (m *GoTest) GetF_Sint32Repeated() []int32 { + if m != nil { + return m.F_Sint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Sint64Repeated() []int64 { + if m != nil { + return m.F_Sint64Repeated + } + return nil +} + +func (m *GoTest) GetF_Sfixed32Repeated() []int32 { + if m != nil { + return m.F_Sfixed32Repeated + } + return nil +} + +func (m *GoTest) GetF_Sfixed64Repeated() []int64 { + if m != nil { + return m.F_Sfixed64Repeated + } + return nil +} + +func (m *GoTest) GetF_BoolOptional() bool { + if m != nil && m.F_BoolOptional != nil { + return *m.F_BoolOptional + } + return false +} + +func (m *GoTest) GetF_Int32Optional() int32 { + if m != nil && m.F_Int32Optional != nil { + return *m.F_Int32Optional + } + return 0 +} + +func (m *GoTest) GetF_Int64Optional() int64 { + if m != nil && m.F_Int64Optional != nil { + return *m.F_Int64Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Optional() uint32 { + if m != nil && m.F_Fixed32Optional != nil { + return *m.F_Fixed32Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Optional() uint64 { + if m != nil && m.F_Fixed64Optional != nil { + return *m.F_Fixed64Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint32Optional() uint32 { + if m != nil && m.F_Uint32Optional != nil { + return *m.F_Uint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint64Optional() uint64 { + if m != nil && m.F_Uint64Optional != nil { + return *m.F_Uint64Optional + } + return 0 +} + +func (m *GoTest) GetF_FloatOptional() float32 { + if m != nil && m.F_FloatOptional != nil { + return *m.F_FloatOptional + } + return 0 +} + +func (m *GoTest) GetF_DoubleOptional() float64 { + if m != nil && m.F_DoubleOptional != nil { + return *m.F_DoubleOptional + } + return 0 +} + +func (m *GoTest) GetF_StringOptional() string { + if m != nil && m.F_StringOptional != nil { + return *m.F_StringOptional + } + return "" +} + +func (m *GoTest) GetF_BytesOptional() []byte { + if m != nil { + return m.F_BytesOptional + } + return nil +} + +func (m *GoTest) GetF_Sint32Optional() int32 { + if m != nil && m.F_Sint32Optional != nil { + return *m.F_Sint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Sint64Optional() int64 
{ + if m != nil && m.F_Sint64Optional != nil { + return *m.F_Sint64Optional + } + return 0 +} + +func (m *GoTest) GetF_Sfixed32Optional() int32 { + if m != nil && m.F_Sfixed32Optional != nil { + return *m.F_Sfixed32Optional + } + return 0 +} + +func (m *GoTest) GetF_Sfixed64Optional() int64 { + if m != nil && m.F_Sfixed64Optional != nil { + return *m.F_Sfixed64Optional + } + return 0 +} + +func (m *GoTest) GetF_BoolDefaulted() bool { + if m != nil && m.F_BoolDefaulted != nil { + return *m.F_BoolDefaulted + } + return Default_GoTest_F_BoolDefaulted +} + +func (m *GoTest) GetF_Int32Defaulted() int32 { + if m != nil && m.F_Int32Defaulted != nil { + return *m.F_Int32Defaulted + } + return Default_GoTest_F_Int32Defaulted +} + +func (m *GoTest) GetF_Int64Defaulted() int64 { + if m != nil && m.F_Int64Defaulted != nil { + return *m.F_Int64Defaulted + } + return Default_GoTest_F_Int64Defaulted +} + +func (m *GoTest) GetF_Fixed32Defaulted() uint32 { + if m != nil && m.F_Fixed32Defaulted != nil { + return *m.F_Fixed32Defaulted + } + return Default_GoTest_F_Fixed32Defaulted +} + +func (m *GoTest) GetF_Fixed64Defaulted() uint64 { + if m != nil && m.F_Fixed64Defaulted != nil { + return *m.F_Fixed64Defaulted + } + return Default_GoTest_F_Fixed64Defaulted +} + +func (m *GoTest) GetF_Uint32Defaulted() uint32 { + if m != nil && m.F_Uint32Defaulted != nil { + return *m.F_Uint32Defaulted + } + return Default_GoTest_F_Uint32Defaulted +} + +func (m *GoTest) GetF_Uint64Defaulted() uint64 { + if m != nil && m.F_Uint64Defaulted != nil { + return *m.F_Uint64Defaulted + } + return Default_GoTest_F_Uint64Defaulted +} + +func (m *GoTest) GetF_FloatDefaulted() float32 { + if m != nil && m.F_FloatDefaulted != nil { + return *m.F_FloatDefaulted + } + return Default_GoTest_F_FloatDefaulted +} + +func (m *GoTest) GetF_DoubleDefaulted() float64 { + if m != nil && m.F_DoubleDefaulted != nil { + return *m.F_DoubleDefaulted + } + return Default_GoTest_F_DoubleDefaulted +} + +func (m *GoTest) GetF_StringDefaulted() string { + if m != nil && m.F_StringDefaulted != nil { + return *m.F_StringDefaulted + } + return Default_GoTest_F_StringDefaulted +} + +func (m *GoTest) GetF_BytesDefaulted() []byte { + if m != nil && m.F_BytesDefaulted != nil { + return m.F_BytesDefaulted + } + return append([]byte(nil), Default_GoTest_F_BytesDefaulted...) 
+} + +func (m *GoTest) GetF_Sint32Defaulted() int32 { + if m != nil && m.F_Sint32Defaulted != nil { + return *m.F_Sint32Defaulted + } + return Default_GoTest_F_Sint32Defaulted +} + +func (m *GoTest) GetF_Sint64Defaulted() int64 { + if m != nil && m.F_Sint64Defaulted != nil { + return *m.F_Sint64Defaulted + } + return Default_GoTest_F_Sint64Defaulted +} + +func (m *GoTest) GetF_Sfixed32Defaulted() int32 { + if m != nil && m.F_Sfixed32Defaulted != nil { + return *m.F_Sfixed32Defaulted + } + return Default_GoTest_F_Sfixed32Defaulted +} + +func (m *GoTest) GetF_Sfixed64Defaulted() int64 { + if m != nil && m.F_Sfixed64Defaulted != nil { + return *m.F_Sfixed64Defaulted + } + return Default_GoTest_F_Sfixed64Defaulted +} + +func (m *GoTest) GetF_BoolRepeatedPacked() []bool { + if m != nil { + return m.F_BoolRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int32RepeatedPacked() []int32 { + if m != nil { + return m.F_Int32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int64RepeatedPacked() []int64 { + if m != nil { + return m.F_Int64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Fixed32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Fixed64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Uint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Uint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_FloatRepeatedPacked() []float32 { + if m != nil { + return m.F_FloatRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeatedPacked() []float64 { + if m != nil { + return m.F_DoubleRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint32RepeatedPacked() []int32 { + if m != nil { + return m.F_Sint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint64RepeatedPacked() []int64 { + if m != nil { + return m.F_Sint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sfixed32RepeatedPacked() []int32 { + if m != nil { + return m.F_Sfixed32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sfixed64RepeatedPacked() []int64 { + if m != nil { + return m.F_Sfixed64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetRequiredgroup() *GoTest_RequiredGroup { + if m != nil { + return m.Requiredgroup + } + return nil +} + +func (m *GoTest) GetRepeatedgroup() []*GoTest_RepeatedGroup { + if m != nil { + return m.Repeatedgroup + } + return nil +} + +func (m *GoTest) GetOptionalgroup() *GoTest_OptionalGroup { + if m != nil { + return m.Optionalgroup + } + return nil +} + +// Required, repeated, and optional groups. 
+type GoTest_RequiredGroup struct { + RequiredField *string `protobuf:"bytes,71,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_RequiredGroup) Reset() { *m = GoTest_RequiredGroup{} } +func (m *GoTest_RequiredGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RequiredGroup) ProtoMessage() {} +func (*GoTest_RequiredGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{2, 0} +} +func (m *GoTest_RequiredGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest_RequiredGroup.Unmarshal(m, b) +} +func (m *GoTest_RequiredGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_RequiredGroup.Marshal(b, m, deterministic) +} +func (dst *GoTest_RequiredGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_RequiredGroup.Merge(dst, src) +} +func (m *GoTest_RequiredGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_RequiredGroup.Size(m) +} +func (m *GoTest_RequiredGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_RequiredGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_RequiredGroup proto.InternalMessageInfo + +func (m *GoTest_RequiredGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_RepeatedGroup struct { + RequiredField *string `protobuf:"bytes,81,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_RepeatedGroup) Reset() { *m = GoTest_RepeatedGroup{} } +func (m *GoTest_RepeatedGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RepeatedGroup) ProtoMessage() {} +func (*GoTest_RepeatedGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{2, 1} +} +func (m *GoTest_RepeatedGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest_RepeatedGroup.Unmarshal(m, b) +} +func (m *GoTest_RepeatedGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_RepeatedGroup.Marshal(b, m, deterministic) +} +func (dst *GoTest_RepeatedGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_RepeatedGroup.Merge(dst, src) +} +func (m *GoTest_RepeatedGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_RepeatedGroup.Size(m) +} +func (m *GoTest_RepeatedGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_RepeatedGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_RepeatedGroup proto.InternalMessageInfo + +func (m *GoTest_RepeatedGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_OptionalGroup struct { + RequiredField *string `protobuf:"bytes,91,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_OptionalGroup) Reset() { *m = GoTest_OptionalGroup{} } +func (m *GoTest_OptionalGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_OptionalGroup) ProtoMessage() {} +func (*GoTest_OptionalGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{2, 2} +} +func (m *GoTest_OptionalGroup) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_GoTest_OptionalGroup.Unmarshal(m, b) +} +func (m *GoTest_OptionalGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_OptionalGroup.Marshal(b, m, deterministic) +} +func (dst *GoTest_OptionalGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_OptionalGroup.Merge(dst, src) +} +func (m *GoTest_OptionalGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_OptionalGroup.Size(m) +} +func (m *GoTest_OptionalGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_OptionalGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_OptionalGroup proto.InternalMessageInfo + +func (m *GoTest_OptionalGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +// For testing a group containing a required field. +type GoTestRequiredGroupField struct { + Group *GoTestRequiredGroupField_Group `protobuf:"group,1,req,name=Group,json=group" json:"group,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestRequiredGroupField) Reset() { *m = GoTestRequiredGroupField{} } +func (m *GoTestRequiredGroupField) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField) ProtoMessage() {} +func (*GoTestRequiredGroupField) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{3} +} +func (m *GoTestRequiredGroupField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestRequiredGroupField.Unmarshal(m, b) +} +func (m *GoTestRequiredGroupField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTestRequiredGroupField.Marshal(b, m, deterministic) +} +func (dst *GoTestRequiredGroupField) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestRequiredGroupField.Merge(dst, src) +} +func (m *GoTestRequiredGroupField) XXX_Size() int { + return xxx_messageInfo_GoTestRequiredGroupField.Size(m) +} +func (m *GoTestRequiredGroupField) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestRequiredGroupField.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestRequiredGroupField proto.InternalMessageInfo + +func (m *GoTestRequiredGroupField) GetGroup() *GoTestRequiredGroupField_Group { + if m != nil { + return m.Group + } + return nil +} + +type GoTestRequiredGroupField_Group struct { + Field *int32 `protobuf:"varint,2,req,name=Field" json:"Field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestRequiredGroupField_Group) Reset() { *m = GoTestRequiredGroupField_Group{} } +func (m *GoTestRequiredGroupField_Group) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField_Group) ProtoMessage() {} +func (*GoTestRequiredGroupField_Group) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{3, 0} +} +func (m *GoTestRequiredGroupField_Group) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestRequiredGroupField_Group.Unmarshal(m, b) +} +func (m *GoTestRequiredGroupField_Group) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTestRequiredGroupField_Group.Marshal(b, m, deterministic) +} +func (dst *GoTestRequiredGroupField_Group) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestRequiredGroupField_Group.Merge(dst, src) +} +func (m *GoTestRequiredGroupField_Group) XXX_Size() int { + return 
xxx_messageInfo_GoTestRequiredGroupField_Group.Size(m) +} +func (m *GoTestRequiredGroupField_Group) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestRequiredGroupField_Group.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestRequiredGroupField_Group proto.InternalMessageInfo + +func (m *GoTestRequiredGroupField_Group) GetField() int32 { + if m != nil && m.Field != nil { + return *m.Field + } + return 0 +} + +// For testing skipping of unrecognized fields. +// Numbers are all big, larger than tag numbers in GoTestField, +// the message used in the corresponding test. +type GoSkipTest struct { + SkipInt32 *int32 `protobuf:"varint,11,req,name=skip_int32,json=skipInt32" json:"skip_int32,omitempty"` + SkipFixed32 *uint32 `protobuf:"fixed32,12,req,name=skip_fixed32,json=skipFixed32" json:"skip_fixed32,omitempty"` + SkipFixed64 *uint64 `protobuf:"fixed64,13,req,name=skip_fixed64,json=skipFixed64" json:"skip_fixed64,omitempty"` + SkipString *string `protobuf:"bytes,14,req,name=skip_string,json=skipString" json:"skip_string,omitempty"` + Skipgroup *GoSkipTest_SkipGroup `protobuf:"group,15,req,name=SkipGroup,json=skipgroup" json:"skipgroup,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoSkipTest) Reset() { *m = GoSkipTest{} } +func (m *GoSkipTest) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest) ProtoMessage() {} +func (*GoSkipTest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{4} +} +func (m *GoSkipTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoSkipTest.Unmarshal(m, b) +} +func (m *GoSkipTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoSkipTest.Marshal(b, m, deterministic) +} +func (dst *GoSkipTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoSkipTest.Merge(dst, src) +} +func (m *GoSkipTest) XXX_Size() int { + return xxx_messageInfo_GoSkipTest.Size(m) +} +func (m *GoSkipTest) XXX_DiscardUnknown() { + xxx_messageInfo_GoSkipTest.DiscardUnknown(m) +} + +var xxx_messageInfo_GoSkipTest proto.InternalMessageInfo + +func (m *GoSkipTest) GetSkipInt32() int32 { + if m != nil && m.SkipInt32 != nil { + return *m.SkipInt32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed32() uint32 { + if m != nil && m.SkipFixed32 != nil { + return *m.SkipFixed32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed64() uint64 { + if m != nil && m.SkipFixed64 != nil { + return *m.SkipFixed64 + } + return 0 +} + +func (m *GoSkipTest) GetSkipString() string { + if m != nil && m.SkipString != nil { + return *m.SkipString + } + return "" +} + +func (m *GoSkipTest) GetSkipgroup() *GoSkipTest_SkipGroup { + if m != nil { + return m.Skipgroup + } + return nil +} + +type GoSkipTest_SkipGroup struct { + GroupInt32 *int32 `protobuf:"varint,16,req,name=group_int32,json=groupInt32" json:"group_int32,omitempty"` + GroupString *string `protobuf:"bytes,17,req,name=group_string,json=groupString" json:"group_string,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoSkipTest_SkipGroup) Reset() { *m = GoSkipTest_SkipGroup{} } +func (m *GoSkipTest_SkipGroup) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest_SkipGroup) ProtoMessage() {} +func (*GoSkipTest_SkipGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{4, 0} +} +func (m *GoSkipTest_SkipGroup) XXX_Unmarshal(b 
[]byte) error { + return xxx_messageInfo_GoSkipTest_SkipGroup.Unmarshal(m, b) +} +func (m *GoSkipTest_SkipGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoSkipTest_SkipGroup.Marshal(b, m, deterministic) +} +func (dst *GoSkipTest_SkipGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoSkipTest_SkipGroup.Merge(dst, src) +} +func (m *GoSkipTest_SkipGroup) XXX_Size() int { + return xxx_messageInfo_GoSkipTest_SkipGroup.Size(m) +} +func (m *GoSkipTest_SkipGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoSkipTest_SkipGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoSkipTest_SkipGroup proto.InternalMessageInfo + +func (m *GoSkipTest_SkipGroup) GetGroupInt32() int32 { + if m != nil && m.GroupInt32 != nil { + return *m.GroupInt32 + } + return 0 +} + +func (m *GoSkipTest_SkipGroup) GetGroupString() string { + if m != nil && m.GroupString != nil { + return *m.GroupString + } + return "" +} + +// For testing packed/non-packed decoder switching. +// A serialized instance of one should be deserializable as the other. +type NonPackedTest struct { + A []int32 `protobuf:"varint,1,rep,name=a" json:"a,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NonPackedTest) Reset() { *m = NonPackedTest{} } +func (m *NonPackedTest) String() string { return proto.CompactTextString(m) } +func (*NonPackedTest) ProtoMessage() {} +func (*NonPackedTest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{5} +} +func (m *NonPackedTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NonPackedTest.Unmarshal(m, b) +} +func (m *NonPackedTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NonPackedTest.Marshal(b, m, deterministic) +} +func (dst *NonPackedTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_NonPackedTest.Merge(dst, src) +} +func (m *NonPackedTest) XXX_Size() int { + return xxx_messageInfo_NonPackedTest.Size(m) +} +func (m *NonPackedTest) XXX_DiscardUnknown() { + xxx_messageInfo_NonPackedTest.DiscardUnknown(m) +} + +var xxx_messageInfo_NonPackedTest proto.InternalMessageInfo + +func (m *NonPackedTest) GetA() []int32 { + if m != nil { + return m.A + } + return nil +} + +type PackedTest struct { + B []int32 `protobuf:"varint,1,rep,packed,name=b" json:"b,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackedTest) Reset() { *m = PackedTest{} } +func (m *PackedTest) String() string { return proto.CompactTextString(m) } +func (*PackedTest) ProtoMessage() {} +func (*PackedTest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{6} +} +func (m *PackedTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackedTest.Unmarshal(m, b) +} +func (m *PackedTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackedTest.Marshal(b, m, deterministic) +} +func (dst *PackedTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackedTest.Merge(dst, src) +} +func (m *PackedTest) XXX_Size() int { + return xxx_messageInfo_PackedTest.Size(m) +} +func (m *PackedTest) XXX_DiscardUnknown() { + xxx_messageInfo_PackedTest.DiscardUnknown(m) +} + +var xxx_messageInfo_PackedTest proto.InternalMessageInfo + +func (m *PackedTest) GetB() []int32 { + if m != nil { + return m.B + } + return nil +} + +type MaxTag struct { + // Maximum possible tag number. 
+ LastField *string `protobuf:"bytes,536870911,opt,name=last_field,json=lastField" json:"last_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaxTag) Reset() { *m = MaxTag{} } +func (m *MaxTag) String() string { return proto.CompactTextString(m) } +func (*MaxTag) ProtoMessage() {} +func (*MaxTag) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{7} +} +func (m *MaxTag) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaxTag.Unmarshal(m, b) +} +func (m *MaxTag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaxTag.Marshal(b, m, deterministic) +} +func (dst *MaxTag) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaxTag.Merge(dst, src) +} +func (m *MaxTag) XXX_Size() int { + return xxx_messageInfo_MaxTag.Size(m) +} +func (m *MaxTag) XXX_DiscardUnknown() { + xxx_messageInfo_MaxTag.DiscardUnknown(m) +} + +var xxx_messageInfo_MaxTag proto.InternalMessageInfo + +func (m *MaxTag) GetLastField() string { + if m != nil && m.LastField != nil { + return *m.LastField + } + return "" +} + +type OldMessage struct { + Nested *OldMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` + Num *int32 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldMessage) Reset() { *m = OldMessage{} } +func (m *OldMessage) String() string { return proto.CompactTextString(m) } +func (*OldMessage) ProtoMessage() {} +func (*OldMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{8} +} +func (m *OldMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldMessage.Unmarshal(m, b) +} +func (m *OldMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldMessage.Marshal(b, m, deterministic) +} +func (dst *OldMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldMessage.Merge(dst, src) +} +func (m *OldMessage) XXX_Size() int { + return xxx_messageInfo_OldMessage.Size(m) +} +func (m *OldMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OldMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OldMessage proto.InternalMessageInfo + +func (m *OldMessage) GetNested() *OldMessage_Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *OldMessage) GetNum() int32 { + if m != nil && m.Num != nil { + return *m.Num + } + return 0 +} + +type OldMessage_Nested struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldMessage_Nested) Reset() { *m = OldMessage_Nested{} } +func (m *OldMessage_Nested) String() string { return proto.CompactTextString(m) } +func (*OldMessage_Nested) ProtoMessage() {} +func (*OldMessage_Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{8, 0} +} +func (m *OldMessage_Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldMessage_Nested.Unmarshal(m, b) +} +func (m *OldMessage_Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldMessage_Nested.Marshal(b, m, deterministic) +} +func (dst *OldMessage_Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldMessage_Nested.Merge(dst, src) +} +func (m *OldMessage_Nested) XXX_Size() 
int { + return xxx_messageInfo_OldMessage_Nested.Size(m) +} +func (m *OldMessage_Nested) XXX_DiscardUnknown() { + xxx_messageInfo_OldMessage_Nested.DiscardUnknown(m) +} + +var xxx_messageInfo_OldMessage_Nested proto.InternalMessageInfo + +func (m *OldMessage_Nested) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +// NewMessage is wire compatible with OldMessage; +// imagine it as a future version. +type NewMessage struct { + Nested *NewMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` + // This is an int32 in OldMessage. + Num *int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NewMessage) Reset() { *m = NewMessage{} } +func (m *NewMessage) String() string { return proto.CompactTextString(m) } +func (*NewMessage) ProtoMessage() {} +func (*NewMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{9} +} +func (m *NewMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NewMessage.Unmarshal(m, b) +} +func (m *NewMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NewMessage.Marshal(b, m, deterministic) +} +func (dst *NewMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_NewMessage.Merge(dst, src) +} +func (m *NewMessage) XXX_Size() int { + return xxx_messageInfo_NewMessage.Size(m) +} +func (m *NewMessage) XXX_DiscardUnknown() { + xxx_messageInfo_NewMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_NewMessage proto.InternalMessageInfo + +func (m *NewMessage) GetNested() *NewMessage_Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *NewMessage) GetNum() int64 { + if m != nil && m.Num != nil { + return *m.Num + } + return 0 +} + +type NewMessage_Nested struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + FoodGroup *string `protobuf:"bytes,2,opt,name=food_group,json=foodGroup" json:"food_group,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NewMessage_Nested) Reset() { *m = NewMessage_Nested{} } +func (m *NewMessage_Nested) String() string { return proto.CompactTextString(m) } +func (*NewMessage_Nested) ProtoMessage() {} +func (*NewMessage_Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{9, 0} +} +func (m *NewMessage_Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NewMessage_Nested.Unmarshal(m, b) +} +func (m *NewMessage_Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NewMessage_Nested.Marshal(b, m, deterministic) +} +func (dst *NewMessage_Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_NewMessage_Nested.Merge(dst, src) +} +func (m *NewMessage_Nested) XXX_Size() int { + return xxx_messageInfo_NewMessage_Nested.Size(m) +} +func (m *NewMessage_Nested) XXX_DiscardUnknown() { + xxx_messageInfo_NewMessage_Nested.DiscardUnknown(m) +} + +var xxx_messageInfo_NewMessage_Nested proto.InternalMessageInfo + +func (m *NewMessage_Nested) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *NewMessage_Nested) GetFoodGroup() string { + if m != nil && m.FoodGroup != nil { + return *m.FoodGroup + } + return "" +} + +type InnerMessage struct { + Host *string `protobuf:"bytes,1,req,name=host" 
json:"host,omitempty"` + Port *int32 `protobuf:"varint,2,opt,name=port,def=4000" json:"port,omitempty"` + Connected *bool `protobuf:"varint,3,opt,name=connected" json:"connected,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InnerMessage) Reset() { *m = InnerMessage{} } +func (m *InnerMessage) String() string { return proto.CompactTextString(m) } +func (*InnerMessage) ProtoMessage() {} +func (*InnerMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{10} +} +func (m *InnerMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InnerMessage.Unmarshal(m, b) +} +func (m *InnerMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InnerMessage.Marshal(b, m, deterministic) +} +func (dst *InnerMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_InnerMessage.Merge(dst, src) +} +func (m *InnerMessage) XXX_Size() int { + return xxx_messageInfo_InnerMessage.Size(m) +} +func (m *InnerMessage) XXX_DiscardUnknown() { + xxx_messageInfo_InnerMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_InnerMessage proto.InternalMessageInfo + +const Default_InnerMessage_Port int32 = 4000 + +func (m *InnerMessage) GetHost() string { + if m != nil && m.Host != nil { + return *m.Host + } + return "" +} + +func (m *InnerMessage) GetPort() int32 { + if m != nil && m.Port != nil { + return *m.Port + } + return Default_InnerMessage_Port +} + +func (m *InnerMessage) GetConnected() bool { + if m != nil && m.Connected != nil { + return *m.Connected + } + return false +} + +type OtherMessage struct { + Key *int64 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` + Weight *float32 `protobuf:"fixed32,3,opt,name=weight" json:"weight,omitempty"` + Inner *InnerMessage `protobuf:"bytes,4,opt,name=inner" json:"inner,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OtherMessage) Reset() { *m = OtherMessage{} } +func (m *OtherMessage) String() string { return proto.CompactTextString(m) } +func (*OtherMessage) ProtoMessage() {} +func (*OtherMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{11} +} + +var extRange_OtherMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*OtherMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OtherMessage +} +func (m *OtherMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OtherMessage.Unmarshal(m, b) +} +func (m *OtherMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherMessage.Marshal(b, m, deterministic) +} +func (dst *OtherMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherMessage.Merge(dst, src) +} +func (m *OtherMessage) XXX_Size() int { + return xxx_messageInfo_OtherMessage.Size(m) +} +func (m *OtherMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OtherMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherMessage proto.InternalMessageInfo + +func (m *OtherMessage) GetKey() int64 { + if m != nil && m.Key != nil { + return *m.Key + } + return 0 +} + +func (m *OtherMessage) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *OtherMessage) GetWeight() float32 { + if m != nil && m.Weight != nil { + return 
*m.Weight + } + return 0 +} + +func (m *OtherMessage) GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +type RequiredInnerMessage struct { + LeoFinallyWonAnOscar *InnerMessage `protobuf:"bytes,1,req,name=leo_finally_won_an_oscar,json=leoFinallyWonAnOscar" json:"leo_finally_won_an_oscar,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequiredInnerMessage) Reset() { *m = RequiredInnerMessage{} } +func (m *RequiredInnerMessage) String() string { return proto.CompactTextString(m) } +func (*RequiredInnerMessage) ProtoMessage() {} +func (*RequiredInnerMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{12} +} +func (m *RequiredInnerMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequiredInnerMessage.Unmarshal(m, b) +} +func (m *RequiredInnerMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequiredInnerMessage.Marshal(b, m, deterministic) +} +func (dst *RequiredInnerMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequiredInnerMessage.Merge(dst, src) +} +func (m *RequiredInnerMessage) XXX_Size() int { + return xxx_messageInfo_RequiredInnerMessage.Size(m) +} +func (m *RequiredInnerMessage) XXX_DiscardUnknown() { + xxx_messageInfo_RequiredInnerMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_RequiredInnerMessage proto.InternalMessageInfo + +func (m *RequiredInnerMessage) GetLeoFinallyWonAnOscar() *InnerMessage { + if m != nil { + return m.LeoFinallyWonAnOscar + } + return nil +} + +type MyMessage struct { + Count *int32 `protobuf:"varint,1,req,name=count" json:"count,omitempty"` + Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + Quote *string `protobuf:"bytes,3,opt,name=quote" json:"quote,omitempty"` + Pet []string `protobuf:"bytes,4,rep,name=pet" json:"pet,omitempty"` + Inner *InnerMessage `protobuf:"bytes,5,opt,name=inner" json:"inner,omitempty"` + Others []*OtherMessage `protobuf:"bytes,6,rep,name=others" json:"others,omitempty"` + WeMustGoDeeper *RequiredInnerMessage `protobuf:"bytes,13,opt,name=we_must_go_deeper,json=weMustGoDeeper" json:"we_must_go_deeper,omitempty"` + RepInner []*InnerMessage `protobuf:"bytes,12,rep,name=rep_inner,json=repInner" json:"rep_inner,omitempty"` + Bikeshed *MyMessage_Color `protobuf:"varint,7,opt,name=bikeshed,enum=test_proto.MyMessage_Color" json:"bikeshed,omitempty"` + Somegroup *MyMessage_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` + // This field becomes [][]byte in the generated code. 
+ RepBytes [][]byte `protobuf:"bytes,10,rep,name=rep_bytes,json=repBytes" json:"rep_bytes,omitempty"` + Bigfloat *float64 `protobuf:"fixed64,11,opt,name=bigfloat" json:"bigfloat,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessage) Reset() { *m = MyMessage{} } +func (m *MyMessage) String() string { return proto.CompactTextString(m) } +func (*MyMessage) ProtoMessage() {} +func (*MyMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{13} +} + +var extRange_MyMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*MyMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessage +} +func (m *MyMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessage.Unmarshal(m, b) +} +func (m *MyMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MyMessage.Marshal(b, m, deterministic) +} +func (dst *MyMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessage.Merge(dst, src) +} +func (m *MyMessage) XXX_Size() int { + return xxx_messageInfo_MyMessage.Size(m) +} +func (m *MyMessage) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessage proto.InternalMessageInfo + +func (m *MyMessage) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +func (m *MyMessage) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MyMessage) GetQuote() string { + if m != nil && m.Quote != nil { + return *m.Quote + } + return "" +} + +func (m *MyMessage) GetPet() []string { + if m != nil { + return m.Pet + } + return nil +} + +func (m *MyMessage) GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +func (m *MyMessage) GetOthers() []*OtherMessage { + if m != nil { + return m.Others + } + return nil +} + +func (m *MyMessage) GetWeMustGoDeeper() *RequiredInnerMessage { + if m != nil { + return m.WeMustGoDeeper + } + return nil +} + +func (m *MyMessage) GetRepInner() []*InnerMessage { + if m != nil { + return m.RepInner + } + return nil +} + +func (m *MyMessage) GetBikeshed() MyMessage_Color { + if m != nil && m.Bikeshed != nil { + return *m.Bikeshed + } + return MyMessage_RED +} + +func (m *MyMessage) GetSomegroup() *MyMessage_SomeGroup { + if m != nil { + return m.Somegroup + } + return nil +} + +func (m *MyMessage) GetRepBytes() [][]byte { + if m != nil { + return m.RepBytes + } + return nil +} + +func (m *MyMessage) GetBigfloat() float64 { + if m != nil && m.Bigfloat != nil { + return *m.Bigfloat + } + return 0 +} + +type MyMessage_SomeGroup struct { + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessage_SomeGroup) Reset() { *m = MyMessage_SomeGroup{} } +func (m *MyMessage_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*MyMessage_SomeGroup) ProtoMessage() {} +func (*MyMessage_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{13, 0} +} +func (m *MyMessage_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessage_SomeGroup.Unmarshal(m, b) +} +func (m *MyMessage_SomeGroup) XXX_Marshal(b []byte, deterministic 
bool) ([]byte, error) { + return xxx_messageInfo_MyMessage_SomeGroup.Marshal(b, m, deterministic) +} +func (dst *MyMessage_SomeGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessage_SomeGroup.Merge(dst, src) +} +func (m *MyMessage_SomeGroup) XXX_Size() int { + return xxx_messageInfo_MyMessage_SomeGroup.Size(m) +} +func (m *MyMessage_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessage_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessage_SomeGroup proto.InternalMessageInfo + +func (m *MyMessage_SomeGroup) GetGroupField() int32 { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return 0 +} + +type Ext struct { + Data *string `protobuf:"bytes,1,opt,name=data" json:"data,omitempty"` + MapField map[int32]int32 `protobuf:"bytes,2,rep,name=map_field,json=mapField" json:"map_field,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Ext) Reset() { *m = Ext{} } +func (m *Ext) String() string { return proto.CompactTextString(m) } +func (*Ext) ProtoMessage() {} +func (*Ext) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{14} +} +func (m *Ext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Ext.Unmarshal(m, b) +} +func (m *Ext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Ext.Marshal(b, m, deterministic) +} +func (dst *Ext) XXX_Merge(src proto.Message) { + xxx_messageInfo_Ext.Merge(dst, src) +} +func (m *Ext) XXX_Size() int { + return xxx_messageInfo_Ext.Size(m) +} +func (m *Ext) XXX_DiscardUnknown() { + xxx_messageInfo_Ext.DiscardUnknown(m) +} + +var xxx_messageInfo_Ext proto.InternalMessageInfo + +func (m *Ext) GetData() string { + if m != nil && m.Data != nil { + return *m.Data + } + return "" +} + +func (m *Ext) GetMapField() map[int32]int32 { + if m != nil { + return m.MapField + } + return nil +} + +var E_Ext_More = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*Ext)(nil), + Field: 103, + Name: "test_proto.Ext.more", + Tag: "bytes,103,opt,name=more", + Filename: "test_proto/test.proto", +} + +var E_Ext_Text = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*string)(nil), + Field: 104, + Name: "test_proto.Ext.text", + Tag: "bytes,104,opt,name=text", + Filename: "test_proto/test.proto", +} + +var E_Ext_Number = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 105, + Name: "test_proto.Ext.number", + Tag: "varint,105,opt,name=number", + Filename: "test_proto/test.proto", +} + +type ComplexExtension struct { + First *int32 `protobuf:"varint,1,opt,name=first" json:"first,omitempty"` + Second *int32 `protobuf:"varint,2,opt,name=second" json:"second,omitempty"` + Third []int32 `protobuf:"varint,3,rep,name=third" json:"third,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComplexExtension) Reset() { *m = ComplexExtension{} } +func (m *ComplexExtension) String() string { return proto.CompactTextString(m) } +func (*ComplexExtension) ProtoMessage() {} +func (*ComplexExtension) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{15} +} +func (m *ComplexExtension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComplexExtension.Unmarshal(m, b) +} +func (m *ComplexExtension) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComplexExtension.Marshal(b, m, deterministic) +} +func (dst *ComplexExtension) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComplexExtension.Merge(dst, src) +} +func (m *ComplexExtension) XXX_Size() int { + return xxx_messageInfo_ComplexExtension.Size(m) +} +func (m *ComplexExtension) XXX_DiscardUnknown() { + xxx_messageInfo_ComplexExtension.DiscardUnknown(m) +} + +var xxx_messageInfo_ComplexExtension proto.InternalMessageInfo + +func (m *ComplexExtension) GetFirst() int32 { + if m != nil && m.First != nil { + return *m.First + } + return 0 +} + +func (m *ComplexExtension) GetSecond() int32 { + if m != nil && m.Second != nil { + return *m.Second + } + return 0 +} + +func (m *ComplexExtension) GetThird() []int32 { + if m != nil { + return m.Third + } + return nil +} + +type DefaultsMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DefaultsMessage) Reset() { *m = DefaultsMessage{} } +func (m *DefaultsMessage) String() string { return proto.CompactTextString(m) } +func (*DefaultsMessage) ProtoMessage() {} +func (*DefaultsMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{16} +} + +var extRange_DefaultsMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*DefaultsMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_DefaultsMessage +} +func (m *DefaultsMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DefaultsMessage.Unmarshal(m, b) +} +func (m *DefaultsMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DefaultsMessage.Marshal(b, m, deterministic) +} +func (dst *DefaultsMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_DefaultsMessage.Merge(dst, src) +} +func (m *DefaultsMessage) XXX_Size() int { + return xxx_messageInfo_DefaultsMessage.Size(m) +} +func (m *DefaultsMessage) XXX_DiscardUnknown() { + xxx_messageInfo_DefaultsMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_DefaultsMessage proto.InternalMessageInfo + +type MyMessageSet struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessageSet) Reset() { *m = MyMessageSet{} } +func (m *MyMessageSet) String() string { return proto.CompactTextString(m) } +func (*MyMessageSet) ProtoMessage() {} +func (*MyMessageSet) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{17} +} + +func (m *MyMessageSet) MarshalJSON() ([]byte, error) { + return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) +} +func (m *MyMessageSet) UnmarshalJSON(buf []byte) error { + return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +} + +var extRange_MyMessageSet = []proto.ExtensionRange{ + {Start: 100, End: 2147483646}, +} + +func (*MyMessageSet) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessageSet +} +func (m *MyMessageSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessageSet.Unmarshal(m, b) +} +func (m *MyMessageSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MyMessageSet.Marshal(b, m, deterministic) +} +func (dst *MyMessageSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessageSet.Merge(dst, src) +} 
+func (m *MyMessageSet) XXX_Size() int { + return xxx_messageInfo_MyMessageSet.Size(m) +} +func (m *MyMessageSet) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessageSet.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessageSet proto.InternalMessageInfo + +type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{18} +} +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (dst *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(dst, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo + +type MessageList struct { + Message []*MessageList_Message `protobuf:"group,1,rep,name=Message,json=message" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageList) Reset() { *m = MessageList{} } +func (m *MessageList) String() string { return proto.CompactTextString(m) } +func (*MessageList) ProtoMessage() {} +func (*MessageList) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{19} +} +func (m *MessageList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageList.Unmarshal(m, b) +} +func (m *MessageList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageList.Marshal(b, m, deterministic) +} +func (dst *MessageList) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageList.Merge(dst, src) +} +func (m *MessageList) XXX_Size() int { + return xxx_messageInfo_MessageList.Size(m) +} +func (m *MessageList) XXX_DiscardUnknown() { + xxx_messageInfo_MessageList.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageList proto.InternalMessageInfo + +func (m *MessageList) GetMessage() []*MessageList_Message { + if m != nil { + return m.Message + } + return nil +} + +type MessageList_Message struct { + Name *string `protobuf:"bytes,2,req,name=name" json:"name,omitempty"` + Count *int32 `protobuf:"varint,3,req,name=count" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageList_Message) Reset() { *m = MessageList_Message{} } +func (m *MessageList_Message) String() string { return proto.CompactTextString(m) } +func (*MessageList_Message) ProtoMessage() {} +func (*MessageList_Message) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{19, 0} +} +func (m *MessageList_Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageList_Message.Unmarshal(m, b) +} +func (m *MessageList_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageList_Message.Marshal(b, m, deterministic) +} +func (dst *MessageList_Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageList_Message.Merge(dst, src) +} +func (m *MessageList_Message) XXX_Size() int { + 
return xxx_messageInfo_MessageList_Message.Size(m) +} +func (m *MessageList_Message) XXX_DiscardUnknown() { + xxx_messageInfo_MessageList_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageList_Message proto.InternalMessageInfo + +func (m *MessageList_Message) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MessageList_Message) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +type Strings struct { + StringField *string `protobuf:"bytes,1,opt,name=string_field,json=stringField" json:"string_field,omitempty"` + BytesField []byte `protobuf:"bytes,2,opt,name=bytes_field,json=bytesField" json:"bytes_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Strings) Reset() { *m = Strings{} } +func (m *Strings) String() string { return proto.CompactTextString(m) } +func (*Strings) ProtoMessage() {} +func (*Strings) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{20} +} +func (m *Strings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Strings.Unmarshal(m, b) +} +func (m *Strings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Strings.Marshal(b, m, deterministic) +} +func (dst *Strings) XXX_Merge(src proto.Message) { + xxx_messageInfo_Strings.Merge(dst, src) +} +func (m *Strings) XXX_Size() int { + return xxx_messageInfo_Strings.Size(m) +} +func (m *Strings) XXX_DiscardUnknown() { + xxx_messageInfo_Strings.DiscardUnknown(m) +} + +var xxx_messageInfo_Strings proto.InternalMessageInfo + +func (m *Strings) GetStringField() string { + if m != nil && m.StringField != nil { + return *m.StringField + } + return "" +} + +func (m *Strings) GetBytesField() []byte { + if m != nil { + return m.BytesField + } + return nil +} + +type Defaults struct { + // Default-valued fields of all basic types. + // Same as GoTest, but copied here to make testing easier. 
+ F_Bool *bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,def=1" json:"F_Bool,omitempty"`
+ F_Int32 *int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,def=32" json:"F_Int32,omitempty"`
+ F_Int64 *int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,def=64" json:"F_Int64,omitempty"`
+ F_Fixed32 *uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,def=320" json:"F_Fixed32,omitempty"`
+ F_Fixed64 *uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,def=640" json:"F_Fixed64,omitempty"`
+ F_Uint32 *uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,def=3200" json:"F_Uint32,omitempty"`
+ F_Uint64 *uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,def=6400" json:"F_Uint64,omitempty"`
+ F_Float *float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,def=314159" json:"F_Float,omitempty"`
+ F_Double *float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,def=271828" json:"F_Double,omitempty"`
+ F_String *string `protobuf:"bytes,10,opt,name=F_String,json=FString,def=hello, \"world!\"\n" json:"F_String,omitempty"`
+ F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,def=Bignose" json:"F_Bytes,omitempty"`
+ F_Sint32 *int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,def=-32" json:"F_Sint32,omitempty"`
+ F_Sint64 *int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,def=-64" json:"F_Sint64,omitempty"`
+ F_Enum *Defaults_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=test_proto.Defaults_Color,def=1" json:"F_Enum,omitempty"`
+ // More fields with crazy defaults.
+ F_Pinf *float32 `protobuf:"fixed32,15,opt,name=F_Pinf,json=FPinf,def=inf" json:"F_Pinf,omitempty"`
+ F_Ninf *float32 `protobuf:"fixed32,16,opt,name=F_Ninf,json=FNinf,def=-inf" json:"F_Ninf,omitempty"`
+ F_Nan *float32 `protobuf:"fixed32,17,opt,name=F_Nan,json=FNan,def=nan" json:"F_Nan,omitempty"`
+ // Sub-message.
+ Sub *SubDefaults `protobuf:"bytes,18,opt,name=sub" json:"sub,omitempty"`
+ // Redundant but explicit defaults.
+ StrZero *string `protobuf:"bytes,19,opt,name=str_zero,json=strZero,def=" json:"str_zero,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Defaults) Reset() { *m = Defaults{} } +func (m *Defaults) String() string { return proto.CompactTextString(m) } +func (*Defaults) ProtoMessage() {} +func (*Defaults) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{21} +} +func (m *Defaults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Defaults.Unmarshal(m, b) +} +func (m *Defaults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Defaults.Marshal(b, m, deterministic) +} +func (dst *Defaults) XXX_Merge(src proto.Message) { + xxx_messageInfo_Defaults.Merge(dst, src) +} +func (m *Defaults) XXX_Size() int { + return xxx_messageInfo_Defaults.Size(m) +} +func (m *Defaults) XXX_DiscardUnknown() { + xxx_messageInfo_Defaults.DiscardUnknown(m) +} + +var xxx_messageInfo_Defaults proto.InternalMessageInfo + +const Default_Defaults_F_Bool bool = true +const Default_Defaults_F_Int32 int32 = 32 +const Default_Defaults_F_Int64 int64 = 64 +const Default_Defaults_F_Fixed32 uint32 = 320 +const Default_Defaults_F_Fixed64 uint64 = 640 +const Default_Defaults_F_Uint32 uint32 = 3200 +const Default_Defaults_F_Uint64 uint64 = 6400 +const Default_Defaults_F_Float float32 = 314159 +const Default_Defaults_F_Double float64 = 271828 +const Default_Defaults_F_String string = "hello, \"world!\"\n" + +var Default_Defaults_F_Bytes []byte = []byte("Bignose") + +const Default_Defaults_F_Sint32 int32 = -32 +const Default_Defaults_F_Sint64 int64 = -64 +const Default_Defaults_F_Enum Defaults_Color = Defaults_GREEN + +var Default_Defaults_F_Pinf float32 = float32(math.Inf(1)) +var Default_Defaults_F_Ninf float32 = float32(math.Inf(-1)) +var Default_Defaults_F_Nan float32 = float32(math.NaN()) + +func (m *Defaults) GetF_Bool() bool { + if m != nil && m.F_Bool != nil { + return *m.F_Bool + } + return Default_Defaults_F_Bool +} + +func (m *Defaults) GetF_Int32() int32 { + if m != nil && m.F_Int32 != nil { + return *m.F_Int32 + } + return Default_Defaults_F_Int32 +} + +func (m *Defaults) GetF_Int64() int64 { + if m != nil && m.F_Int64 != nil { + return *m.F_Int64 + } + return Default_Defaults_F_Int64 +} + +func (m *Defaults) GetF_Fixed32() uint32 { + if m != nil && m.F_Fixed32 != nil { + return *m.F_Fixed32 + } + return Default_Defaults_F_Fixed32 +} + +func (m *Defaults) GetF_Fixed64() uint64 { + if m != nil && m.F_Fixed64 != nil { + return *m.F_Fixed64 + } + return Default_Defaults_F_Fixed64 +} + +func (m *Defaults) GetF_Uint32() uint32 { + if m != nil && m.F_Uint32 != nil { + return *m.F_Uint32 + } + return Default_Defaults_F_Uint32 +} + +func (m *Defaults) GetF_Uint64() uint64 { + if m != nil && m.F_Uint64 != nil { + return *m.F_Uint64 + } + return Default_Defaults_F_Uint64 +} + +func (m *Defaults) GetF_Float() float32 { + if m != nil && m.F_Float != nil { + return *m.F_Float + } + return Default_Defaults_F_Float +} + +func (m *Defaults) GetF_Double() float64 { + if m != nil && m.F_Double != nil { + return *m.F_Double + } + return Default_Defaults_F_Double +} + +func (m *Defaults) GetF_String() string { + if m != nil && m.F_String != nil { + return *m.F_String + } + return Default_Defaults_F_String +} + +func (m *Defaults) GetF_Bytes() []byte { + if m != nil && m.F_Bytes != nil { + return m.F_Bytes + } + return append([]byte(nil), Default_Defaults_F_Bytes...) 
+} + +func (m *Defaults) GetF_Sint32() int32 { + if m != nil && m.F_Sint32 != nil { + return *m.F_Sint32 + } + return Default_Defaults_F_Sint32 +} + +func (m *Defaults) GetF_Sint64() int64 { + if m != nil && m.F_Sint64 != nil { + return *m.F_Sint64 + } + return Default_Defaults_F_Sint64 +} + +func (m *Defaults) GetF_Enum() Defaults_Color { + if m != nil && m.F_Enum != nil { + return *m.F_Enum + } + return Default_Defaults_F_Enum +} + +func (m *Defaults) GetF_Pinf() float32 { + if m != nil && m.F_Pinf != nil { + return *m.F_Pinf + } + return Default_Defaults_F_Pinf +} + +func (m *Defaults) GetF_Ninf() float32 { + if m != nil && m.F_Ninf != nil { + return *m.F_Ninf + } + return Default_Defaults_F_Ninf +} + +func (m *Defaults) GetF_Nan() float32 { + if m != nil && m.F_Nan != nil { + return *m.F_Nan + } + return Default_Defaults_F_Nan +} + +func (m *Defaults) GetSub() *SubDefaults { + if m != nil { + return m.Sub + } + return nil +} + +func (m *Defaults) GetStrZero() string { + if m != nil && m.StrZero != nil { + return *m.StrZero + } + return "" +} + +type SubDefaults struct { + N *int64 `protobuf:"varint,1,opt,name=n,def=7" json:"n,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubDefaults) Reset() { *m = SubDefaults{} } +func (m *SubDefaults) String() string { return proto.CompactTextString(m) } +func (*SubDefaults) ProtoMessage() {} +func (*SubDefaults) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{22} +} +func (m *SubDefaults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubDefaults.Unmarshal(m, b) +} +func (m *SubDefaults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubDefaults.Marshal(b, m, deterministic) +} +func (dst *SubDefaults) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubDefaults.Merge(dst, src) +} +func (m *SubDefaults) XXX_Size() int { + return xxx_messageInfo_SubDefaults.Size(m) +} +func (m *SubDefaults) XXX_DiscardUnknown() { + xxx_messageInfo_SubDefaults.DiscardUnknown(m) +} + +var xxx_messageInfo_SubDefaults proto.InternalMessageInfo + +const Default_SubDefaults_N int64 = 7 + +func (m *SubDefaults) GetN() int64 { + if m != nil && m.N != nil { + return *m.N + } + return Default_SubDefaults_N +} + +type RepeatedEnum struct { + Color []RepeatedEnum_Color `protobuf:"varint,1,rep,name=color,enum=test_proto.RepeatedEnum_Color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepeatedEnum) Reset() { *m = RepeatedEnum{} } +func (m *RepeatedEnum) String() string { return proto.CompactTextString(m) } +func (*RepeatedEnum) ProtoMessage() {} +func (*RepeatedEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{23} +} +func (m *RepeatedEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepeatedEnum.Unmarshal(m, b) +} +func (m *RepeatedEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepeatedEnum.Marshal(b, m, deterministic) +} +func (dst *RepeatedEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepeatedEnum.Merge(dst, src) +} +func (m *RepeatedEnum) XXX_Size() int { + return xxx_messageInfo_RepeatedEnum.Size(m) +} +func (m *RepeatedEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RepeatedEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RepeatedEnum proto.InternalMessageInfo + +func (m *RepeatedEnum) 
GetColor() []RepeatedEnum_Color { + if m != nil { + return m.Color + } + return nil +} + +type MoreRepeated struct { + Bools []bool `protobuf:"varint,1,rep,name=bools" json:"bools,omitempty"` + BoolsPacked []bool `protobuf:"varint,2,rep,packed,name=bools_packed,json=boolsPacked" json:"bools_packed,omitempty"` + Ints []int32 `protobuf:"varint,3,rep,name=ints" json:"ints,omitempty"` + IntsPacked []int32 `protobuf:"varint,4,rep,packed,name=ints_packed,json=intsPacked" json:"ints_packed,omitempty"` + Int64SPacked []int64 `protobuf:"varint,7,rep,packed,name=int64s_packed,json=int64sPacked" json:"int64s_packed,omitempty"` + Strings []string `protobuf:"bytes,5,rep,name=strings" json:"strings,omitempty"` + Fixeds []uint32 `protobuf:"fixed32,6,rep,name=fixeds" json:"fixeds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MoreRepeated) Reset() { *m = MoreRepeated{} } +func (m *MoreRepeated) String() string { return proto.CompactTextString(m) } +func (*MoreRepeated) ProtoMessage() {} +func (*MoreRepeated) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{24} +} +func (m *MoreRepeated) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MoreRepeated.Unmarshal(m, b) +} +func (m *MoreRepeated) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MoreRepeated.Marshal(b, m, deterministic) +} +func (dst *MoreRepeated) XXX_Merge(src proto.Message) { + xxx_messageInfo_MoreRepeated.Merge(dst, src) +} +func (m *MoreRepeated) XXX_Size() int { + return xxx_messageInfo_MoreRepeated.Size(m) +} +func (m *MoreRepeated) XXX_DiscardUnknown() { + xxx_messageInfo_MoreRepeated.DiscardUnknown(m) +} + +var xxx_messageInfo_MoreRepeated proto.InternalMessageInfo + +func (m *MoreRepeated) GetBools() []bool { + if m != nil { + return m.Bools + } + return nil +} + +func (m *MoreRepeated) GetBoolsPacked() []bool { + if m != nil { + return m.BoolsPacked + } + return nil +} + +func (m *MoreRepeated) GetInts() []int32 { + if m != nil { + return m.Ints + } + return nil +} + +func (m *MoreRepeated) GetIntsPacked() []int32 { + if m != nil { + return m.IntsPacked + } + return nil +} + +func (m *MoreRepeated) GetInt64SPacked() []int64 { + if m != nil { + return m.Int64SPacked + } + return nil +} + +func (m *MoreRepeated) GetStrings() []string { + if m != nil { + return m.Strings + } + return nil +} + +func (m *MoreRepeated) GetFixeds() []uint32 { + if m != nil { + return m.Fixeds + } + return nil +} + +type GroupOld struct { + G *GroupOld_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupOld) Reset() { *m = GroupOld{} } +func (m *GroupOld) String() string { return proto.CompactTextString(m) } +func (*GroupOld) ProtoMessage() {} +func (*GroupOld) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{25} +} +func (m *GroupOld) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupOld.Unmarshal(m, b) +} +func (m *GroupOld) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupOld.Marshal(b, m, deterministic) +} +func (dst *GroupOld) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupOld.Merge(dst, src) +} +func (m *GroupOld) XXX_Size() int { + return xxx_messageInfo_GroupOld.Size(m) +} +func (m *GroupOld) XXX_DiscardUnknown() { + 
xxx_messageInfo_GroupOld.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupOld proto.InternalMessageInfo + +func (m *GroupOld) GetG() *GroupOld_G { + if m != nil { + return m.G + } + return nil +} + +type GroupOld_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupOld_G) Reset() { *m = GroupOld_G{} } +func (m *GroupOld_G) String() string { return proto.CompactTextString(m) } +func (*GroupOld_G) ProtoMessage() {} +func (*GroupOld_G) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{25, 0} +} +func (m *GroupOld_G) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupOld_G.Unmarshal(m, b) +} +func (m *GroupOld_G) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupOld_G.Marshal(b, m, deterministic) +} +func (dst *GroupOld_G) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupOld_G.Merge(dst, src) +} +func (m *GroupOld_G) XXX_Size() int { + return xxx_messageInfo_GroupOld_G.Size(m) +} +func (m *GroupOld_G) XXX_DiscardUnknown() { + xxx_messageInfo_GroupOld_G.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupOld_G proto.InternalMessageInfo + +func (m *GroupOld_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type GroupNew struct { + G *GroupNew_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupNew) Reset() { *m = GroupNew{} } +func (m *GroupNew) String() string { return proto.CompactTextString(m) } +func (*GroupNew) ProtoMessage() {} +func (*GroupNew) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{26} +} +func (m *GroupNew) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupNew.Unmarshal(m, b) +} +func (m *GroupNew) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupNew.Marshal(b, m, deterministic) +} +func (dst *GroupNew) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupNew.Merge(dst, src) +} +func (m *GroupNew) XXX_Size() int { + return xxx_messageInfo_GroupNew.Size(m) +} +func (m *GroupNew) XXX_DiscardUnknown() { + xxx_messageInfo_GroupNew.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupNew proto.InternalMessageInfo + +func (m *GroupNew) GetG() *GroupNew_G { + if m != nil { + return m.G + } + return nil +} + +type GroupNew_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + Y *int32 `protobuf:"varint,3,opt,name=y" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupNew_G) Reset() { *m = GroupNew_G{} } +func (m *GroupNew_G) String() string { return proto.CompactTextString(m) } +func (*GroupNew_G) ProtoMessage() {} +func (*GroupNew_G) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{26, 0} +} +func (m *GroupNew_G) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupNew_G.Unmarshal(m, b) +} +func (m *GroupNew_G) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupNew_G.Marshal(b, m, deterministic) +} +func (dst *GroupNew_G) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupNew_G.Merge(dst, src) +} +func (m *GroupNew_G) XXX_Size() int { + return xxx_messageInfo_GroupNew_G.Size(m) +} +func 
(m *GroupNew_G) XXX_DiscardUnknown() { + xxx_messageInfo_GroupNew_G.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupNew_G proto.InternalMessageInfo + +func (m *GroupNew_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +func (m *GroupNew_G) GetY() int32 { + if m != nil && m.Y != nil { + return *m.Y + } + return 0 +} + +type FloatingPoint struct { + F *float64 `protobuf:"fixed64,1,req,name=f" json:"f,omitempty"` + Exact *bool `protobuf:"varint,2,opt,name=exact" json:"exact,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FloatingPoint) Reset() { *m = FloatingPoint{} } +func (m *FloatingPoint) String() string { return proto.CompactTextString(m) } +func (*FloatingPoint) ProtoMessage() {} +func (*FloatingPoint) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{27} +} +func (m *FloatingPoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatingPoint.Unmarshal(m, b) +} +func (m *FloatingPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FloatingPoint.Marshal(b, m, deterministic) +} +func (dst *FloatingPoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatingPoint.Merge(dst, src) +} +func (m *FloatingPoint) XXX_Size() int { + return xxx_messageInfo_FloatingPoint.Size(m) +} +func (m *FloatingPoint) XXX_DiscardUnknown() { + xxx_messageInfo_FloatingPoint.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatingPoint proto.InternalMessageInfo + +func (m *FloatingPoint) GetF() float64 { + if m != nil && m.F != nil { + return *m.F + } + return 0 +} + +func (m *FloatingPoint) GetExact() bool { + if m != nil && m.Exact != nil { + return *m.Exact + } + return false +} + +type MessageWithMap struct { + NameMapping map[int32]string `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MsgMapping map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ByteMapping map[bool][]byte `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + StrToStr map[string]string `protobuf:"bytes,4,rep,name=str_to_str,json=strToStr" json:"str_to_str,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{28} +} +func (m *MessageWithMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageWithMap.Unmarshal(m, b) +} +func (m *MessageWithMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageWithMap.Marshal(b, m, deterministic) +} +func (dst *MessageWithMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageWithMap.Merge(dst, src) +} +func (m *MessageWithMap) XXX_Size() int { + return xxx_messageInfo_MessageWithMap.Size(m) +} +func (m *MessageWithMap) XXX_DiscardUnknown() { + 
xxx_messageInfo_MessageWithMap.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageWithMap proto.InternalMessageInfo + +func (m *MessageWithMap) GetNameMapping() map[int32]string { + if m != nil { + return m.NameMapping + } + return nil +} + +func (m *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint { + if m != nil { + return m.MsgMapping + } + return nil +} + +func (m *MessageWithMap) GetByteMapping() map[bool][]byte { + if m != nil { + return m.ByteMapping + } + return nil +} + +func (m *MessageWithMap) GetStrToStr() map[string]string { + if m != nil { + return m.StrToStr + } + return nil +} + +type Oneof struct { + // Types that are valid to be assigned to Union: + // *Oneof_F_Bool + // *Oneof_F_Int32 + // *Oneof_F_Int64 + // *Oneof_F_Fixed32 + // *Oneof_F_Fixed64 + // *Oneof_F_Uint32 + // *Oneof_F_Uint64 + // *Oneof_F_Float + // *Oneof_F_Double + // *Oneof_F_String + // *Oneof_F_Bytes + // *Oneof_F_Sint32 + // *Oneof_F_Sint64 + // *Oneof_F_Enum + // *Oneof_F_Message + // *Oneof_FGroup + // *Oneof_F_Largest_Tag + Union isOneof_Union `protobuf_oneof:"union"` + // Types that are valid to be assigned to Tormato: + // *Oneof_Value + Tormato isOneof_Tormato `protobuf_oneof:"tormato"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Oneof) Reset() { *m = Oneof{} } +func (m *Oneof) String() string { return proto.CompactTextString(m) } +func (*Oneof) ProtoMessage() {} +func (*Oneof) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{29} +} +func (m *Oneof) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Oneof.Unmarshal(m, b) +} +func (m *Oneof) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Oneof.Marshal(b, m, deterministic) +} +func (dst *Oneof) XXX_Merge(src proto.Message) { + xxx_messageInfo_Oneof.Merge(dst, src) +} +func (m *Oneof) XXX_Size() int { + return xxx_messageInfo_Oneof.Size(m) +} +func (m *Oneof) XXX_DiscardUnknown() { + xxx_messageInfo_Oneof.DiscardUnknown(m) +} + +var xxx_messageInfo_Oneof proto.InternalMessageInfo + +type isOneof_Union interface { + isOneof_Union() +} +type isOneof_Tormato interface { + isOneof_Tormato() +} + +type Oneof_F_Bool struct { + F_Bool bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,oneof"` +} +type Oneof_F_Int32 struct { + F_Int32 int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,oneof"` +} +type Oneof_F_Int64 struct { + F_Int64 int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,oneof"` +} +type Oneof_F_Fixed32 struct { + F_Fixed32 uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,oneof"` +} +type Oneof_F_Fixed64 struct { + F_Fixed64 uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,oneof"` +} +type Oneof_F_Uint32 struct { + F_Uint32 uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,oneof"` +} +type Oneof_F_Uint64 struct { + F_Uint64 uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,oneof"` +} +type Oneof_F_Float struct { + F_Float float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,oneof"` +} +type Oneof_F_Double struct { + F_Double float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,oneof"` +} +type Oneof_F_String struct { + F_String string `protobuf:"bytes,10,opt,name=F_String,json=FString,oneof"` +} +type Oneof_F_Bytes struct { + F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,oneof"` +} +type Oneof_F_Sint32 struct { + F_Sint32 int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,oneof"` +} +type 
Oneof_F_Sint64 struct { + F_Sint64 int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,oneof"` +} +type Oneof_F_Enum struct { + F_Enum MyMessage_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=test_proto.MyMessage_Color,oneof"` +} +type Oneof_F_Message struct { + F_Message *GoTestField `protobuf:"bytes,15,opt,name=F_Message,json=FMessage,oneof"` +} +type Oneof_FGroup struct { + FGroup *Oneof_F_Group `protobuf:"group,16,opt,name=F_Group,json=fGroup,oneof"` +} +type Oneof_F_Largest_Tag struct { + F_Largest_Tag int32 `protobuf:"varint,536870911,opt,name=F_Largest_Tag,json=FLargestTag,oneof"` +} +type Oneof_Value struct { + Value int32 `protobuf:"varint,100,opt,name=value,oneof"` +} + +func (*Oneof_F_Bool) isOneof_Union() {} +func (*Oneof_F_Int32) isOneof_Union() {} +func (*Oneof_F_Int64) isOneof_Union() {} +func (*Oneof_F_Fixed32) isOneof_Union() {} +func (*Oneof_F_Fixed64) isOneof_Union() {} +func (*Oneof_F_Uint32) isOneof_Union() {} +func (*Oneof_F_Uint64) isOneof_Union() {} +func (*Oneof_F_Float) isOneof_Union() {} +func (*Oneof_F_Double) isOneof_Union() {} +func (*Oneof_F_String) isOneof_Union() {} +func (*Oneof_F_Bytes) isOneof_Union() {} +func (*Oneof_F_Sint32) isOneof_Union() {} +func (*Oneof_F_Sint64) isOneof_Union() {} +func (*Oneof_F_Enum) isOneof_Union() {} +func (*Oneof_F_Message) isOneof_Union() {} +func (*Oneof_FGroup) isOneof_Union() {} +func (*Oneof_F_Largest_Tag) isOneof_Union() {} +func (*Oneof_Value) isOneof_Tormato() {} + +func (m *Oneof) GetUnion() isOneof_Union { + if m != nil { + return m.Union + } + return nil +} +func (m *Oneof) GetTormato() isOneof_Tormato { + if m != nil { + return m.Tormato + } + return nil +} + +func (m *Oneof) GetF_Bool() bool { + if x, ok := m.GetUnion().(*Oneof_F_Bool); ok { + return x.F_Bool + } + return false +} + +func (m *Oneof) GetF_Int32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Int32); ok { + return x.F_Int32 + } + return 0 +} + +func (m *Oneof) GetF_Int64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Int64); ok { + return x.F_Int64 + } + return 0 +} + +func (m *Oneof) GetF_Fixed32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed32); ok { + return x.F_Fixed32 + } + return 0 +} + +func (m *Oneof) GetF_Fixed64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed64); ok { + return x.F_Fixed64 + } + return 0 +} + +func (m *Oneof) GetF_Uint32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Uint32); ok { + return x.F_Uint32 + } + return 0 +} + +func (m *Oneof) GetF_Uint64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Uint64); ok { + return x.F_Uint64 + } + return 0 +} + +func (m *Oneof) GetF_Float() float32 { + if x, ok := m.GetUnion().(*Oneof_F_Float); ok { + return x.F_Float + } + return 0 +} + +func (m *Oneof) GetF_Double() float64 { + if x, ok := m.GetUnion().(*Oneof_F_Double); ok { + return x.F_Double + } + return 0 +} + +func (m *Oneof) GetF_String() string { + if x, ok := m.GetUnion().(*Oneof_F_String); ok { + return x.F_String + } + return "" +} + +func (m *Oneof) GetF_Bytes() []byte { + if x, ok := m.GetUnion().(*Oneof_F_Bytes); ok { + return x.F_Bytes + } + return nil +} + +func (m *Oneof) GetF_Sint32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Sint32); ok { + return x.F_Sint32 + } + return 0 +} + +func (m *Oneof) GetF_Sint64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Sint64); ok { + return x.F_Sint64 + } + return 0 +} + +func (m *Oneof) GetF_Enum() MyMessage_Color { + if x, ok := m.GetUnion().(*Oneof_F_Enum); ok { + return x.F_Enum + } + return MyMessage_RED +} + +func (m *Oneof) 
GetF_Message() *GoTestField { + if x, ok := m.GetUnion().(*Oneof_F_Message); ok { + return x.F_Message + } + return nil +} + +func (m *Oneof) GetFGroup() *Oneof_F_Group { + if x, ok := m.GetUnion().(*Oneof_FGroup); ok { + return x.FGroup + } + return nil +} + +func (m *Oneof) GetF_Largest_Tag() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Largest_Tag); ok { + return x.F_Largest_Tag + } + return 0 +} + +func (m *Oneof) GetValue() int32 { + if x, ok := m.GetTormato().(*Oneof_Value); ok { + return x.Value + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Oneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Oneof_OneofMarshaler, _Oneof_OneofUnmarshaler, _Oneof_OneofSizer, []interface{}{ + (*Oneof_F_Bool)(nil), + (*Oneof_F_Int32)(nil), + (*Oneof_F_Int64)(nil), + (*Oneof_F_Fixed32)(nil), + (*Oneof_F_Fixed64)(nil), + (*Oneof_F_Uint32)(nil), + (*Oneof_F_Uint64)(nil), + (*Oneof_F_Float)(nil), + (*Oneof_F_Double)(nil), + (*Oneof_F_String)(nil), + (*Oneof_F_Bytes)(nil), + (*Oneof_F_Sint32)(nil), + (*Oneof_F_Sint64)(nil), + (*Oneof_F_Enum)(nil), + (*Oneof_F_Message)(nil), + (*Oneof_FGroup)(nil), + (*Oneof_F_Largest_Tag)(nil), + (*Oneof_Value)(nil), + } +} + +func _Oneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Oneof) + // union + switch x := m.Union.(type) { + case *Oneof_F_Bool: + t := uint64(0) + if x.F_Bool { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Oneof_F_Int32: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Int32)) + case *Oneof_F_Int64: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Int64)) + case *Oneof_F_Fixed32: + b.EncodeVarint(4<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(x.F_Fixed32)) + case *Oneof_F_Fixed64: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(uint64(x.F_Fixed64)) + case *Oneof_F_Uint32: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Uint32)) + case *Oneof_F_Uint64: + b.EncodeVarint(7<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Uint64)) + case *Oneof_F_Float: + b.EncodeVarint(8<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.F_Float))) + case *Oneof_F_Double: + b.EncodeVarint(9<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.F_Double)) + case *Oneof_F_String: + b.EncodeVarint(10<<3 | proto.WireBytes) + b.EncodeStringBytes(x.F_String) + case *Oneof_F_Bytes: + b.EncodeVarint(11<<3 | proto.WireBytes) + b.EncodeRawBytes(x.F_Bytes) + case *Oneof_F_Sint32: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeZigzag32(uint64(x.F_Sint32)) + case *Oneof_F_Sint64: + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeZigzag64(uint64(x.F_Sint64)) + case *Oneof_F_Enum: + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Enum)) + case *Oneof_F_Message: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.F_Message); err != nil { + return err + } + case *Oneof_FGroup: + b.EncodeVarint(16<<3 | proto.WireStartGroup) + if err := b.Marshal(x.FGroup); err != nil { + return err + } + b.EncodeVarint(16<<3 | proto.WireEndGroup) + case *Oneof_F_Largest_Tag: + b.EncodeVarint(536870911<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Largest_Tag)) + case nil: + default: + return fmt.Errorf("Oneof.Union has unexpected type %T", x) + } + // tormato + switch x := m.Tormato.(type) 
{ + case *Oneof_Value: + b.EncodeVarint(100<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Value)) + case nil: + default: + return fmt.Errorf("Oneof.Tormato has unexpected type %T", x) + } + return nil +} + +func _Oneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Oneof) + switch tag { + case 1: // union.F_Bool + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Bool{x != 0} + return true, err + case 2: // union.F_Int32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Int32{int32(x)} + return true, err + case 3: // union.F_Int64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Int64{int64(x)} + return true, err + case 4: // union.F_Fixed32 + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Oneof_F_Fixed32{uint32(x)} + return true, err + case 5: // union.F_Fixed64 + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Oneof_F_Fixed64{x} + return true, err + case 6: // union.F_Uint32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Uint32{uint32(x)} + return true, err + case 7: // union.F_Uint64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Uint64{x} + return true, err + case 8: // union.F_Float + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Oneof_F_Float{math.Float32frombits(uint32(x))} + return true, err + case 9: // union.F_Double + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Oneof_F_Double{math.Float64frombits(x)} + return true, err + case 10: // union.F_String + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Oneof_F_String{x} + return true, err + case 11: // union.F_Bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Oneof_F_Bytes{x} + return true, err + case 12: // union.F_Sint32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag32() + m.Union = &Oneof_F_Sint32{int32(x)} + return true, err + case 13: // union.F_Sint64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag64() + m.Union = &Oneof_F_Sint64{int64(x)} + return true, err + case 14: // union.F_Enum + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Enum{MyMessage_Color(x)} + return true, err + case 15: // union.F_Message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GoTestField) + err := b.DecodeMessage(msg) + m.Union = &Oneof_F_Message{msg} + return true, err + case 16: // union.f_group + if wire != proto.WireStartGroup { + return true, proto.ErrInternalBadWireType + } + msg := new(Oneof_F_Group) + err := b.DecodeGroup(msg) + m.Union = &Oneof_FGroup{msg} + return true, err + case 536870911: // 
union.F_Largest_Tag + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Largest_Tag{int32(x)} + return true, err + case 100: // tormato.value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Tormato = &Oneof_Value{int32(x)} + return true, err + default: + return false, nil + } +} + +func _Oneof_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Oneof) + // union + switch x := m.Union.(type) { + case *Oneof_F_Bool: + n += 1 // tag and wire + n += 1 + case *Oneof_F_Int32: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.F_Int32)) + case *Oneof_F_Int64: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.F_Int64)) + case *Oneof_F_Fixed32: + n += 1 // tag and wire + n += 4 + case *Oneof_F_Fixed64: + n += 1 // tag and wire + n += 8 + case *Oneof_F_Uint32: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.F_Uint32)) + case *Oneof_F_Uint64: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.F_Uint64)) + case *Oneof_F_Float: + n += 1 // tag and wire + n += 4 + case *Oneof_F_Double: + n += 1 // tag and wire + n += 8 + case *Oneof_F_String: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.F_String))) + n += len(x.F_String) + case *Oneof_F_Bytes: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.F_Bytes))) + n += len(x.F_Bytes) + case *Oneof_F_Sint32: + n += 1 // tag and wire + n += proto.SizeVarint(uint64((uint32(x.F_Sint32) << 1) ^ uint32((int32(x.F_Sint32) >> 31)))) + case *Oneof_F_Sint64: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(uint64(x.F_Sint64<<1) ^ uint64((int64(x.F_Sint64) >> 63)))) + case *Oneof_F_Enum: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.F_Enum)) + case *Oneof_F_Message: + s := proto.Size(x.F_Message) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Oneof_FGroup: + n += 2 // tag and wire + n += proto.Size(x.FGroup) + n += 2 // tag and wire + case *Oneof_F_Largest_Tag: + n += 10 // tag and wire + n += proto.SizeVarint(uint64(x.F_Largest_Tag)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // tormato + switch x := m.Tormato.(type) { + case *Oneof_Value: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.Value)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Oneof_F_Group struct { + X *int32 `protobuf:"varint,17,opt,name=x" json:"x,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Oneof_F_Group) Reset() { *m = Oneof_F_Group{} } +func (m *Oneof_F_Group) String() string { return proto.CompactTextString(m) } +func (*Oneof_F_Group) ProtoMessage() {} +func (*Oneof_F_Group) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{29, 0} +} +func (m *Oneof_F_Group) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Oneof_F_Group.Unmarshal(m, b) +} +func (m *Oneof_F_Group) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Oneof_F_Group.Marshal(b, m, deterministic) +} +func (dst *Oneof_F_Group) XXX_Merge(src proto.Message) { + xxx_messageInfo_Oneof_F_Group.Merge(dst, src) +} +func (m *Oneof_F_Group) XXX_Size() int { + return xxx_messageInfo_Oneof_F_Group.Size(m) +} +func (m *Oneof_F_Group) XXX_DiscardUnknown() { + xxx_messageInfo_Oneof_F_Group.DiscardUnknown(m) +} + +var 
xxx_messageInfo_Oneof_F_Group proto.InternalMessageInfo + +func (m *Oneof_F_Group) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type Communique struct { + MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` + // This is a oneof, called "union". + // + // Types that are valid to be assigned to Union: + // *Communique_Number + // *Communique_Name + // *Communique_Data + // *Communique_TempC + // *Communique_Col + // *Communique_Msg + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Communique) Reset() { *m = Communique{} } +func (m *Communique) String() string { return proto.CompactTextString(m) } +func (*Communique) ProtoMessage() {} +func (*Communique) Descriptor() ([]byte, []int) { + return fileDescriptor_test_74787bfc6550f8a7, []int{30} +} +func (m *Communique) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique.Unmarshal(m, b) +} +func (m *Communique) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique.Marshal(b, m, deterministic) +} +func (dst *Communique) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique.Merge(dst, src) +} +func (m *Communique) XXX_Size() int { + return xxx_messageInfo_Communique.Size(m) +} +func (m *Communique) XXX_DiscardUnknown() { + xxx_messageInfo_Communique.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique proto.InternalMessageInfo + +type isCommunique_Union interface { + isCommunique_Union() +} + +type Communique_Number struct { + Number int32 `protobuf:"varint,5,opt,name=number,oneof"` +} +type Communique_Name struct { + Name string `protobuf:"bytes,6,opt,name=name,oneof"` +} +type Communique_Data struct { + Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` +} +type Communique_TempC struct { + TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` +} +type Communique_Col struct { + Col MyMessage_Color `protobuf:"varint,9,opt,name=col,enum=test_proto.MyMessage_Color,oneof"` +} +type Communique_Msg struct { + Msg *Strings `protobuf:"bytes,10,opt,name=msg,oneof"` +} + +func (*Communique_Number) isCommunique_Union() {} +func (*Communique_Name) isCommunique_Union() {} +func (*Communique_Data) isCommunique_Union() {} +func (*Communique_TempC) isCommunique_Union() {} +func (*Communique_Col) isCommunique_Union() {} +func (*Communique_Msg) isCommunique_Union() {} + +func (m *Communique) GetUnion() isCommunique_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} + +func (m *Communique) GetNumber() int32 { + if x, ok := m.GetUnion().(*Communique_Number); ok { + return x.Number + } + return 0 +} + +func (m *Communique) GetName() string { + if x, ok := m.GetUnion().(*Communique_Name); ok { + return x.Name + } + return "" +} + +func (m *Communique) GetData() []byte { + if x, ok := m.GetUnion().(*Communique_Data); ok { + return x.Data + } + return nil +} + +func (m *Communique) GetTempC() float64 { + if x, ok := m.GetUnion().(*Communique_TempC); ok { + return x.TempC + } + return 0 +} + +func (m *Communique) GetCol() MyMessage_Color { + if x, ok := m.GetUnion().(*Communique_Col); ok { + return x.Col + } + return MyMessage_RED +} + +func (m *Communique) GetMsg() *Strings { + if x, ok := m.GetUnion().(*Communique_Msg); ok { + return x.Msg + 
} + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ + (*Communique_Number)(nil), + (*Communique_Name)(nil), + (*Communique_Data)(nil), + (*Communique_TempC)(nil), + (*Communique_Col)(nil), + (*Communique_Msg)(nil), + } +} + +func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Number)) + case *Communique_Name: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case *Communique_Data: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Data) + case *Communique_TempC: + b.EncodeVarint(8<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.TempC)) + case *Communique_Col: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Col)) + case *Communique_Msg: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Msg); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Communique.Union has unexpected type %T", x) + } + return nil +} + +func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Communique) + switch tag { + case 5: // union.number + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Number{int32(x)} + return true, err + case 6: // union.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Communique_Name{x} + return true, err + case 7: // union.data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Communique_Data{x} + return true, err + case 8: // union.temp_c + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Communique_TempC{math.Float64frombits(x)} + return true, err + case 9: // union.col + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Col{MyMessage_Color(x)} + return true, err + case 10: // union.msg + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Strings) + err := b.DecodeMessage(msg) + m.Union = &Communique_Msg{msg} + return true, err + default: + return false, nil + } +} + +func _Communique_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Number)) + case *Communique_Name: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case *Communique_Data: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Data))) + n += len(x.Data) + case *Communique_TempC: + n += 1 // tag and wire + n += 8 + case *Communique_Col: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Col)) + case *Communique_Msg: + s := proto.Size(x.Msg) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +var E_Greeting = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: ([]string)(nil), + Field: 106, + Name: "test_proto.greeting", + Tag: "bytes,106,rep,name=greeting", + Filename: "test_proto/test.proto", +} + +var E_Complex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: (*ComplexExtension)(nil), + Field: 200, + Name: "test_proto.complex", + Tag: "bytes,200,opt,name=complex", + Filename: "test_proto/test.proto", +} + +var E_RComplex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: ([]*ComplexExtension)(nil), + Field: 201, + Name: "test_proto.r_complex", + Tag: "bytes,201,rep,name=r_complex,json=rComplex", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 101, + Name: "test_proto.no_default_double", + Tag: "fixed64,101,opt,name=no_default_double,json=noDefaultDouble", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 102, + Name: "test_proto.no_default_float", + Tag: "fixed32,102,opt,name=no_default_float,json=noDefaultFloat", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 103, + Name: "test_proto.no_default_int32", + Tag: "varint,103,opt,name=no_default_int32,json=noDefaultInt32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 104, + Name: "test_proto.no_default_int64", + Tag: "varint,104,opt,name=no_default_int64,json=noDefaultInt64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 105, + Name: "test_proto.no_default_uint32", + Tag: "varint,105,opt,name=no_default_uint32,json=noDefaultUint32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 106, + Name: "test_proto.no_default_uint64", + Tag: "varint,106,opt,name=no_default_uint64,json=noDefaultUint64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 107, + Name: "test_proto.no_default_sint32", + Tag: "zigzag32,107,opt,name=no_default_sint32,json=noDefaultSint32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 108, + Name: "test_proto.no_default_sint64", + Tag: "zigzag64,108,opt,name=no_default_sint64,json=noDefaultSint64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 109, + Name: "test_proto.no_default_fixed32", + Tag: "fixed32,109,opt,name=no_default_fixed32,json=noDefaultFixed32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 110, + Name: 
"test_proto.no_default_fixed64", + Tag: "fixed64,110,opt,name=no_default_fixed64,json=noDefaultFixed64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSfixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 111, + Name: "test_proto.no_default_sfixed32", + Tag: "fixed32,111,opt,name=no_default_sfixed32,json=noDefaultSfixed32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 112, + Name: "test_proto.no_default_sfixed64", + Tag: "fixed64,112,opt,name=no_default_sfixed64,json=noDefaultSfixed64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 113, + Name: "test_proto.no_default_bool", + Tag: "varint,113,opt,name=no_default_bool,json=noDefaultBool", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 114, + Name: "test_proto.no_default_string", + Tag: "bytes,114,opt,name=no_default_string,json=noDefaultString", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 115, + Name: "test_proto.no_default_bytes", + Tag: "bytes,115,opt,name=no_default_bytes,json=noDefaultBytes", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 116, + Name: "test_proto.no_default_enum", + Tag: "varint,116,opt,name=no_default_enum,json=noDefaultEnum,enum=test_proto.DefaultsMessage_DefaultsEnum", + Filename: "test_proto/test.proto", +} + +var E_DefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 201, + Name: "test_proto.default_double", + Tag: "fixed64,201,opt,name=default_double,json=defaultDouble,def=3.1415", + Filename: "test_proto/test.proto", +} + +var E_DefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 202, + Name: "test_proto.default_float", + Tag: "fixed32,202,opt,name=default_float,json=defaultFloat,def=3.14", + Filename: "test_proto/test.proto", +} + +var E_DefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 203, + Name: "test_proto.default_int32", + Tag: "varint,203,opt,name=default_int32,json=defaultInt32,def=42", + Filename: "test_proto/test.proto", +} + +var E_DefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 204, + Name: "test_proto.default_int64", + Tag: "varint,204,opt,name=default_int64,json=defaultInt64,def=43", + Filename: "test_proto/test.proto", +} + +var E_DefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 205, + Name: "test_proto.default_uint32", + Tag: "varint,205,opt,name=default_uint32,json=defaultUint32,def=44", + Filename: "test_proto/test.proto", +} + +var E_DefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 206, + Name: "test_proto.default_uint64", + Tag: 
"varint,206,opt,name=default_uint64,json=defaultUint64,def=45", + Filename: "test_proto/test.proto", +} + +var E_DefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 207, + Name: "test_proto.default_sint32", + Tag: "zigzag32,207,opt,name=default_sint32,json=defaultSint32,def=46", + Filename: "test_proto/test.proto", +} + +var E_DefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 208, + Name: "test_proto.default_sint64", + Tag: "zigzag64,208,opt,name=default_sint64,json=defaultSint64,def=47", + Filename: "test_proto/test.proto", +} + +var E_DefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 209, + Name: "test_proto.default_fixed32", + Tag: "fixed32,209,opt,name=default_fixed32,json=defaultFixed32,def=48", + Filename: "test_proto/test.proto", +} + +var E_DefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 210, + Name: "test_proto.default_fixed64", + Tag: "fixed64,210,opt,name=default_fixed64,json=defaultFixed64,def=49", + Filename: "test_proto/test.proto", +} + +var E_DefaultSfixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 211, + Name: "test_proto.default_sfixed32", + Tag: "fixed32,211,opt,name=default_sfixed32,json=defaultSfixed32,def=50", + Filename: "test_proto/test.proto", +} + +var E_DefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 212, + Name: "test_proto.default_sfixed64", + Tag: "fixed64,212,opt,name=default_sfixed64,json=defaultSfixed64,def=51", + Filename: "test_proto/test.proto", +} + +var E_DefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 213, + Name: "test_proto.default_bool", + Tag: "varint,213,opt,name=default_bool,json=defaultBool,def=1", + Filename: "test_proto/test.proto", +} + +var E_DefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 214, + Name: "test_proto.default_string", + Tag: "bytes,214,opt,name=default_string,json=defaultString,def=Hello, string,def=foo", + Filename: "test_proto/test.proto", +} + +var E_DefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 215, + Name: "test_proto.default_bytes", + Tag: "bytes,215,opt,name=default_bytes,json=defaultBytes,def=Hello, bytes", + Filename: "test_proto/test.proto", +} + +var E_DefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 216, + Name: "test_proto.default_enum", + Tag: "varint,216,opt,name=default_enum,json=defaultEnum,enum=test_proto.DefaultsMessage_DefaultsEnum,def=1", + Filename: "test_proto/test.proto", +} + +var E_X201 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 201, + Name: "test_proto.x201", + Tag: "bytes,201,opt,name=x201", + Filename: "test_proto/test.proto", +} + +var E_X202 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 202, + Name: "test_proto.x202", + Tag: "bytes,202,opt,name=x202", + Filename: "test_proto/test.proto", +} + +var E_X203 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + 
ExtensionType: (*Empty)(nil), + Field: 203, + Name: "test_proto.x203", + Tag: "bytes,203,opt,name=x203", + Filename: "test_proto/test.proto", +} + +var E_X204 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 204, + Name: "test_proto.x204", + Tag: "bytes,204,opt,name=x204", + Filename: "test_proto/test.proto", +} + +var E_X205 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 205, + Name: "test_proto.x205", + Tag: "bytes,205,opt,name=x205", + Filename: "test_proto/test.proto", +} + +var E_X206 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 206, + Name: "test_proto.x206", + Tag: "bytes,206,opt,name=x206", + Filename: "test_proto/test.proto", +} + +var E_X207 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 207, + Name: "test_proto.x207", + Tag: "bytes,207,opt,name=x207", + Filename: "test_proto/test.proto", +} + +var E_X208 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 208, + Name: "test_proto.x208", + Tag: "bytes,208,opt,name=x208", + Filename: "test_proto/test.proto", +} + +var E_X209 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 209, + Name: "test_proto.x209", + Tag: "bytes,209,opt,name=x209", + Filename: "test_proto/test.proto", +} + +var E_X210 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 210, + Name: "test_proto.x210", + Tag: "bytes,210,opt,name=x210", + Filename: "test_proto/test.proto", +} + +var E_X211 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 211, + Name: "test_proto.x211", + Tag: "bytes,211,opt,name=x211", + Filename: "test_proto/test.proto", +} + +var E_X212 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 212, + Name: "test_proto.x212", + Tag: "bytes,212,opt,name=x212", + Filename: "test_proto/test.proto", +} + +var E_X213 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 213, + Name: "test_proto.x213", + Tag: "bytes,213,opt,name=x213", + Filename: "test_proto/test.proto", +} + +var E_X214 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 214, + Name: "test_proto.x214", + Tag: "bytes,214,opt,name=x214", + Filename: "test_proto/test.proto", +} + +var E_X215 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 215, + Name: "test_proto.x215", + Tag: "bytes,215,opt,name=x215", + Filename: "test_proto/test.proto", +} + +var E_X216 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 216, + Name: "test_proto.x216", + Tag: "bytes,216,opt,name=x216", + Filename: "test_proto/test.proto", +} + +var E_X217 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 217, + Name: "test_proto.x217", + Tag: "bytes,217,opt,name=x217", + Filename: "test_proto/test.proto", +} + +var E_X218 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 218, + Name: "test_proto.x218", + Tag: "bytes,218,opt,name=x218", + Filename: "test_proto/test.proto", +} + +var E_X219 = &proto.ExtensionDesc{ + ExtendedType: 
(*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 219, + Name: "test_proto.x219", + Tag: "bytes,219,opt,name=x219", + Filename: "test_proto/test.proto", +} + +var E_X220 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 220, + Name: "test_proto.x220", + Tag: "bytes,220,opt,name=x220", + Filename: "test_proto/test.proto", +} + +var E_X221 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 221, + Name: "test_proto.x221", + Tag: "bytes,221,opt,name=x221", + Filename: "test_proto/test.proto", +} + +var E_X222 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 222, + Name: "test_proto.x222", + Tag: "bytes,222,opt,name=x222", + Filename: "test_proto/test.proto", +} + +var E_X223 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 223, + Name: "test_proto.x223", + Tag: "bytes,223,opt,name=x223", + Filename: "test_proto/test.proto", +} + +var E_X224 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 224, + Name: "test_proto.x224", + Tag: "bytes,224,opt,name=x224", + Filename: "test_proto/test.proto", +} + +var E_X225 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 225, + Name: "test_proto.x225", + Tag: "bytes,225,opt,name=x225", + Filename: "test_proto/test.proto", +} + +var E_X226 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 226, + Name: "test_proto.x226", + Tag: "bytes,226,opt,name=x226", + Filename: "test_proto/test.proto", +} + +var E_X227 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 227, + Name: "test_proto.x227", + Tag: "bytes,227,opt,name=x227", + Filename: "test_proto/test.proto", +} + +var E_X228 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 228, + Name: "test_proto.x228", + Tag: "bytes,228,opt,name=x228", + Filename: "test_proto/test.proto", +} + +var E_X229 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 229, + Name: "test_proto.x229", + Tag: "bytes,229,opt,name=x229", + Filename: "test_proto/test.proto", +} + +var E_X230 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 230, + Name: "test_proto.x230", + Tag: "bytes,230,opt,name=x230", + Filename: "test_proto/test.proto", +} + +var E_X231 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 231, + Name: "test_proto.x231", + Tag: "bytes,231,opt,name=x231", + Filename: "test_proto/test.proto", +} + +var E_X232 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 232, + Name: "test_proto.x232", + Tag: "bytes,232,opt,name=x232", + Filename: "test_proto/test.proto", +} + +var E_X233 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 233, + Name: "test_proto.x233", + Tag: "bytes,233,opt,name=x233", + Filename: "test_proto/test.proto", +} + +var E_X234 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 234, + Name: "test_proto.x234", + Tag: "bytes,234,opt,name=x234", + Filename: "test_proto/test.proto", +} + +var E_X235 = &proto.ExtensionDesc{ + 
ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 235, + Name: "test_proto.x235", + Tag: "bytes,235,opt,name=x235", + Filename: "test_proto/test.proto", +} + +var E_X236 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 236, + Name: "test_proto.x236", + Tag: "bytes,236,opt,name=x236", + Filename: "test_proto/test.proto", +} + +var E_X237 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 237, + Name: "test_proto.x237", + Tag: "bytes,237,opt,name=x237", + Filename: "test_proto/test.proto", +} + +var E_X238 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 238, + Name: "test_proto.x238", + Tag: "bytes,238,opt,name=x238", + Filename: "test_proto/test.proto", +} + +var E_X239 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 239, + Name: "test_proto.x239", + Tag: "bytes,239,opt,name=x239", + Filename: "test_proto/test.proto", +} + +var E_X240 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 240, + Name: "test_proto.x240", + Tag: "bytes,240,opt,name=x240", + Filename: "test_proto/test.proto", +} + +var E_X241 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 241, + Name: "test_proto.x241", + Tag: "bytes,241,opt,name=x241", + Filename: "test_proto/test.proto", +} + +var E_X242 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 242, + Name: "test_proto.x242", + Tag: "bytes,242,opt,name=x242", + Filename: "test_proto/test.proto", +} + +var E_X243 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 243, + Name: "test_proto.x243", + Tag: "bytes,243,opt,name=x243", + Filename: "test_proto/test.proto", +} + +var E_X244 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 244, + Name: "test_proto.x244", + Tag: "bytes,244,opt,name=x244", + Filename: "test_proto/test.proto", +} + +var E_X245 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 245, + Name: "test_proto.x245", + Tag: "bytes,245,opt,name=x245", + Filename: "test_proto/test.proto", +} + +var E_X246 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 246, + Name: "test_proto.x246", + Tag: "bytes,246,opt,name=x246", + Filename: "test_proto/test.proto", +} + +var E_X247 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 247, + Name: "test_proto.x247", + Tag: "bytes,247,opt,name=x247", + Filename: "test_proto/test.proto", +} + +var E_X248 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 248, + Name: "test_proto.x248", + Tag: "bytes,248,opt,name=x248", + Filename: "test_proto/test.proto", +} + +var E_X249 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 249, + Name: "test_proto.x249", + Tag: "bytes,249,opt,name=x249", + Filename: "test_proto/test.proto", +} + +var E_X250 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 250, + Name: "test_proto.x250", + Tag: "bytes,250,opt,name=x250", + Filename: "test_proto/test.proto", +} + +func init() { + 
proto.RegisterType((*GoEnum)(nil), "test_proto.GoEnum") + proto.RegisterType((*GoTestField)(nil), "test_proto.GoTestField") + proto.RegisterType((*GoTest)(nil), "test_proto.GoTest") + proto.RegisterType((*GoTest_RequiredGroup)(nil), "test_proto.GoTest.RequiredGroup") + proto.RegisterType((*GoTest_RepeatedGroup)(nil), "test_proto.GoTest.RepeatedGroup") + proto.RegisterType((*GoTest_OptionalGroup)(nil), "test_proto.GoTest.OptionalGroup") + proto.RegisterType((*GoTestRequiredGroupField)(nil), "test_proto.GoTestRequiredGroupField") + proto.RegisterType((*GoTestRequiredGroupField_Group)(nil), "test_proto.GoTestRequiredGroupField.Group") + proto.RegisterType((*GoSkipTest)(nil), "test_proto.GoSkipTest") + proto.RegisterType((*GoSkipTest_SkipGroup)(nil), "test_proto.GoSkipTest.SkipGroup") + proto.RegisterType((*NonPackedTest)(nil), "test_proto.NonPackedTest") + proto.RegisterType((*PackedTest)(nil), "test_proto.PackedTest") + proto.RegisterType((*MaxTag)(nil), "test_proto.MaxTag") + proto.RegisterType((*OldMessage)(nil), "test_proto.OldMessage") + proto.RegisterType((*OldMessage_Nested)(nil), "test_proto.OldMessage.Nested") + proto.RegisterType((*NewMessage)(nil), "test_proto.NewMessage") + proto.RegisterType((*NewMessage_Nested)(nil), "test_proto.NewMessage.Nested") + proto.RegisterType((*InnerMessage)(nil), "test_proto.InnerMessage") + proto.RegisterType((*OtherMessage)(nil), "test_proto.OtherMessage") + proto.RegisterType((*RequiredInnerMessage)(nil), "test_proto.RequiredInnerMessage") + proto.RegisterType((*MyMessage)(nil), "test_proto.MyMessage") + proto.RegisterType((*MyMessage_SomeGroup)(nil), "test_proto.MyMessage.SomeGroup") + proto.RegisterType((*Ext)(nil), "test_proto.Ext") + proto.RegisterMapType((map[int32]int32)(nil), "test_proto.Ext.MapFieldEntry") + proto.RegisterType((*ComplexExtension)(nil), "test_proto.ComplexExtension") + proto.RegisterType((*DefaultsMessage)(nil), "test_proto.DefaultsMessage") + proto.RegisterType((*MyMessageSet)(nil), "test_proto.MyMessageSet") + proto.RegisterType((*Empty)(nil), "test_proto.Empty") + proto.RegisterType((*MessageList)(nil), "test_proto.MessageList") + proto.RegisterType((*MessageList_Message)(nil), "test_proto.MessageList.Message") + proto.RegisterType((*Strings)(nil), "test_proto.Strings") + proto.RegisterType((*Defaults)(nil), "test_proto.Defaults") + proto.RegisterType((*SubDefaults)(nil), "test_proto.SubDefaults") + proto.RegisterType((*RepeatedEnum)(nil), "test_proto.RepeatedEnum") + proto.RegisterType((*MoreRepeated)(nil), "test_proto.MoreRepeated") + proto.RegisterType((*GroupOld)(nil), "test_proto.GroupOld") + proto.RegisterType((*GroupOld_G)(nil), "test_proto.GroupOld.G") + proto.RegisterType((*GroupNew)(nil), "test_proto.GroupNew") + proto.RegisterType((*GroupNew_G)(nil), "test_proto.GroupNew.G") + proto.RegisterType((*FloatingPoint)(nil), "test_proto.FloatingPoint") + proto.RegisterType((*MessageWithMap)(nil), "test_proto.MessageWithMap") + proto.RegisterMapType((map[bool][]byte)(nil), "test_proto.MessageWithMap.ByteMappingEntry") + proto.RegisterMapType((map[int64]*FloatingPoint)(nil), "test_proto.MessageWithMap.MsgMappingEntry") + proto.RegisterMapType((map[int32]string)(nil), "test_proto.MessageWithMap.NameMappingEntry") + proto.RegisterMapType((map[string]string)(nil), "test_proto.MessageWithMap.StrToStrEntry") + proto.RegisterType((*Oneof)(nil), "test_proto.Oneof") + proto.RegisterType((*Oneof_F_Group)(nil), "test_proto.Oneof.F_Group") + proto.RegisterType((*Communique)(nil), "test_proto.Communique") + 
proto.RegisterEnum("test_proto.FOO", FOO_name, FOO_value) + proto.RegisterEnum("test_proto.GoTest_KIND", GoTest_KIND_name, GoTest_KIND_value) + proto.RegisterEnum("test_proto.MyMessage_Color", MyMessage_Color_name, MyMessage_Color_value) + proto.RegisterEnum("test_proto.DefaultsMessage_DefaultsEnum", DefaultsMessage_DefaultsEnum_name, DefaultsMessage_DefaultsEnum_value) + proto.RegisterEnum("test_proto.Defaults_Color", Defaults_Color_name, Defaults_Color_value) + proto.RegisterEnum("test_proto.RepeatedEnum_Color", RepeatedEnum_Color_name, RepeatedEnum_Color_value) + proto.RegisterExtension(E_Ext_More) + proto.RegisterExtension(E_Ext_Text) + proto.RegisterExtension(E_Ext_Number) + proto.RegisterExtension(E_Greeting) + proto.RegisterExtension(E_Complex) + proto.RegisterExtension(E_RComplex) + proto.RegisterExtension(E_NoDefaultDouble) + proto.RegisterExtension(E_NoDefaultFloat) + proto.RegisterExtension(E_NoDefaultInt32) + proto.RegisterExtension(E_NoDefaultInt64) + proto.RegisterExtension(E_NoDefaultUint32) + proto.RegisterExtension(E_NoDefaultUint64) + proto.RegisterExtension(E_NoDefaultSint32) + proto.RegisterExtension(E_NoDefaultSint64) + proto.RegisterExtension(E_NoDefaultFixed32) + proto.RegisterExtension(E_NoDefaultFixed64) + proto.RegisterExtension(E_NoDefaultSfixed32) + proto.RegisterExtension(E_NoDefaultSfixed64) + proto.RegisterExtension(E_NoDefaultBool) + proto.RegisterExtension(E_NoDefaultString) + proto.RegisterExtension(E_NoDefaultBytes) + proto.RegisterExtension(E_NoDefaultEnum) + proto.RegisterExtension(E_DefaultDouble) + proto.RegisterExtension(E_DefaultFloat) + proto.RegisterExtension(E_DefaultInt32) + proto.RegisterExtension(E_DefaultInt64) + proto.RegisterExtension(E_DefaultUint32) + proto.RegisterExtension(E_DefaultUint64) + proto.RegisterExtension(E_DefaultSint32) + proto.RegisterExtension(E_DefaultSint64) + proto.RegisterExtension(E_DefaultFixed32) + proto.RegisterExtension(E_DefaultFixed64) + proto.RegisterExtension(E_DefaultSfixed32) + proto.RegisterExtension(E_DefaultSfixed64) + proto.RegisterExtension(E_DefaultBool) + proto.RegisterExtension(E_DefaultString) + proto.RegisterExtension(E_DefaultBytes) + proto.RegisterExtension(E_DefaultEnum) + proto.RegisterExtension(E_X201) + proto.RegisterExtension(E_X202) + proto.RegisterExtension(E_X203) + proto.RegisterExtension(E_X204) + proto.RegisterExtension(E_X205) + proto.RegisterExtension(E_X206) + proto.RegisterExtension(E_X207) + proto.RegisterExtension(E_X208) + proto.RegisterExtension(E_X209) + proto.RegisterExtension(E_X210) + proto.RegisterExtension(E_X211) + proto.RegisterExtension(E_X212) + proto.RegisterExtension(E_X213) + proto.RegisterExtension(E_X214) + proto.RegisterExtension(E_X215) + proto.RegisterExtension(E_X216) + proto.RegisterExtension(E_X217) + proto.RegisterExtension(E_X218) + proto.RegisterExtension(E_X219) + proto.RegisterExtension(E_X220) + proto.RegisterExtension(E_X221) + proto.RegisterExtension(E_X222) + proto.RegisterExtension(E_X223) + proto.RegisterExtension(E_X224) + proto.RegisterExtension(E_X225) + proto.RegisterExtension(E_X226) + proto.RegisterExtension(E_X227) + proto.RegisterExtension(E_X228) + proto.RegisterExtension(E_X229) + proto.RegisterExtension(E_X230) + proto.RegisterExtension(E_X231) + proto.RegisterExtension(E_X232) + proto.RegisterExtension(E_X233) + proto.RegisterExtension(E_X234) + proto.RegisterExtension(E_X235) + proto.RegisterExtension(E_X236) + proto.RegisterExtension(E_X237) + proto.RegisterExtension(E_X238) + proto.RegisterExtension(E_X239) + 
proto.RegisterExtension(E_X240) + proto.RegisterExtension(E_X241) + proto.RegisterExtension(E_X242) + proto.RegisterExtension(E_X243) + proto.RegisterExtension(E_X244) + proto.RegisterExtension(E_X245) + proto.RegisterExtension(E_X246) + proto.RegisterExtension(E_X247) + proto.RegisterExtension(E_X248) + proto.RegisterExtension(E_X249) + proto.RegisterExtension(E_X250) +} + +func init() { proto.RegisterFile("test_proto/test.proto", fileDescriptor_test_74787bfc6550f8a7) } + +var fileDescriptor_test_74787bfc6550f8a7 = []byte{ + // 4680 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5b, 0xd9, 0x73, 0x1b, 0x47, + 0x7a, 0xd7, 0x0c, 0xee, 0x0f, 0x20, 0x31, 0x6c, 0xd1, 0x12, 0x44, 0x59, 0xd2, 0x08, 0x6b, 0xaf, + 0x61, 0xc9, 0xa2, 0x48, 0x60, 0x08, 0x49, 0x70, 0xec, 0xb2, 0x0e, 0x82, 0x62, 0x49, 0x24, 0xe4, + 0x21, 0x6d, 0x67, 0x95, 0x07, 0x14, 0x48, 0x0c, 0x40, 0xac, 0x80, 0x19, 0x18, 0x18, 0x44, 0x64, + 0x52, 0xa9, 0xf2, 0x63, 0xaa, 0xf2, 0x94, 0x4d, 0x52, 0x95, 0xf7, 0xbc, 0xe4, 0x25, 0xd7, 0x43, + 0xf2, 0x37, 0xc4, 0xd7, 0x5e, 0xde, 0x2b, 0xc9, 0x26, 0x9b, 0xfb, 0xce, 0xe6, 0xde, 0x23, 0x2f, + 0x4e, 0xf5, 0xd7, 0x3d, 0x33, 0x3d, 0x03, 0xa8, 0x45, 0x3e, 0x71, 0xa6, 0xfb, 0xf7, 0xfd, 0xfa, + 0xfa, 0xf5, 0xf7, 0xf5, 0xd7, 0x18, 0xc2, 0x0b, 0xae, 0x35, 0x76, 0x9b, 0xc3, 0x91, 0xe3, 0x3a, + 0xd7, 0xe9, 0xe3, 0x32, 0x3e, 0x12, 0x08, 0x8a, 0x8b, 0x57, 0x21, 0xb9, 0xe1, 0xac, 0xdb, 0x93, + 0x01, 0xb9, 0x0c, 0xb1, 0x8e, 0xe3, 0x14, 0x14, 0x5d, 0x2d, 0xcd, 0x97, 0xf3, 0xcb, 0x01, 0x66, + 0xb9, 0xde, 0x68, 0x98, 0xb4, 0xae, 0x78, 0x03, 0xb2, 0x1b, 0xce, 0xae, 0x35, 0x76, 0xeb, 0x3d, + 0xab, 0xdf, 0x26, 0x8b, 0x90, 0x78, 0xd8, 0xda, 0xb3, 0xfa, 0x68, 0x93, 0x31, 0xd9, 0x0b, 0x21, + 0x10, 0xdf, 0x3d, 0x1a, 0x5a, 0x05, 0x15, 0x0b, 0xf1, 0xb9, 0xf8, 0x87, 0x45, 0xda, 0x0c, 0xb5, + 0x24, 0x57, 0x21, 0xfe, 0xa0, 0x67, 0xb7, 0x79, 0x3b, 0x67, 0xc5, 0x76, 0x18, 0x62, 0xf9, 0xc1, + 0xe6, 0xf6, 0x3d, 0x13, 0x41, 0xb4, 0x85, 0xdd, 0xd6, 0x5e, 0x9f, 0x92, 0x29, 0xb4, 0x05, 0x7c, + 0xa1, 0xa5, 0x8f, 0x5a, 0xa3, 0xd6, 0xa0, 0x10, 0xd3, 0x95, 0x52, 0xc2, 0x64, 0x2f, 0xe4, 0x0d, + 0x98, 0x33, 0xad, 0xf7, 0x27, 0xbd, 0x91, 0xd5, 0xc6, 0xee, 0x15, 0xe2, 0xba, 0x5a, 0xca, 0xce, + 0x6a, 0x01, 0xab, 0xcd, 0x30, 0x9a, 0x99, 0x0f, 0xad, 0x96, 0xeb, 0x99, 0x27, 0xf4, 0xd8, 0x73, + 0xcc, 0x05, 0x34, 0x35, 0x6f, 0x0c, 0xdd, 0x9e, 0x63, 0xb7, 0xfa, 0xcc, 0x3c, 0xa9, 0x2b, 0x52, + 0xf3, 0x10, 0x9a, 0x7c, 0x11, 0xf2, 0xf5, 0xe6, 0x1d, 0xc7, 0xe9, 0x37, 0x47, 0xbc, 0x57, 0x05, + 0xd0, 0xd5, 0x52, 0xda, 0x9c, 0xab, 0xd3, 0x52, 0xaf, 0xab, 0xa4, 0x04, 0x5a, 0xbd, 0xb9, 0x69, + 0xbb, 0x95, 0x72, 0x00, 0xcc, 0xea, 0x6a, 0x29, 0x61, 0xce, 0xd7, 0xb1, 0x78, 0x0a, 0x59, 0x35, + 0x02, 0x64, 0x4e, 0x57, 0x4b, 0x31, 0x86, 0xac, 0x1a, 0x3e, 0xf2, 0x35, 0x20, 0xf5, 0x66, 0xbd, + 0x77, 0x68, 0xb5, 0x45, 0xd6, 0x39, 0x5d, 0x2d, 0xa5, 0x4c, 0xad, 0xce, 0x2b, 0x66, 0xa0, 0x45, + 0xe6, 0x79, 0x5d, 0x2d, 0x25, 0x3d, 0xb4, 0xc0, 0x7d, 0x05, 0x16, 0xea, 0xcd, 0x77, 0x7a, 0xe1, + 0x0e, 0xe7, 0x75, 0xb5, 0x34, 0x67, 0xe6, 0xeb, 0xac, 0x7c, 0x1a, 0x2b, 0x12, 0x6b, 0xba, 0x5a, + 0x8a, 0x73, 0xac, 0xc0, 0x8b, 0xa3, 0xab, 0xf7, 0x9d, 0x96, 0x1b, 0x40, 0x17, 0x74, 0xb5, 0xa4, + 0x9a, 0xf3, 0x75, 0x2c, 0x0e, 0xb3, 0xde, 0x73, 0x26, 0x7b, 0x7d, 0x2b, 0x80, 0x12, 0x5d, 0x2d, + 0x29, 0x66, 0xbe, 0xce, 0xca, 0xc3, 0xd8, 0x1d, 0x77, 0xd4, 0xb3, 0xbb, 0x01, 0xf6, 0x34, 0xea, + 0x38, 0x5f, 0x67, 0xe5, 0xe1, 0x1e, 0xdc, 0x39, 0x72, 0xad, 0x71, 0x00, 0xb5, 0x74, 0xb5, 0x94, + 0x33, 0xe7, 0xeb, 0x58, 0x1c, 
0x61, 0x8d, 0xcc, 0x41, 0x47, 0x57, 0x4b, 0x0b, 0x94, 0x75, 0xc6, + 0x1c, 0xec, 0x44, 0xe6, 0xa0, 0xab, 0xab, 0x25, 0xc2, 0xb1, 0xc2, 0x1c, 0x2c, 0xc3, 0xe9, 0x7a, + 0x73, 0xa7, 0x13, 0x5d, 0xb8, 0x03, 0x5d, 0x2d, 0xe5, 0xcd, 0x85, 0xba, 0x57, 0x33, 0x0b, 0x2f, + 0xb2, 0xf7, 0x74, 0xb5, 0xa4, 0xf9, 0x78, 0x81, 0x5f, 0xd4, 0x24, 0x93, 0x7a, 0x61, 0x51, 0x8f, + 0x09, 0x9a, 0x64, 0x85, 0x61, 0x4d, 0x72, 0xe0, 0x0b, 0x7a, 0x4c, 0xd4, 0x64, 0x04, 0x89, 0xcd, + 0x73, 0xe4, 0x19, 0x3d, 0x26, 0x6a, 0x92, 0x23, 0x23, 0x9a, 0xe4, 0xd8, 0xb3, 0x7a, 0x2c, 0xac, + 0xc9, 0x29, 0xb4, 0xc8, 0x5c, 0xd0, 0x63, 0x61, 0x4d, 0x72, 0x74, 0x58, 0x93, 0x1c, 0x7c, 0x4e, + 0x8f, 0x85, 0x34, 0x19, 0xc5, 0x8a, 0xc4, 0x4b, 0x7a, 0x2c, 0xa4, 0x49, 0x71, 0x74, 0x9e, 0x26, + 0x39, 0xf4, 0xbc, 0x1e, 0x13, 0x35, 0x29, 0xb2, 0xfa, 0x9a, 0xe4, 0xd0, 0x17, 0xf5, 0x58, 0x48, + 0x93, 0x22, 0xd6, 0xd7, 0x24, 0xc7, 0x5e, 0xd0, 0x63, 0x21, 0x4d, 0x72, 0xec, 0xab, 0xa2, 0x26, + 0x39, 0xf4, 0x43, 0x45, 0x8f, 0x89, 0xa2, 0xe4, 0xd0, 0xab, 0x21, 0x51, 0x72, 0xec, 0x47, 0x14, + 0x2b, 0xaa, 0x32, 0x0a, 0x16, 0x67, 0xe1, 0x63, 0x0a, 0x16, 0x65, 0xc9, 0xc1, 0xd7, 0x23, 0xb2, + 0xe4, 0xf0, 0x4f, 0x28, 0x3c, 0xac, 0xcb, 0x69, 0x03, 0x91, 0xff, 0x53, 0x6a, 0x10, 0x16, 0x26, + 0x37, 0x08, 0x84, 0xe9, 0x70, 0x27, 0x5a, 0xb8, 0xa8, 0x2b, 0xbe, 0x30, 0x3d, 0xcf, 0x2a, 0x0a, + 0xd3, 0x07, 0x5e, 0xc2, 0x90, 0xc1, 0x85, 0x39, 0x85, 0xac, 0x1a, 0x01, 0x52, 0xd7, 0x95, 0x40, + 0x98, 0x3e, 0x32, 0x24, 0x4c, 0x1f, 0x7b, 0x59, 0x57, 0x44, 0x61, 0xce, 0x40, 0x8b, 0xcc, 0x45, + 0x5d, 0x11, 0x85, 0xe9, 0xa3, 0x45, 0x61, 0xfa, 0xe0, 0x2f, 0xe8, 0x8a, 0x20, 0xcc, 0x69, 0xac, + 0x48, 0xfc, 0x92, 0xae, 0x08, 0xc2, 0x0c, 0x8f, 0x8e, 0x09, 0xd3, 0x87, 0xbe, 0xac, 0x2b, 0x81, + 0x30, 0xc3, 0xac, 0x5c, 0x98, 0x3e, 0xf4, 0x8b, 0xba, 0x22, 0x08, 0x33, 0x8c, 0xe5, 0xc2, 0xf4, + 0xb1, 0xaf, 0x60, 0x9c, 0xf6, 0x84, 0xe9, 0x63, 0x05, 0x61, 0xfa, 0xd0, 0xdf, 0xa1, 0x31, 0xdd, + 0x17, 0xa6, 0x0f, 0x15, 0x85, 0xe9, 0x63, 0x7f, 0x97, 0x62, 0x03, 0x61, 0x4e, 0x83, 0xc5, 0x59, + 0xf8, 0x3d, 0x0a, 0x0e, 0x84, 0xe9, 0x83, 0xc3, 0xc2, 0xf4, 0xe1, 0xbf, 0x4f, 0xe1, 0xa2, 0x30, + 0x67, 0x19, 0x88, 0xfc, 0x7f, 0x40, 0x0d, 0x44, 0x61, 0xfa, 0x06, 0xcb, 0x38, 0x4c, 0x2a, 0xcc, + 0xb6, 0xd5, 0x69, 0x4d, 0xfa, 0x54, 0xc6, 0x25, 0xaa, 0xcc, 0x5a, 0xdc, 0x1d, 0x4d, 0x2c, 0x3a, + 0x56, 0xc7, 0xe9, 0xdf, 0xf3, 0xea, 0xc8, 0x32, 0xed, 0x3e, 0x13, 0x68, 0x60, 0xf0, 0x2a, 0x55, + 0x68, 0x4d, 0xad, 0x94, 0xcd, 0x3c, 0x53, 0xe9, 0x34, 0xbe, 0x6a, 0x08, 0xf8, 0x2b, 0x54, 0xa7, + 0x35, 0xb5, 0x6a, 0x30, 0x7c, 0xd5, 0x08, 0xf0, 0x15, 0x3a, 0x00, 0x4f, 0xac, 0x81, 0xc5, 0x55, + 0xaa, 0xd6, 0x5a, 0xac, 0x52, 0x5e, 0x31, 0x17, 0x3c, 0xc9, 0xce, 0x32, 0x0a, 0x35, 0xf3, 0x1a, + 0x15, 0x6d, 0x2d, 0x56, 0x35, 0x7c, 0x23, 0xb1, 0xa5, 0x32, 0x15, 0x3a, 0x97, 0x6e, 0x60, 0x73, + 0x8d, 0x6a, 0xb7, 0x16, 0xaf, 0x94, 0x57, 0x56, 0x4c, 0x8d, 0x2b, 0x78, 0x86, 0x4d, 0xa8, 0x9d, + 0x65, 0xaa, 0xe1, 0x5a, 0xbc, 0x6a, 0xf8, 0x36, 0xe1, 0x76, 0x16, 0x3c, 0x29, 0x07, 0x26, 0xd7, + 0xa9, 0x96, 0x6b, 0xc9, 0xca, 0xaa, 0xb1, 0xba, 0x76, 0xcb, 0xcc, 0x33, 0x4d, 0x07, 0x36, 0x06, + 0x6d, 0x87, 0x8b, 0x3a, 0x30, 0x5a, 0xa1, 0xaa, 0xae, 0x25, 0xcb, 0x37, 0x56, 0x6f, 0x96, 0x6f, + 0x9a, 0x1a, 0x57, 0x77, 0x60, 0xf5, 0x26, 0xb5, 0xe2, 0xf2, 0x0e, 0xac, 0x56, 0xa9, 0xbe, 0x6b, + 0xda, 0x81, 0xd5, 0xef, 0x3b, 0xaf, 0xe9, 0xc5, 0xa7, 0xce, 0xa8, 0xdf, 0xbe, 0x5c, 0x04, 0x53, + 0xe3, 0x8a, 0x17, 0x5b, 0x5d, 0xf0, 0x24, 0x1f, 0x98, 0xff, 0x2a, 0x3d, 0xb1, 0xe6, 0x6a, 0xa9, + 0x3b, 0xbd, 0xae, 0xed, 0x8c, 0x2d, 0x33, 0xcf, 0xc4, 
0x1f, 0x99, 0x93, 0x9d, 0xe8, 0x3c, 0x7e, + 0x85, 0x9a, 0x2d, 0xd4, 0x62, 0xd7, 0x2a, 0x65, 0xda, 0xd2, 0xac, 0x79, 0xdc, 0x89, 0xce, 0xe3, + 0xaf, 0x51, 0x1b, 0x52, 0x8b, 0x5d, 0xab, 0x1a, 0xdc, 0x46, 0x9c, 0xc7, 0x2a, 0x2c, 0x0a, 0x7b, + 0x21, 0xb0, 0xfa, 0x75, 0x6a, 0x95, 0x67, 0x2d, 0x11, 0x7f, 0x47, 0xcc, 0xb4, 0x0b, 0xb5, 0xf6, + 0x1b, 0xd4, 0x4e, 0x63, 0xad, 0x11, 0x7f, 0x63, 0x04, 0x76, 0x37, 0xe0, 0x4c, 0xe4, 0x2c, 0xd1, + 0x1c, 0xb6, 0xf6, 0x9f, 0x58, 0xed, 0x42, 0x99, 0x1e, 0x29, 0xee, 0xa8, 0x9a, 0x62, 0x9e, 0x0e, + 0x1d, 0x2b, 0x1e, 0x61, 0x35, 0xb9, 0x05, 0x67, 0xa3, 0x87, 0x0b, 0xcf, 0xb2, 0x42, 0xcf, 0x18, + 0x68, 0xb9, 0x18, 0x3e, 0x67, 0x44, 0x4c, 0x85, 0xa0, 0xe2, 0x99, 0x1a, 0xf4, 0xd0, 0x11, 0x98, + 0x06, 0xb1, 0x85, 0x9b, 0xbe, 0x01, 0xe7, 0xa6, 0x8f, 0x1f, 0x9e, 0xf1, 0x1a, 0x3d, 0x85, 0xa0, + 0xf1, 0x99, 0xe8, 0x49, 0x64, 0xca, 0x7c, 0x46, 0xdb, 0x55, 0x7a, 0x2c, 0x11, 0xcd, 0xa7, 0x5a, + 0x7f, 0x1d, 0x0a, 0x53, 0x07, 0x14, 0xcf, 0xfa, 0x06, 0x3d, 0xa7, 0xa0, 0xf5, 0x0b, 0x91, 0xb3, + 0x4a, 0xd4, 0x78, 0x46, 0xd3, 0x37, 0xe9, 0xc1, 0x45, 0x30, 0x9e, 0x6a, 0x19, 0xa7, 0x2c, 0x7c, + 0x84, 0xf1, 0x6c, 0x6f, 0xd1, 0x93, 0x0c, 0x9f, 0xb2, 0xd0, 0x69, 0x46, 0x6c, 0x37, 0x72, 0xa6, + 0xf1, 0x6c, 0x6b, 0xf4, 0x68, 0xc3, 0xdb, 0x0d, 0x1f, 0x6f, 0xb8, 0xf1, 0xcf, 0x50, 0xe3, 0x9d, + 0xd9, 0x23, 0xfe, 0x51, 0x8c, 0x1e, 0x4a, 0xb8, 0xf5, 0xce, 0xac, 0x21, 0xfb, 0xd6, 0x33, 0x86, + 0xfc, 0x63, 0x6a, 0x4d, 0x04, 0xeb, 0xa9, 0x31, 0xbf, 0x05, 0x4b, 0x33, 0xce, 0x2b, 0x9e, 0xfd, + 0x4f, 0xa8, 0x7d, 0x1e, 0xed, 0xcf, 0x4e, 0x1d, 0x5d, 0xa6, 0x19, 0x66, 0xf4, 0xe0, 0xa7, 0x94, + 0x41, 0x0b, 0x31, 0x4c, 0xf5, 0xa1, 0x0e, 0x73, 0xde, 0x79, 0xbc, 0x3b, 0x72, 0x26, 0xc3, 0x42, + 0x5d, 0x57, 0x4b, 0x50, 0xd6, 0x67, 0x64, 0xc7, 0xde, 0xf1, 0x7c, 0x83, 0xe2, 0xcc, 0xb0, 0x19, + 0xe3, 0x61, 0xcc, 0x8c, 0xe7, 0x91, 0x1e, 0x7b, 0x26, 0x0f, 0xc3, 0xf9, 0x3c, 0x82, 0x19, 0xe5, + 0xf1, 0xc2, 0x1d, 0xe3, 0x79, 0xac, 0x2b, 0xcf, 0xe0, 0xf1, 0x82, 0x1f, 0xe7, 0x09, 0x99, 0x2d, + 0xad, 0x05, 0x39, 0x39, 0xd6, 0x93, 0x97, 0xa2, 0x49, 0xfa, 0x06, 0x66, 0x57, 0xe1, 0x42, 0x66, + 0x26, 0x74, 0x6f, 0xda, 0xec, 0xed, 0x67, 0x98, 0x85, 0x7a, 0x33, 0x6d, 0xf6, 0x73, 0x33, 0xcc, + 0x8a, 0xbf, 0xa9, 0x40, 0xfc, 0xc1, 0xe6, 0xf6, 0x3d, 0x92, 0x86, 0xf8, 0xbb, 0x8d, 0xcd, 0x7b, + 0xda, 0x29, 0xfa, 0x74, 0xa7, 0xd1, 0x78, 0xa8, 0x29, 0x24, 0x03, 0x89, 0x3b, 0x5f, 0xda, 0x5d, + 0xdf, 0xd1, 0x54, 0x92, 0x87, 0x6c, 0x7d, 0x73, 0x7b, 0x63, 0xdd, 0x7c, 0x64, 0x6e, 0x6e, 0xef, + 0x6a, 0x31, 0x5a, 0x57, 0x7f, 0xd8, 0xb8, 0xbd, 0xab, 0xc5, 0x49, 0x0a, 0x62, 0xb4, 0x2c, 0x41, + 0x00, 0x92, 0x3b, 0xbb, 0xe6, 0xe6, 0xf6, 0x86, 0x96, 0xa4, 0x2c, 0xbb, 0x9b, 0x5b, 0xeb, 0x5a, + 0x8a, 0x22, 0x77, 0xdf, 0x79, 0xf4, 0x70, 0x5d, 0x4b, 0xd3, 0xc7, 0xdb, 0xa6, 0x79, 0xfb, 0x4b, + 0x5a, 0x86, 0x1a, 0x6d, 0xdd, 0x7e, 0xa4, 0x01, 0x56, 0xdf, 0xbe, 0xf3, 0x70, 0x5d, 0xcb, 0x92, + 0x1c, 0xa4, 0xeb, 0xef, 0x6c, 0xdf, 0xdd, 0xdd, 0x6c, 0x6c, 0x6b, 0xb9, 0xe2, 0x2f, 0x42, 0x81, + 0x4d, 0x73, 0x68, 0x16, 0xd9, 0x95, 0xc1, 0x5b, 0x90, 0x60, 0x6b, 0xa3, 0xa0, 0x56, 0xae, 0x4c, + 0xaf, 0xcd, 0xb4, 0xd1, 0x32, 0x5b, 0x25, 0x66, 0xb8, 0x74, 0x01, 0x12, 0x6c, 0x9e, 0x16, 0x21, + 0xc1, 0xe6, 0x47, 0xc5, 0xab, 0x04, 0xf6, 0x52, 0xfc, 0x2d, 0x15, 0x60, 0xc3, 0xd9, 0x79, 0xd2, + 0x1b, 0xe2, 0xc5, 0xcd, 0x05, 0x80, 0xf1, 0x93, 0xde, 0xb0, 0x89, 0x3b, 0x90, 0x5f, 0x3a, 0x64, + 0x68, 0x09, 0xfa, 0x5e, 0x72, 0x19, 0x72, 0x58, 0xcd, 0xb7, 0x08, 0xde, 0x35, 0xa4, 0xcc, 0x2c, + 0x2d, 0xe3, 0x4e, 0x32, 0x0c, 0xa9, 0x1a, 0x78, 0xc5, 0x90, 0x14, 0x20, 0x55, 
0x83, 0x5c, 0x02, + 0x7c, 0x6d, 0x8e, 0x31, 0x9a, 0xe2, 0xb5, 0x42, 0xc6, 0xc4, 0x76, 0x59, 0x7c, 0x25, 0x6f, 0x02, + 0xb6, 0xc9, 0x46, 0x9e, 0x9f, 0xb5, 0x4b, 0xbc, 0x0e, 0x2f, 0xd3, 0x07, 0x36, 0xde, 0xc0, 0x64, + 0xa9, 0x01, 0x19, 0xbf, 0x9c, 0xb6, 0x86, 0xa5, 0x7c, 0x4c, 0x1a, 0x8e, 0x09, 0xb0, 0xc8, 0x1f, + 0x14, 0x03, 0xf0, 0xfe, 0x2c, 0x60, 0x7f, 0x98, 0x11, 0xeb, 0x50, 0xf1, 0x02, 0xcc, 0x6d, 0x3b, + 0x36, 0xdb, 0xc7, 0x38, 0x4f, 0x39, 0x50, 0x5a, 0x05, 0x05, 0xf3, 0x5f, 0xa5, 0x55, 0xbc, 0x08, + 0x20, 0xd4, 0x69, 0xa0, 0xec, 0xb1, 0x3a, 0xf4, 0x07, 0xca, 0x5e, 0xf1, 0x2a, 0x24, 0xb7, 0x5a, + 0x87, 0xbb, 0xad, 0x2e, 0xb9, 0x0c, 0xd0, 0x6f, 0x8d, 0xdd, 0x66, 0x07, 0x57, 0xe2, 0xf3, 0xcf, + 0x3f, 0xff, 0x5c, 0xc1, 0xc3, 0x74, 0x86, 0x96, 0xb2, 0x15, 0x19, 0x03, 0x34, 0xfa, 0xed, 0x2d, + 0x6b, 0x3c, 0x6e, 0x75, 0x2d, 0xb2, 0x06, 0x49, 0xdb, 0x1a, 0xd3, 0xe8, 0xab, 0xe0, 0x5d, 0xd3, + 0x05, 0x71, 0x1e, 0x02, 0xdc, 0xf2, 0x36, 0x82, 0x4c, 0x0e, 0x26, 0x1a, 0xc4, 0xec, 0xc9, 0x00, + 0x6f, 0xd4, 0x12, 0x26, 0x7d, 0x5c, 0x7a, 0x11, 0x92, 0x0c, 0x43, 0x08, 0xc4, 0xed, 0xd6, 0xc0, + 0x2a, 0xb0, 0x96, 0xf1, 0xb9, 0xf8, 0x15, 0x05, 0x60, 0xdb, 0x7a, 0x7a, 0xac, 0x56, 0x03, 0x9c, + 0xa4, 0xd5, 0x18, 0x6b, 0xf5, 0x75, 0x59, 0xab, 0x54, 0x6d, 0x1d, 0xc7, 0x69, 0x37, 0xd9, 0x42, + 0xb3, 0xeb, 0xbf, 0x0c, 0x2d, 0xc1, 0x95, 0x2b, 0x3e, 0x86, 0xdc, 0xa6, 0x6d, 0x5b, 0x23, 0xaf, + 0x57, 0x04, 0xe2, 0x07, 0xce, 0xd8, 0xe5, 0x37, 0x91, 0xf8, 0x4c, 0x0a, 0x10, 0x1f, 0x3a, 0x23, + 0x97, 0x8d, 0xb4, 0x16, 0x37, 0x56, 0x56, 0x56, 0x4c, 0x2c, 0x21, 0x2f, 0x42, 0x66, 0xdf, 0xb1, + 0x6d, 0x6b, 0x9f, 0x0e, 0x23, 0x86, 0xa9, 0x63, 0x50, 0x50, 0xfc, 0x65, 0x05, 0x72, 0x0d, 0xf7, + 0x20, 0x20, 0xd7, 0x20, 0xf6, 0xc4, 0x3a, 0xc2, 0xee, 0xc5, 0x4c, 0xfa, 0x48, 0x37, 0xcc, 0xcf, + 0xb7, 0xfa, 0x13, 0x76, 0x2f, 0x99, 0x33, 0xd9, 0x0b, 0x39, 0x03, 0xc9, 0xa7, 0x56, 0xaf, 0x7b, + 0xe0, 0x22, 0xa7, 0x6a, 0xf2, 0x37, 0xb2, 0x0c, 0x89, 0x1e, 0xed, 0x6c, 0x21, 0x8e, 0x33, 0x56, + 0x10, 0x67, 0x4c, 0x1c, 0x85, 0xc9, 0x60, 0x57, 0xd2, 0xe9, 0xb6, 0xf6, 0xc1, 0x07, 0x1f, 0x7c, + 0xa0, 0x16, 0x0f, 0x60, 0xd1, 0xdb, 0xc4, 0xa1, 0xe1, 0x3e, 0x82, 0x42, 0xdf, 0x72, 0x9a, 0x9d, + 0x9e, 0xdd, 0xea, 0xf7, 0x8f, 0x9a, 0x4f, 0x1d, 0xbb, 0xd9, 0xb2, 0x9b, 0xce, 0x78, 0xbf, 0x35, + 0xc2, 0x29, 0x90, 0x35, 0xb2, 0xd8, 0xb7, 0x9c, 0x3a, 0x33, 0x7c, 0xcf, 0xb1, 0x6f, 0xdb, 0x0d, + 0x6a, 0x55, 0xfc, 0x2c, 0x0e, 0x99, 0xad, 0x23, 0x8f, 0x7f, 0x11, 0x12, 0xfb, 0xce, 0xc4, 0x66, + 0xf3, 0x99, 0x30, 0xd9, 0x8b, 0xbf, 0x4e, 0xaa, 0xb0, 0x4e, 0x8b, 0x90, 0x78, 0x7f, 0xe2, 0xb8, + 0x16, 0x0e, 0x39, 0x63, 0xb2, 0x17, 0x3a, 0x63, 0x43, 0xcb, 0x2d, 0xc4, 0xf1, 0x9a, 0x82, 0x3e, + 0x06, 0x73, 0x90, 0x38, 0xd6, 0x1c, 0x90, 0x15, 0x48, 0x3a, 0x74, 0x0d, 0xc6, 0x85, 0x24, 0xde, + 0xc3, 0x86, 0x0c, 0xc4, 0xd5, 0x31, 0x39, 0x8e, 0x3c, 0x80, 0x85, 0xa7, 0x56, 0x73, 0x30, 0x19, + 0xbb, 0xcd, 0xae, 0xd3, 0x6c, 0x5b, 0xd6, 0xd0, 0x1a, 0x15, 0xe6, 0xb0, 0xb5, 0x90, 0x87, 0x98, + 0x35, 0xa1, 0xe6, 0xfc, 0x53, 0x6b, 0x6b, 0x32, 0x76, 0x37, 0x9c, 0x7b, 0x68, 0x47, 0xd6, 0x20, + 0x33, 0xb2, 0xa8, 0x5f, 0xa0, 0x5d, 0xce, 0x4d, 0xf7, 0x20, 0x64, 0x9c, 0x1e, 0x59, 0x43, 0x2c, + 0x20, 0x37, 0x20, 0xbd, 0xd7, 0x7b, 0x62, 0x8d, 0x0f, 0xac, 0x76, 0x21, 0xa5, 0x2b, 0xa5, 0xf9, + 0xf2, 0x79, 0xd1, 0xca, 0x9f, 0xe0, 0xe5, 0xbb, 0x4e, 0xdf, 0x19, 0x99, 0x3e, 0x98, 0xbc, 0x01, + 0x99, 0xb1, 0x33, 0xb0, 0x98, 0xda, 0xd3, 0x18, 0x6c, 0x2f, 0xcd, 0xb6, 0xdc, 0x71, 0x06, 0x96, + 0xe7, 0xd5, 0x3c, 0x0b, 0x72, 0x9e, 0x75, 0x77, 0x8f, 0x26, 0x13, 0x05, 0xc0, 0x0b, 0x1f, 0xda, + 0x29, 
0x4c, 0x2e, 0xc8, 0x12, 0xed, 0x54, 0xb7, 0x43, 0xcf, 0x6c, 0x85, 0x2c, 0xe6, 0xf2, 0xfe, + 0xfb, 0xd2, 0x6b, 0x90, 0xf1, 0x09, 0x03, 0x77, 0xc8, 0x5c, 0x50, 0x06, 0x3d, 0x04, 0x73, 0x87, + 0xcc, 0xff, 0xbc, 0x0c, 0x09, 0xec, 0x38, 0x8d, 0x5c, 0xe6, 0x3a, 0x0d, 0x94, 0x19, 0x48, 0x6c, + 0x98, 0xeb, 0xeb, 0xdb, 0x9a, 0x82, 0x31, 0xf3, 0xe1, 0x3b, 0xeb, 0x9a, 0x2a, 0xe8, 0xf7, 0xb7, + 0x55, 0x88, 0xad, 0x1f, 0xa2, 0x72, 0xda, 0x2d, 0xb7, 0xe5, 0xed, 0x70, 0xfa, 0x4c, 0x6a, 0x90, + 0x19, 0xb4, 0xbc, 0xb6, 0x54, 0x9c, 0xe2, 0x90, 0x2f, 0x59, 0x3f, 0x74, 0x97, 0xb7, 0x5a, 0xac, + 0xe5, 0x75, 0xdb, 0x1d, 0x1d, 0x99, 0xe9, 0x01, 0x7f, 0x5d, 0x7a, 0x1d, 0xe6, 0x42, 0x55, 0xe2, + 0x16, 0x4d, 0xcc, 0xd8, 0xa2, 0x09, 0xbe, 0x45, 0x6b, 0xea, 0x4d, 0xa5, 0x5c, 0x83, 0xf8, 0xc0, + 0x19, 0x59, 0xe4, 0x85, 0x99, 0x13, 0x5c, 0xe8, 0xa2, 0x64, 0xf2, 0x91, 0xae, 0x98, 0x68, 0x53, + 0x7e, 0x15, 0xe2, 0xae, 0x75, 0xe8, 0x3e, 0xcb, 0xf6, 0x80, 0x8d, 0x8f, 0x42, 0xca, 0xd7, 0x20, + 0x69, 0x4f, 0x06, 0x7b, 0xd6, 0xe8, 0x59, 0xe0, 0x1e, 0x76, 0x8c, 0x83, 0x8a, 0xef, 0x82, 0x76, + 0xd7, 0x19, 0x0c, 0xfb, 0xd6, 0xe1, 0xfa, 0xa1, 0x6b, 0xd9, 0xe3, 0x9e, 0x63, 0xd3, 0x31, 0x74, + 0x7a, 0x23, 0x74, 0x6b, 0x38, 0x06, 0x7c, 0xa1, 0x6e, 0x66, 0x6c, 0xed, 0x3b, 0x76, 0x9b, 0x0f, + 0x8d, 0xbf, 0x51, 0xb4, 0x7b, 0xd0, 0x1b, 0x51, 0x8f, 0x46, 0x83, 0x0f, 0x7b, 0x29, 0x6e, 0x40, + 0x9e, 0xa7, 0x61, 0x63, 0xde, 0x70, 0xf1, 0x0a, 0xe4, 0xbc, 0x22, 0xfc, 0xe5, 0x27, 0x0d, 0xf1, + 0xc7, 0xeb, 0x66, 0x43, 0x3b, 0x45, 0xd7, 0xb5, 0xb1, 0xbd, 0xae, 0x29, 0xf4, 0x61, 0xf7, 0xbd, + 0x46, 0x68, 0x2d, 0x5f, 0x84, 0x9c, 0xdf, 0xf7, 0x1d, 0xcb, 0xc5, 0x1a, 0x1a, 0xa5, 0x52, 0x35, + 0x35, 0xad, 0x14, 0x53, 0x90, 0x58, 0x1f, 0x0c, 0xdd, 0xa3, 0xe2, 0x2f, 0x41, 0x96, 0x83, 0x1e, + 0xf6, 0xc6, 0x2e, 0xb9, 0x05, 0xa9, 0x01, 0x1f, 0xaf, 0x82, 0x67, 0xd1, 0xb0, 0xac, 0x03, 0xa4, + 0xf7, 0x6c, 0x7a, 0xf8, 0xa5, 0x0a, 0xa4, 0x04, 0xf7, 0xce, 0x3d, 0x8f, 0x2a, 0x7a, 0x1e, 0xe6, + 0xa3, 0x62, 0x82, 0x8f, 0x2a, 0x6e, 0x41, 0x8a, 0x05, 0xe6, 0x31, 0x1e, 0x37, 0x58, 0xfe, 0xce, + 0x34, 0xc6, 0xc4, 0x97, 0x65, 0x65, 0xec, 0x0c, 0x75, 0x09, 0xb2, 0xb8, 0x67, 0x7c, 0x15, 0x52, + 0x6f, 0x0e, 0x58, 0xc4, 0x14, 0xff, 0x47, 0x09, 0x48, 0x7b, 0x73, 0x45, 0xce, 0x43, 0x92, 0x25, + 0xb1, 0x48, 0xe5, 0x5d, 0xea, 0x24, 0x30, 0x6d, 0x25, 0xe7, 0x21, 0xc5, 0x13, 0x55, 0x1e, 0x70, + 0xd4, 0x4a, 0xd9, 0x4c, 0xb2, 0xc4, 0xd4, 0xaf, 0xac, 0x1a, 0xe8, 0x27, 0xd9, 0x75, 0x4d, 0x92, + 0xa5, 0x9e, 0x44, 0x87, 0x8c, 0x9f, 0x6c, 0x62, 0x88, 0xe0, 0x77, 0x33, 0x69, 0x2f, 0xbb, 0x14, + 0x10, 0x55, 0x03, 0x1d, 0x28, 0xbf, 0x88, 0x49, 0xd7, 0x83, 0x73, 0x53, 0xda, 0x4b, 0x19, 0xf1, + 0x97, 0x27, 0xef, 0xd6, 0x25, 0xc5, 0x93, 0xc4, 0x00, 0x50, 0x35, 0xd0, 0x33, 0x79, 0x57, 0x2c, + 0x29, 0x9e, 0x08, 0x92, 0x4b, 0xb4, 0x8b, 0x98, 0xd8, 0xa1, 0xff, 0x09, 0xee, 0x53, 0x92, 0x2c, + 0xdd, 0x23, 0x97, 0x29, 0x03, 0xcb, 0xde, 0xd0, 0x35, 0x04, 0x97, 0x27, 0x29, 0x9e, 0xd4, 0x91, + 0xab, 0x14, 0xc2, 0xa6, 0xbf, 0x00, 0xcf, 0xb8, 0x29, 0x49, 0xf1, 0x9b, 0x12, 0xa2, 0xd3, 0x06, + 0xd1, 0x43, 0xa1, 0x57, 0x12, 0x6e, 0x45, 0x92, 0xec, 0x56, 0x84, 0x5c, 0x44, 0x3a, 0x36, 0xa8, + 0x5c, 0x70, 0x03, 0x92, 0xe2, 0x59, 0x60, 0x50, 0x8f, 0x67, 0x49, 0xff, 0xb6, 0x23, 0xc5, 0xf3, + 0x3c, 0x72, 0x93, 0xae, 0x17, 0x55, 0x78, 0x61, 0x1e, 0x7d, 0xf1, 0x92, 0x28, 0x3d, 0x6f, 0x55, + 0x99, 0x2b, 0xae, 0x31, 0x37, 0x66, 0x26, 0xea, 0xb8, 0x23, 0x96, 0xa8, 0xe5, 0xa3, 0x9e, 0xdd, + 0x29, 0xe4, 0x71, 0x2e, 0x62, 0x3d, 0xbb, 0x63, 0x26, 0xea, 0xb4, 0x84, 0xa9, 0x60, 0x9b, 0xd6, + 0x69, 0x58, 0x17, 0xbf, 0xc6, 
0x2a, 0x69, 0x11, 0x29, 0x40, 0xa2, 0xde, 0xdc, 0x6e, 0xd9, 0x85, + 0x05, 0x66, 0x67, 0xb7, 0x6c, 0x33, 0x5e, 0xdf, 0x6e, 0xd9, 0xe4, 0x55, 0x88, 0x8d, 0x27, 0x7b, + 0x05, 0x32, 0xfd, 0xb3, 0xe0, 0xce, 0x64, 0xcf, 0xeb, 0x8c, 0x49, 0x31, 0xe4, 0x3c, 0xa4, 0xc7, + 0xee, 0xa8, 0xf9, 0x0b, 0xd6, 0xc8, 0x29, 0x9c, 0xc6, 0x69, 0x3c, 0x65, 0xa6, 0xc6, 0xee, 0xe8, + 0xb1, 0x35, 0x72, 0x8e, 0xe9, 0x83, 0x8b, 0x17, 0x21, 0x2b, 0xf0, 0x92, 0x3c, 0x28, 0x36, 0x3b, + 0xc0, 0xd4, 0x94, 0x1b, 0xa6, 0x62, 0x17, 0xdf, 0x85, 0x9c, 0x97, 0x62, 0xe1, 0x88, 0x0d, 0xba, + 0x9b, 0xfa, 0xce, 0x08, 0x77, 0xe9, 0x7c, 0xf9, 0x62, 0x38, 0x62, 0x06, 0x40, 0x1e, 0xb9, 0x18, + 0xb8, 0xa8, 0x45, 0x3a, 0xa3, 0x14, 0x7f, 0xa0, 0x40, 0x6e, 0xcb, 0x19, 0x05, 0xbf, 0x5f, 0x2c, + 0x42, 0x62, 0xcf, 0x71, 0xfa, 0x63, 0x24, 0x4e, 0x9b, 0xec, 0x85, 0xbc, 0x0c, 0x39, 0x7c, 0xf0, + 0x92, 0x64, 0xd5, 0xbf, 0x05, 0xca, 0x62, 0x39, 0xcf, 0x8b, 0x09, 0xc4, 0x7b, 0xb6, 0x3b, 0xe6, + 0x1e, 0x0d, 0x9f, 0xc9, 0x17, 0x20, 0x4b, 0xff, 0x7a, 0x96, 0x71, 0xff, 0x34, 0x0d, 0xb4, 0x98, + 0x1b, 0xbe, 0x02, 0x73, 0xa8, 0x01, 0x1f, 0x96, 0xf2, 0x6f, 0x7c, 0x72, 0xac, 0x82, 0x03, 0x0b, + 0x90, 0x62, 0x0e, 0x61, 0x8c, 0x3f, 0xf8, 0x66, 0x4c, 0xef, 0x95, 0xba, 0x59, 0x4c, 0x54, 0xd8, + 0x09, 0x24, 0x65, 0xf2, 0xb7, 0xe2, 0x5d, 0x48, 0x63, 0xb8, 0x6c, 0xf4, 0xdb, 0xe4, 0x25, 0x50, + 0xba, 0x05, 0x0b, 0xc3, 0xf5, 0x99, 0x50, 0x16, 0xc2, 0x01, 0xcb, 0x1b, 0xa6, 0xd2, 0x5d, 0x5a, + 0x00, 0x65, 0x83, 0xa6, 0x05, 0x87, 0xdc, 0x61, 0x2b, 0x87, 0xc5, 0xb7, 0x39, 0xc9, 0xb6, 0xf5, + 0x54, 0x4e, 0xb2, 0x6d, 0x3d, 0x65, 0x24, 0x97, 0xa6, 0x48, 0xe8, 0xdb, 0x11, 0xff, 0x0d, 0x5c, + 0x39, 0x2a, 0x56, 0x60, 0x0e, 0x37, 0x6a, 0xcf, 0xee, 0x3e, 0x72, 0x7a, 0x36, 0x26, 0x22, 0x1d, + 0x3c, 0xc0, 0x29, 0xa6, 0xd2, 0xa1, 0xeb, 0x60, 0x1d, 0xb6, 0xf6, 0xd9, 0x71, 0x38, 0x6d, 0xb2, + 0x97, 0xe2, 0xf7, 0xe3, 0x30, 0xcf, 0x9d, 0xec, 0x7b, 0x3d, 0xf7, 0x60, 0xab, 0x35, 0x24, 0xdb, + 0x90, 0xa3, 0xfe, 0xb5, 0x39, 0x68, 0x0d, 0x87, 0x74, 0x23, 0x2b, 0x18, 0x9a, 0xaf, 0xce, 0x70, + 0xdb, 0xdc, 0x62, 0x79, 0xbb, 0x35, 0xb0, 0xb6, 0x18, 0x9a, 0x05, 0xea, 0xac, 0x1d, 0x94, 0x90, + 0x07, 0x90, 0x1d, 0x8c, 0xbb, 0x3e, 0x1d, 0x8b, 0xf4, 0x57, 0x24, 0x74, 0x5b, 0xe3, 0x6e, 0x88, + 0x0d, 0x06, 0x7e, 0x01, 0xed, 0x1c, 0xf5, 0xce, 0x3e, 0x5b, 0xec, 0xb9, 0x9d, 0xa3, 0xae, 0x24, + 0xdc, 0xb9, 0xbd, 0xa0, 0x84, 0xd4, 0x01, 0xe8, 0x56, 0x73, 0x1d, 0x9a, 0xe1, 0xa1, 0x96, 0xb2, + 0xe5, 0x92, 0x84, 0x6d, 0xc7, 0x1d, 0xed, 0x3a, 0x3b, 0xee, 0x88, 0x1f, 0x48, 0xc6, 0xfc, 0x75, + 0xe9, 0x4d, 0xd0, 0xa2, 0xb3, 0xf0, 0xbc, 0x33, 0x49, 0x46, 0x38, 0x93, 0x2c, 0xfd, 0x2c, 0xe4, + 0x23, 0xc3, 0x16, 0xcd, 0x09, 0x33, 0xbf, 0x2e, 0x9a, 0x67, 0xcb, 0xe7, 0x42, 0xdf, 0x68, 0x88, + 0x4b, 0x2f, 0x32, 0xbf, 0x09, 0x5a, 0x74, 0x0a, 0x44, 0xea, 0xb4, 0x24, 0xa1, 0x41, 0xfb, 0xd7, + 0x61, 0x2e, 0x34, 0x68, 0xd1, 0x38, 0xf3, 0x9c, 0x61, 0x15, 0x7f, 0x25, 0x01, 0x89, 0x86, 0x6d, + 0x39, 0x1d, 0x72, 0x36, 0x1c, 0x3b, 0xef, 0x9f, 0xf2, 0xe2, 0xe6, 0xb9, 0x48, 0xdc, 0xbc, 0x7f, + 0xca, 0x8f, 0x9a, 0xe7, 0x22, 0x51, 0xd3, 0xab, 0xaa, 0x1a, 0xe4, 0xc2, 0x54, 0xcc, 0xbc, 0x7f, + 0x4a, 0x08, 0x98, 0x17, 0xa6, 0x02, 0x66, 0x50, 0x5d, 0x35, 0xa8, 0x83, 0x0d, 0x47, 0xcb, 0xfb, + 0xa7, 0x82, 0x48, 0x79, 0x3e, 0x1a, 0x29, 0xfd, 0xca, 0xaa, 0xc1, 0xba, 0x24, 0x44, 0x49, 0xec, + 0x12, 0x8b, 0x8f, 0xe7, 0xa3, 0xf1, 0x11, 0xed, 0x78, 0x64, 0x3c, 0x1f, 0x8d, 0x8c, 0x58, 0xc9, + 0x23, 0xe1, 0xb9, 0x48, 0x24, 0x44, 0x52, 0x16, 0x02, 0xcf, 0x47, 0x43, 0x20, 0xb3, 0x13, 0x7a, + 0x2a, 0xc6, 0x3f, 0xbf, 0xb2, 0x6a, 0x10, 0x23, 0x12, 
0xfc, 0x64, 0x89, 0x08, 0xae, 0x06, 0x86, + 0x81, 0x2a, 0x9d, 0x38, 0xef, 0x80, 0x9a, 0x97, 0x7e, 0xc2, 0x82, 0x33, 0xea, 0x1d, 0xd0, 0x0c, + 0x48, 0x75, 0x78, 0xae, 0xae, 0xa1, 0x27, 0x0b, 0x89, 0x13, 0x25, 0xb0, 0x5c, 0x6f, 0xa2, 0x47, + 0xa3, 0xa3, 0xeb, 0xb0, 0x84, 0xa3, 0x04, 0x73, 0xf5, 0xe6, 0xc3, 0xd6, 0xa8, 0x4b, 0xa1, 0xbb, + 0xad, 0xae, 0x7f, 0xeb, 0x41, 0x55, 0x90, 0xad, 0xf3, 0x9a, 0xdd, 0x56, 0x97, 0x9c, 0xf1, 0x24, + 0xd6, 0xc6, 0x5a, 0x85, 0x8b, 0x6c, 0xe9, 0x2c, 0x9d, 0x3a, 0x46, 0x86, 0xbe, 0x71, 0x81, 0xfb, + 0xc6, 0x3b, 0x29, 0x48, 0x4c, 0xec, 0x9e, 0x63, 0xdf, 0xc9, 0x40, 0xca, 0x75, 0x46, 0x83, 0x96, + 0xeb, 0x14, 0x7f, 0xa8, 0x00, 0xdc, 0x75, 0x06, 0x83, 0x89, 0xdd, 0x7b, 0x7f, 0x62, 0x91, 0x8b, + 0x90, 0x1d, 0xb4, 0x9e, 0x58, 0xcd, 0x81, 0xd5, 0xdc, 0x1f, 0x79, 0xbb, 0x21, 0x43, 0x8b, 0xb6, + 0xac, 0xbb, 0xa3, 0x23, 0x52, 0xf0, 0x0e, 0xf0, 0xa8, 0x20, 0x14, 0x26, 0x3f, 0xd0, 0x2f, 0xf2, + 0xe3, 0x68, 0x92, 0xaf, 0xa4, 0x77, 0x20, 0x65, 0x49, 0x4e, 0x8a, 0xaf, 0x21, 0x4b, 0x73, 0xce, + 0x42, 0xd2, 0xb5, 0x06, 0xc3, 0xe6, 0x3e, 0x0a, 0x86, 0x8a, 0x22, 0x41, 0xdf, 0xef, 0x92, 0xeb, + 0x10, 0xdb, 0x77, 0xfa, 0x28, 0x95, 0xe7, 0xae, 0x0e, 0x45, 0x92, 0x57, 0x20, 0x36, 0x18, 0x33, + 0xf9, 0x64, 0xcb, 0xa7, 0x43, 0x27, 0x08, 0x16, 0xb2, 0x28, 0x70, 0x30, 0xee, 0xfa, 0x63, 0xbf, + 0x92, 0x87, 0x58, 0xbd, 0xd1, 0xa0, 0xa7, 0x82, 0x7a, 0xa3, 0xb1, 0xaa, 0x29, 0xb5, 0x55, 0x48, + 0x77, 0x47, 0x96, 0x45, 0x1d, 0xc5, 0xb3, 0xb2, 0x92, 0x2f, 0x63, 0x14, 0xf4, 0x61, 0xb5, 0xb7, + 0x21, 0xb5, 0xcf, 0xf2, 0x12, 0xf2, 0xcc, 0x1c, 0xbc, 0xf0, 0xc7, 0xec, 0x2e, 0xe8, 0x45, 0x11, + 0x10, 0xcd, 0x66, 0x4c, 0x8f, 0xa7, 0xb6, 0x0b, 0x99, 0x51, 0xf3, 0xf9, 0xa4, 0x1f, 0xb2, 0xc8, + 0x23, 0x27, 0x4d, 0x8f, 0x78, 0x51, 0x6d, 0x03, 0x16, 0x6c, 0xc7, 0xfb, 0x49, 0xaa, 0xd9, 0xe6, + 0xfb, 0x6e, 0xd6, 0x91, 0xcf, 0x6b, 0xc0, 0x62, 0x3f, 0x6c, 0xdb, 0x0e, 0xaf, 0x60, 0x7b, 0xb5, + 0xb6, 0x0e, 0x9a, 0x40, 0xd4, 0x61, 0x9b, 0x5b, 0xc6, 0xd3, 0x61, 0xbf, 0xa5, 0xfb, 0x3c, 0xe8, + 0x0f, 0x22, 0x34, 0x7c, 0xc7, 0xca, 0x68, 0xba, 0xec, 0xd3, 0x04, 0x9f, 0x06, 0x9d, 0xe0, 0x34, + 0x0d, 0xf5, 0x5f, 0x32, 0x9a, 0x03, 0xf6, 0xdd, 0x82, 0x48, 0x53, 0x35, 0x22, 0xb3, 0x33, 0x39, + 0x46, 0x77, 0x7a, 0xec, 0xc3, 0x03, 0x9f, 0x87, 0xb9, 0xc7, 0x19, 0x44, 0xcf, 0xeb, 0xd0, 0x97, + 0xd9, 0x57, 0x09, 0x21, 0xa2, 0xa9, 0x1e, 0x8d, 0x8f, 0xd1, 0xa3, 0x27, 0xec, 0x23, 0x00, 0x9f, + 0x68, 0x67, 0x56, 0x8f, 0xc6, 0xc7, 0xe8, 0x51, 0x9f, 0x7d, 0x20, 0x10, 0x22, 0xaa, 0x1a, 0xb5, + 0x4d, 0x20, 0xe2, 0xc2, 0xf3, 0x58, 0x22, 0x65, 0x1a, 0xb0, 0x0f, 0x3f, 0x82, 0xa5, 0x67, 0x46, + 0xb3, 0xa8, 0x9e, 0xd7, 0x29, 0x9b, 0x7d, 0x15, 0x12, 0xa6, 0xaa, 0x1a, 0xb5, 0x07, 0x70, 0x5a, + 0x1c, 0xde, 0xb1, 0xba, 0xe5, 0xb0, 0x4f, 0x1a, 0x82, 0x01, 0x72, 0xab, 0x99, 0x64, 0xcf, 0xeb, + 0xd8, 0x90, 0x7d, 0xee, 0x10, 0x21, 0xab, 0x1a, 0xb5, 0xbb, 0x90, 0x17, 0xc8, 0xf6, 0x30, 0x0b, + 0x96, 0x11, 0xbd, 0xcf, 0x3e, 0xd2, 0xf1, 0x89, 0x68, 0xfc, 0x8f, 0xae, 0x1e, 0x8b, 0x88, 0x52, + 0x9a, 0x11, 0xfb, 0xc6, 0x24, 0xe8, 0x0f, 0xda, 0x44, 0x36, 0xca, 0x1e, 0x0b, 0x9f, 0x32, 0x9e, + 0x31, 0xfb, 0xfe, 0x24, 0xe8, 0x0e, 0x35, 0xa9, 0x0d, 0x42, 0x83, 0xb2, 0x68, 0x50, 0x94, 0xb2, + 0xb8, 0xe8, 0xbf, 0x4b, 0x12, 0xc8, 0xb2, 0x78, 0xd9, 0x22, 0x0c, 0x9f, 0xbe, 0xd6, 0x1e, 0xc0, + 0xfc, 0x49, 0x5c, 0xd6, 0x87, 0x0a, 0xcb, 0xbc, 0x2b, 0xcb, 0x34, 0x39, 0x37, 0xe7, 0xda, 0x21, + 0xcf, 0xb5, 0x01, 0x73, 0x27, 0x70, 0x5b, 0x1f, 0x29, 0x2c, 0x7f, 0xa5, 0x5c, 0x66, 0xae, 0x1d, + 0xf6, 0x5d, 0x73, 0x27, 0x70, 0x5c, 0x1f, 0x2b, 0xec, 0xc2, 0xc3, 0x28, 0xfb, 
0x34, 0x9e, 0xef, + 0x9a, 0x3b, 0x81, 0xe3, 0xfa, 0x84, 0xe5, 0xa7, 0xaa, 0x51, 0x11, 0x69, 0xd0, 0x53, 0xcc, 0x9f, + 0xc4, 0x71, 0x7d, 0xaa, 0xe0, 0x05, 0x88, 0x6a, 0x18, 0xfe, 0xfc, 0xf8, 0xbe, 0x6b, 0xfe, 0x24, + 0x8e, 0xeb, 0xab, 0x0a, 0x5e, 0x94, 0xa8, 0xc6, 0x5a, 0x88, 0x28, 0xdc, 0xa3, 0xe3, 0x38, 0xae, + 0xaf, 0x29, 0x78, 0x7b, 0xa1, 0x1a, 0x55, 0x9f, 0x68, 0x67, 0xaa, 0x47, 0xc7, 0x71, 0x5c, 0x5f, + 0xc7, 0x6c, 0xa0, 0xa6, 0x1a, 0x37, 0x42, 0x44, 0xe8, 0xbb, 0xf2, 0x27, 0x72, 0x5c, 0xdf, 0x50, + 0xf0, 0xa2, 0x49, 0x35, 0x6e, 0x9a, 0x5e, 0x0f, 0x02, 0xdf, 0x95, 0x3f, 0x91, 0xe3, 0xfa, 0xa6, + 0x82, 0x37, 0x52, 0xaa, 0x71, 0x2b, 0x4c, 0x85, 0xbe, 0x4b, 0x3b, 0x99, 0xe3, 0xfa, 0x4c, 0xc1, + 0xef, 0x4f, 0xd4, 0xb5, 0x15, 0xd3, 0xeb, 0x84, 0xe0, 0xbb, 0xb4, 0x93, 0x39, 0xae, 0x6f, 0x29, + 0xf8, 0x51, 0x8a, 0xba, 0xb6, 0x1a, 0x21, 0xab, 0x1a, 0xb5, 0x75, 0xc8, 0x1d, 0xdf, 0x71, 0x7d, + 0x5b, 0xbc, 0xef, 0xcb, 0xb6, 0x05, 0xef, 0xf5, 0x58, 0x58, 0xbf, 0x63, 0xb8, 0xae, 0xef, 0x60, + 0xd6, 0x54, 0x7b, 0xe1, 0x3e, 0xbb, 0x15, 0x63, 0x26, 0xaf, 0xb5, 0xad, 0xce, 0x1b, 0x1d, 0xc7, + 0x09, 0x96, 0x94, 0x39, 0xb4, 0x46, 0xb0, 0x7b, 0x8e, 0xe1, 0xcd, 0xbe, 0xab, 0xe0, 0x25, 0x5a, + 0x8e, 0x53, 0xa3, 0x85, 0xbf, 0x8f, 0x98, 0x6b, 0xb3, 0x83, 0x31, 0x3f, 0xdf, 0xaf, 0x7d, 0x4f, + 0x39, 0x99, 0x63, 0xab, 0xc5, 0x1a, 0xdb, 0xeb, 0xfe, 0xe4, 0x60, 0xc9, 0x5b, 0x10, 0x3f, 0x2c, + 0xaf, 0xac, 0x86, 0x8f, 0x78, 0xe2, 0x1d, 0x32, 0x73, 0x67, 0xd9, 0xf2, 0x42, 0xe8, 0xb2, 0x7d, + 0x30, 0x74, 0x8f, 0x4c, 0xb4, 0xe4, 0x0c, 0x65, 0x09, 0xc3, 0x47, 0x52, 0x86, 0x32, 0x67, 0xa8, + 0x48, 0x18, 0x3e, 0x96, 0x32, 0x54, 0x38, 0x83, 0x21, 0x61, 0xf8, 0x44, 0xca, 0x60, 0x70, 0x86, + 0x35, 0x09, 0xc3, 0xa7, 0x52, 0x86, 0x35, 0xce, 0x50, 0x95, 0x30, 0x7c, 0x55, 0xca, 0x50, 0xe5, + 0x0c, 0x37, 0x24, 0x0c, 0x5f, 0x93, 0x32, 0xdc, 0xe0, 0x0c, 0x37, 0x25, 0x0c, 0x5f, 0x97, 0x32, + 0xdc, 0xe4, 0x0c, 0xb7, 0x24, 0x0c, 0xdf, 0x90, 0x32, 0xdc, 0x62, 0x0c, 0xab, 0x2b, 0x12, 0x86, + 0x6f, 0xca, 0x18, 0x56, 0x57, 0x38, 0x83, 0x4c, 0x93, 0x9f, 0x49, 0x19, 0xb8, 0x26, 0x57, 0x65, + 0x9a, 0xfc, 0x96, 0x94, 0x81, 0x6b, 0x72, 0x55, 0xa6, 0xc9, 0x6f, 0x4b, 0x19, 0xb8, 0x26, 0x57, + 0x65, 0x9a, 0xfc, 0x8e, 0x94, 0x81, 0x6b, 0x72, 0x55, 0xa6, 0xc9, 0xef, 0x4a, 0x19, 0xb8, 0x26, + 0x57, 0x65, 0x9a, 0xfc, 0x9e, 0x94, 0x81, 0x6b, 0x72, 0x55, 0xa6, 0xc9, 0x3f, 0x91, 0x32, 0x70, + 0x4d, 0xae, 0xca, 0x34, 0xf9, 0xa7, 0x52, 0x06, 0xae, 0xc9, 0x55, 0x99, 0x26, 0xff, 0x4c, 0xca, + 0xc0, 0x35, 0x59, 0x96, 0x69, 0xf2, 0xfb, 0x32, 0x86, 0x32, 0xd7, 0x64, 0x59, 0xa6, 0xc9, 0x3f, + 0x97, 0x32, 0x70, 0x4d, 0x96, 0x65, 0x9a, 0xfc, 0x0b, 0x29, 0x03, 0xd7, 0x64, 0x59, 0xa6, 0xc9, + 0x1f, 0x48, 0x19, 0xb8, 0x26, 0xcb, 0x32, 0x4d, 0xfe, 0xa5, 0x94, 0x81, 0x6b, 0xb2, 0x2c, 0xd3, + 0xe4, 0x5f, 0x49, 0x19, 0xb8, 0x26, 0xcb, 0x32, 0x4d, 0xfe, 0xb5, 0x94, 0x81, 0x6b, 0xb2, 0x2c, + 0xd3, 0xe4, 0xdf, 0x48, 0x19, 0xb8, 0x26, 0xcb, 0x32, 0x4d, 0xfe, 0xad, 0x94, 0x81, 0x6b, 0xb2, + 0x2c, 0xd3, 0xe4, 0xdf, 0x49, 0x19, 0xb8, 0x26, 0x2b, 0x32, 0x4d, 0xfe, 0xbd, 0x8c, 0xa1, 0xc2, + 0x35, 0x59, 0x91, 0x69, 0xf2, 0x1f, 0xa4, 0x0c, 0x5c, 0x93, 0x15, 0x99, 0x26, 0xff, 0x51, 0xca, + 0xc0, 0x35, 0x59, 0x91, 0x69, 0xf2, 0x9f, 0xa4, 0x0c, 0x5c, 0x93, 0x15, 0x99, 0x26, 0xff, 0x59, + 0xca, 0xc0, 0x35, 0x59, 0x91, 0x69, 0xf2, 0x5f, 0xa4, 0x0c, 0x5c, 0x93, 0x15, 0x99, 0x26, 0xff, + 0x55, 0xca, 0xc0, 0x35, 0x59, 0x91, 0x69, 0xf2, 0xdf, 0xa4, 0x0c, 0x5c, 0x93, 0x15, 0x99, 0x26, + 0x7f, 0x28, 0x65, 0xe0, 0x9a, 0xac, 0xc8, 0x34, 0xf9, 0xef, 0x52, 0x06, 0xae, 0x49, 0x43, 0xa6, + 0xc9, 
	0xff, 0x90, 0x31, 0x18, 0x5c, 0x93, 0x86, 0x4c, 0x93, 0xff, 0x29, 0x65, 0xe0, 0x9a, 0x34,
+	0x64, 0x9a, 0xfc, 0x2f, 0x29, 0x03, 0xd7, 0xa4, 0x21, 0xd3, 0xe4, 0x7f, 0x4b, 0x19, 0xb8, 0x26,
+	0x0d, 0x99, 0x26, 0xff, 0x47, 0xca, 0xc0, 0x35, 0x69, 0xc8, 0x34, 0xf9, 0xbf, 0x52, 0x06, 0xae,
+	0x49, 0x43, 0xa6, 0xc9, 0x1f, 0x49, 0x19, 0xb8, 0x26, 0x0d, 0x99, 0x26, 0x7f, 0x2c, 0x65, 0xe0,
+	0x9a, 0x34, 0x64, 0x9a, 0xfc, 0x89, 0x94, 0x81, 0x6b, 0xd2, 0x90, 0x69, 0xf2, 0xa7, 0x52, 0x06,
+	0xae, 0xc9, 0x35, 0x99, 0x26, 0xff, 0x4f, 0xc6, 0xb0, 0xb6, 0x72, 0xe7, 0xda, 0xe3, 0xab, 0xdd,
+	0x9e, 0x7b, 0x30, 0xd9, 0x5b, 0xde, 0x77, 0x06, 0xd7, 0xbb, 0x4e, 0xbf, 0x65, 0x77, 0xaf, 0x23,
+	0x6c, 0x6f, 0xd2, 0xb9, 0x1e, 0xfc, 0xeb, 0x35, 0x33, 0xfd, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff,
+	0x46, 0xc7, 0xb3, 0x38, 0x92, 0x3d, 0x00, 0x00,
+}
diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.proto b/vendor/github.com/golang/protobuf/proto/test_proto/test.proto
similarity index 95%
rename from vendor/github.com/golang/protobuf/proto/testdata/test.proto
rename to vendor/github.com/golang/protobuf/proto/test_proto/test.proto
index 70e3cfcd..22068a95 100644
--- a/vendor/github.com/golang/protobuf/proto/testdata/test.proto
+++ b/vendor/github.com/golang/protobuf/proto/test_proto/test.proto
@@ -33,7 +33,9 @@
 
 syntax = "proto2";
 
-package testdata;
+option go_package = "github.com/golang/protobuf/proto/test_proto";
+
+package test_proto;
 
 enum FOO { FOO1 = 1; };
 
@@ -96,6 +98,8 @@ message GoTest {
   required bytes F_Bytes_required = 101;
   required sint32 F_Sint32_required = 102;
   required sint64 F_Sint64_required = 103;
+  required sfixed32 F_Sfixed32_required = 104;
+  required sfixed64 F_Sfixed64_required = 105;
 
   // Repeated fields of all basic types
   repeated bool F_Bool_repeated = 20;
@@ -111,6 +115,8 @@ message GoTest {
   repeated bytes F_Bytes_repeated = 201;
   repeated sint32 F_Sint32_repeated = 202;
   repeated sint64 F_Sint64_repeated = 203;
+  repeated sfixed32 F_Sfixed32_repeated = 204;
+  repeated sfixed64 F_Sfixed64_repeated = 205;
 
   // Optional fields of all basic types
   optional bool F_Bool_optional = 30;
@@ -126,6 +132,8 @@ message GoTest {
   optional bytes F_Bytes_optional = 301;
   optional sint32 F_Sint32_optional = 302;
   optional sint64 F_Sint64_optional = 303;
+  optional sfixed32 F_Sfixed32_optional = 304;
+  optional sfixed64 F_Sfixed64_optional = 305;
 
   // Default-valued fields of all basic types
   optional bool F_Bool_defaulted = 40 [default=true];
@@ -141,6 +149,8 @@ message GoTest {
   optional bytes F_Bytes_defaulted = 401 [default="Bignose"];
   optional sint32 F_Sint32_defaulted = 402 [default = -32];
   optional sint64 F_Sint64_defaulted = 403 [default = -64];
+  optional sfixed32 F_Sfixed32_defaulted = 404 [default = -32];
+  optional sfixed64 F_Sfixed64_defaulted = 405 [default = -64];
 
   // Packed repeated fields (no string or bytes).
   repeated bool F_Bool_repeated_packed = 50 [packed=true];
@@ -154,6 +164,8 @@ message GoTest {
   repeated double F_Double_repeated_packed = 58 [packed=true];
   repeated sint32 F_Sint32_repeated_packed = 502 [packed=true];
   repeated sint64 F_Sint64_repeated_packed = 503 [packed=true];
+  repeated sfixed32 F_Sfixed32_repeated_packed = 504 [packed=true];
+  repeated sfixed64 F_Sfixed64_repeated_packed = 505 [packed=true];
 
   // Required, repeated, and optional groups.
   required group RequiredGroup = 70 {
@@ -285,10 +297,12 @@ message Ext {
   }
 
   optional string data = 1;
+  map<int32, int32> map_field = 2;
 }
 
 extend MyMessage {
   repeated string greeting = 106;
+  // leave field 200 unregistered for testing
 }
 
 message ComplexExtension {
@@ -342,7 +356,7 @@ extend DefaultsMessage {
   optional sfixed32 default_sfixed32 = 211 [default = 50];
   optional sfixed64 default_sfixed64 = 212 [default = 51];
   optional bool default_bool = 213 [default = true];
-  optional string default_string = 214 [default = "Hello, string"];
+  optional string default_string = 214 [default = "Hello, string,def=foo"];
   optional bytes default_bytes = 215 [default = "Hello, bytes"];
   optional DefaultsMessage.DefaultsEnum default_enum = 216 [default = ONE];
 }
diff --git a/vendor/github.com/golang/protobuf/proto/testdata/Makefile b/vendor/github.com/golang/protobuf/proto/testdata/Makefile
deleted file mode 100644
index fc288628..00000000
--- a/vendor/github.com/golang/protobuf/proto/testdata/Makefile
+++ /dev/null
@@ -1,50 +0,0 @@
-# Go support for Protocol Buffers - Google's data interchange format
-#
-# Copyright 2010 The Go Authors. All rights reserved.
-# https://github.com/golang/protobuf
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-include ../../Make.protobuf
-
-all: regenerate
-
-regenerate:
-	rm -f test.pb.go
-	make test.pb.go
-
-# The following rules are just aids to development. Not needed for typical testing.
-
-diff: regenerate
-	git diff test.pb.go
-
-restore:
-	cp test.pb.go.golden test.pb.go
-
-preserve:
-	cp test.pb.go test.pb.go.golden
diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go b/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go
deleted file mode 100644
index e980d1a0..00000000
--- a/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go
+++ /dev/null
@@ -1,4147 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: test.proto
-
-/*
-Package testdata is a generated protocol buffer package.
- -It is generated from these files: - test.proto - -It has these top-level messages: - GoEnum - GoTestField - GoTest - GoTestRequiredGroupField - GoSkipTest - NonPackedTest - PackedTest - MaxTag - OldMessage - NewMessage - InnerMessage - OtherMessage - RequiredInnerMessage - MyMessage - Ext - ComplexExtension - DefaultsMessage - MyMessageSet - Empty - MessageList - Strings - Defaults - SubDefaults - RepeatedEnum - MoreRepeated - GroupOld - GroupNew - FloatingPoint - MessageWithMap - Oneof - Communique -*/ -package testdata - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FOO int32 - -const ( - FOO_FOO1 FOO = 1 -) - -var FOO_name = map[int32]string{ - 1: "FOO1", -} -var FOO_value = map[string]int32{ - "FOO1": 1, -} - -func (x FOO) Enum() *FOO { - p := new(FOO) - *p = x - return p -} -func (x FOO) String() string { - return proto.EnumName(FOO_name, int32(x)) -} -func (x *FOO) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FOO_value, data, "FOO") - if err != nil { - return err - } - *x = FOO(value) - return nil -} -func (FOO) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -// An enum, for completeness. -type GoTest_KIND int32 - -const ( - GoTest_VOID GoTest_KIND = 0 - // Basic types - GoTest_BOOL GoTest_KIND = 1 - GoTest_BYTES GoTest_KIND = 2 - GoTest_FINGERPRINT GoTest_KIND = 3 - GoTest_FLOAT GoTest_KIND = 4 - GoTest_INT GoTest_KIND = 5 - GoTest_STRING GoTest_KIND = 6 - GoTest_TIME GoTest_KIND = 7 - // Groupings - GoTest_TUPLE GoTest_KIND = 8 - GoTest_ARRAY GoTest_KIND = 9 - GoTest_MAP GoTest_KIND = 10 - // Table types - GoTest_TABLE GoTest_KIND = 11 - // Functions - GoTest_FUNCTION GoTest_KIND = 12 -) - -var GoTest_KIND_name = map[int32]string{ - 0: "VOID", - 1: "BOOL", - 2: "BYTES", - 3: "FINGERPRINT", - 4: "FLOAT", - 5: "INT", - 6: "STRING", - 7: "TIME", - 8: "TUPLE", - 9: "ARRAY", - 10: "MAP", - 11: "TABLE", - 12: "FUNCTION", -} -var GoTest_KIND_value = map[string]int32{ - "VOID": 0, - "BOOL": 1, - "BYTES": 2, - "FINGERPRINT": 3, - "FLOAT": 4, - "INT": 5, - "STRING": 6, - "TIME": 7, - "TUPLE": 8, - "ARRAY": 9, - "MAP": 10, - "TABLE": 11, - "FUNCTION": 12, -} - -func (x GoTest_KIND) Enum() *GoTest_KIND { - p := new(GoTest_KIND) - *p = x - return p -} -func (x GoTest_KIND) String() string { - return proto.EnumName(GoTest_KIND_name, int32(x)) -} -func (x *GoTest_KIND) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(GoTest_KIND_value, data, "GoTest_KIND") - if err != nil { - return err - } - *x = GoTest_KIND(value) - return nil -} -func (GoTest_KIND) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } - -type MyMessage_Color int32 - -const ( - MyMessage_RED MyMessage_Color = 0 - MyMessage_GREEN MyMessage_Color = 1 - MyMessage_BLUE MyMessage_Color = 2 -) - -var MyMessage_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var MyMessage_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x MyMessage_Color) Enum() *MyMessage_Color { - p := 
new(MyMessage_Color) - *p = x - return p -} -func (x MyMessage_Color) String() string { - return proto.EnumName(MyMessage_Color_name, int32(x)) -} -func (x *MyMessage_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(MyMessage_Color_value, data, "MyMessage_Color") - if err != nil { - return err - } - *x = MyMessage_Color(value) - return nil -} -func (MyMessage_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } - -type DefaultsMessage_DefaultsEnum int32 - -const ( - DefaultsMessage_ZERO DefaultsMessage_DefaultsEnum = 0 - DefaultsMessage_ONE DefaultsMessage_DefaultsEnum = 1 - DefaultsMessage_TWO DefaultsMessage_DefaultsEnum = 2 -) - -var DefaultsMessage_DefaultsEnum_name = map[int32]string{ - 0: "ZERO", - 1: "ONE", - 2: "TWO", -} -var DefaultsMessage_DefaultsEnum_value = map[string]int32{ - "ZERO": 0, - "ONE": 1, - "TWO": 2, -} - -func (x DefaultsMessage_DefaultsEnum) Enum() *DefaultsMessage_DefaultsEnum { - p := new(DefaultsMessage_DefaultsEnum) - *p = x - return p -} -func (x DefaultsMessage_DefaultsEnum) String() string { - return proto.EnumName(DefaultsMessage_DefaultsEnum_name, int32(x)) -} -func (x *DefaultsMessage_DefaultsEnum) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(DefaultsMessage_DefaultsEnum_value, data, "DefaultsMessage_DefaultsEnum") - if err != nil { - return err - } - *x = DefaultsMessage_DefaultsEnum(value) - return nil -} -func (DefaultsMessage_DefaultsEnum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{16, 0} -} - -type Defaults_Color int32 - -const ( - Defaults_RED Defaults_Color = 0 - Defaults_GREEN Defaults_Color = 1 - Defaults_BLUE Defaults_Color = 2 -) - -var Defaults_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var Defaults_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x Defaults_Color) Enum() *Defaults_Color { - p := new(Defaults_Color) - *p = x - return p -} -func (x Defaults_Color) String() string { - return proto.EnumName(Defaults_Color_name, int32(x)) -} -func (x *Defaults_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Defaults_Color_value, data, "Defaults_Color") - if err != nil { - return err - } - *x = Defaults_Color(value) - return nil -} -func (Defaults_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{21, 0} } - -type RepeatedEnum_Color int32 - -const ( - RepeatedEnum_RED RepeatedEnum_Color = 1 -) - -var RepeatedEnum_Color_name = map[int32]string{ - 1: "RED", -} -var RepeatedEnum_Color_value = map[string]int32{ - "RED": 1, -} - -func (x RepeatedEnum_Color) Enum() *RepeatedEnum_Color { - p := new(RepeatedEnum_Color) - *p = x - return p -} -func (x RepeatedEnum_Color) String() string { - return proto.EnumName(RepeatedEnum_Color_name, int32(x)) -} -func (x *RepeatedEnum_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(RepeatedEnum_Color_value, data, "RepeatedEnum_Color") - if err != nil { - return err - } - *x = RepeatedEnum_Color(value) - return nil -} -func (RepeatedEnum_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{23, 0} } - -type GoEnum struct { - Foo *FOO `protobuf:"varint,1,req,name=foo,enum=testdata.FOO" json:"foo,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoEnum) Reset() { *m = GoEnum{} } -func (m *GoEnum) String() string { return proto.CompactTextString(m) } -func (*GoEnum) ProtoMessage() {} -func (*GoEnum) Descriptor() ([]byte, []int) 
{ return fileDescriptor0, []int{0} } - -func (m *GoEnum) GetFoo() FOO { - if m != nil && m.Foo != nil { - return *m.Foo - } - return FOO_FOO1 -} - -type GoTestField struct { - Label *string `protobuf:"bytes,1,req,name=Label" json:"Label,omitempty"` - Type *string `protobuf:"bytes,2,req,name=Type" json:"Type,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestField) Reset() { *m = GoTestField{} } -func (m *GoTestField) String() string { return proto.CompactTextString(m) } -func (*GoTestField) ProtoMessage() {} -func (*GoTestField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *GoTestField) GetLabel() string { - if m != nil && m.Label != nil { - return *m.Label - } - return "" -} - -func (m *GoTestField) GetType() string { - if m != nil && m.Type != nil { - return *m.Type - } - return "" -} - -type GoTest struct { - // Some typical parameters - Kind *GoTest_KIND `protobuf:"varint,1,req,name=Kind,enum=testdata.GoTest_KIND" json:"Kind,omitempty"` - Table *string `protobuf:"bytes,2,opt,name=Table" json:"Table,omitempty"` - Param *int32 `protobuf:"varint,3,opt,name=Param" json:"Param,omitempty"` - // Required, repeated and optional foreign fields. - RequiredField *GoTestField `protobuf:"bytes,4,req,name=RequiredField" json:"RequiredField,omitempty"` - RepeatedField []*GoTestField `protobuf:"bytes,5,rep,name=RepeatedField" json:"RepeatedField,omitempty"` - OptionalField *GoTestField `protobuf:"bytes,6,opt,name=OptionalField" json:"OptionalField,omitempty"` - // Required fields of all basic types - F_BoolRequired *bool `protobuf:"varint,10,req,name=F_Bool_required,json=FBoolRequired" json:"F_Bool_required,omitempty"` - F_Int32Required *int32 `protobuf:"varint,11,req,name=F_Int32_required,json=FInt32Required" json:"F_Int32_required,omitempty"` - F_Int64Required *int64 `protobuf:"varint,12,req,name=F_Int64_required,json=FInt64Required" json:"F_Int64_required,omitempty"` - F_Fixed32Required *uint32 `protobuf:"fixed32,13,req,name=F_Fixed32_required,json=FFixed32Required" json:"F_Fixed32_required,omitempty"` - F_Fixed64Required *uint64 `protobuf:"fixed64,14,req,name=F_Fixed64_required,json=FFixed64Required" json:"F_Fixed64_required,omitempty"` - F_Uint32Required *uint32 `protobuf:"varint,15,req,name=F_Uint32_required,json=FUint32Required" json:"F_Uint32_required,omitempty"` - F_Uint64Required *uint64 `protobuf:"varint,16,req,name=F_Uint64_required,json=FUint64Required" json:"F_Uint64_required,omitempty"` - F_FloatRequired *float32 `protobuf:"fixed32,17,req,name=F_Float_required,json=FFloatRequired" json:"F_Float_required,omitempty"` - F_DoubleRequired *float64 `protobuf:"fixed64,18,req,name=F_Double_required,json=FDoubleRequired" json:"F_Double_required,omitempty"` - F_StringRequired *string `protobuf:"bytes,19,req,name=F_String_required,json=FStringRequired" json:"F_String_required,omitempty"` - F_BytesRequired []byte `protobuf:"bytes,101,req,name=F_Bytes_required,json=FBytesRequired" json:"F_Bytes_required,omitempty"` - F_Sint32Required *int32 `protobuf:"zigzag32,102,req,name=F_Sint32_required,json=FSint32Required" json:"F_Sint32_required,omitempty"` - F_Sint64Required *int64 `protobuf:"zigzag64,103,req,name=F_Sint64_required,json=FSint64Required" json:"F_Sint64_required,omitempty"` - // Repeated fields of all basic types - F_BoolRepeated []bool `protobuf:"varint,20,rep,name=F_Bool_repeated,json=FBoolRepeated" json:"F_Bool_repeated,omitempty"` - F_Int32Repeated []int32 `protobuf:"varint,21,rep,name=F_Int32_repeated,json=FInt32Repeated" 
json:"F_Int32_repeated,omitempty"` - F_Int64Repeated []int64 `protobuf:"varint,22,rep,name=F_Int64_repeated,json=FInt64Repeated" json:"F_Int64_repeated,omitempty"` - F_Fixed32Repeated []uint32 `protobuf:"fixed32,23,rep,name=F_Fixed32_repeated,json=FFixed32Repeated" json:"F_Fixed32_repeated,omitempty"` - F_Fixed64Repeated []uint64 `protobuf:"fixed64,24,rep,name=F_Fixed64_repeated,json=FFixed64Repeated" json:"F_Fixed64_repeated,omitempty"` - F_Uint32Repeated []uint32 `protobuf:"varint,25,rep,name=F_Uint32_repeated,json=FUint32Repeated" json:"F_Uint32_repeated,omitempty"` - F_Uint64Repeated []uint64 `protobuf:"varint,26,rep,name=F_Uint64_repeated,json=FUint64Repeated" json:"F_Uint64_repeated,omitempty"` - F_FloatRepeated []float32 `protobuf:"fixed32,27,rep,name=F_Float_repeated,json=FFloatRepeated" json:"F_Float_repeated,omitempty"` - F_DoubleRepeated []float64 `protobuf:"fixed64,28,rep,name=F_Double_repeated,json=FDoubleRepeated" json:"F_Double_repeated,omitempty"` - F_StringRepeated []string `protobuf:"bytes,29,rep,name=F_String_repeated,json=FStringRepeated" json:"F_String_repeated,omitempty"` - F_BytesRepeated [][]byte `protobuf:"bytes,201,rep,name=F_Bytes_repeated,json=FBytesRepeated" json:"F_Bytes_repeated,omitempty"` - F_Sint32Repeated []int32 `protobuf:"zigzag32,202,rep,name=F_Sint32_repeated,json=FSint32Repeated" json:"F_Sint32_repeated,omitempty"` - F_Sint64Repeated []int64 `protobuf:"zigzag64,203,rep,name=F_Sint64_repeated,json=FSint64Repeated" json:"F_Sint64_repeated,omitempty"` - // Optional fields of all basic types - F_BoolOptional *bool `protobuf:"varint,30,opt,name=F_Bool_optional,json=FBoolOptional" json:"F_Bool_optional,omitempty"` - F_Int32Optional *int32 `protobuf:"varint,31,opt,name=F_Int32_optional,json=FInt32Optional" json:"F_Int32_optional,omitempty"` - F_Int64Optional *int64 `protobuf:"varint,32,opt,name=F_Int64_optional,json=FInt64Optional" json:"F_Int64_optional,omitempty"` - F_Fixed32Optional *uint32 `protobuf:"fixed32,33,opt,name=F_Fixed32_optional,json=FFixed32Optional" json:"F_Fixed32_optional,omitempty"` - F_Fixed64Optional *uint64 `protobuf:"fixed64,34,opt,name=F_Fixed64_optional,json=FFixed64Optional" json:"F_Fixed64_optional,omitempty"` - F_Uint32Optional *uint32 `protobuf:"varint,35,opt,name=F_Uint32_optional,json=FUint32Optional" json:"F_Uint32_optional,omitempty"` - F_Uint64Optional *uint64 `protobuf:"varint,36,opt,name=F_Uint64_optional,json=FUint64Optional" json:"F_Uint64_optional,omitempty"` - F_FloatOptional *float32 `protobuf:"fixed32,37,opt,name=F_Float_optional,json=FFloatOptional" json:"F_Float_optional,omitempty"` - F_DoubleOptional *float64 `protobuf:"fixed64,38,opt,name=F_Double_optional,json=FDoubleOptional" json:"F_Double_optional,omitempty"` - F_StringOptional *string `protobuf:"bytes,39,opt,name=F_String_optional,json=FStringOptional" json:"F_String_optional,omitempty"` - F_BytesOptional []byte `protobuf:"bytes,301,opt,name=F_Bytes_optional,json=FBytesOptional" json:"F_Bytes_optional,omitempty"` - F_Sint32Optional *int32 `protobuf:"zigzag32,302,opt,name=F_Sint32_optional,json=FSint32Optional" json:"F_Sint32_optional,omitempty"` - F_Sint64Optional *int64 `protobuf:"zigzag64,303,opt,name=F_Sint64_optional,json=FSint64Optional" json:"F_Sint64_optional,omitempty"` - // Default-valued fields of all basic types - F_BoolDefaulted *bool `protobuf:"varint,40,opt,name=F_Bool_defaulted,json=FBoolDefaulted,def=1" json:"F_Bool_defaulted,omitempty"` - F_Int32Defaulted *int32 `protobuf:"varint,41,opt,name=F_Int32_defaulted,json=FInt32Defaulted,def=32" 
json:"F_Int32_defaulted,omitempty"` - F_Int64Defaulted *int64 `protobuf:"varint,42,opt,name=F_Int64_defaulted,json=FInt64Defaulted,def=64" json:"F_Int64_defaulted,omitempty"` - F_Fixed32Defaulted *uint32 `protobuf:"fixed32,43,opt,name=F_Fixed32_defaulted,json=FFixed32Defaulted,def=320" json:"F_Fixed32_defaulted,omitempty"` - F_Fixed64Defaulted *uint64 `protobuf:"fixed64,44,opt,name=F_Fixed64_defaulted,json=FFixed64Defaulted,def=640" json:"F_Fixed64_defaulted,omitempty"` - F_Uint32Defaulted *uint32 `protobuf:"varint,45,opt,name=F_Uint32_defaulted,json=FUint32Defaulted,def=3200" json:"F_Uint32_defaulted,omitempty"` - F_Uint64Defaulted *uint64 `protobuf:"varint,46,opt,name=F_Uint64_defaulted,json=FUint64Defaulted,def=6400" json:"F_Uint64_defaulted,omitempty"` - F_FloatDefaulted *float32 `protobuf:"fixed32,47,opt,name=F_Float_defaulted,json=FFloatDefaulted,def=314159" json:"F_Float_defaulted,omitempty"` - F_DoubleDefaulted *float64 `protobuf:"fixed64,48,opt,name=F_Double_defaulted,json=FDoubleDefaulted,def=271828" json:"F_Double_defaulted,omitempty"` - F_StringDefaulted *string `protobuf:"bytes,49,opt,name=F_String_defaulted,json=FStringDefaulted,def=hello, \"world!\"\n" json:"F_String_defaulted,omitempty"` - F_BytesDefaulted []byte `protobuf:"bytes,401,opt,name=F_Bytes_defaulted,json=FBytesDefaulted,def=Bignose" json:"F_Bytes_defaulted,omitempty"` - F_Sint32Defaulted *int32 `protobuf:"zigzag32,402,opt,name=F_Sint32_defaulted,json=FSint32Defaulted,def=-32" json:"F_Sint32_defaulted,omitempty"` - F_Sint64Defaulted *int64 `protobuf:"zigzag64,403,opt,name=F_Sint64_defaulted,json=FSint64Defaulted,def=-64" json:"F_Sint64_defaulted,omitempty"` - // Packed repeated fields (no string or bytes). - F_BoolRepeatedPacked []bool `protobuf:"varint,50,rep,packed,name=F_Bool_repeated_packed,json=FBoolRepeatedPacked" json:"F_Bool_repeated_packed,omitempty"` - F_Int32RepeatedPacked []int32 `protobuf:"varint,51,rep,packed,name=F_Int32_repeated_packed,json=FInt32RepeatedPacked" json:"F_Int32_repeated_packed,omitempty"` - F_Int64RepeatedPacked []int64 `protobuf:"varint,52,rep,packed,name=F_Int64_repeated_packed,json=FInt64RepeatedPacked" json:"F_Int64_repeated_packed,omitempty"` - F_Fixed32RepeatedPacked []uint32 `protobuf:"fixed32,53,rep,packed,name=F_Fixed32_repeated_packed,json=FFixed32RepeatedPacked" json:"F_Fixed32_repeated_packed,omitempty"` - F_Fixed64RepeatedPacked []uint64 `protobuf:"fixed64,54,rep,packed,name=F_Fixed64_repeated_packed,json=FFixed64RepeatedPacked" json:"F_Fixed64_repeated_packed,omitempty"` - F_Uint32RepeatedPacked []uint32 `protobuf:"varint,55,rep,packed,name=F_Uint32_repeated_packed,json=FUint32RepeatedPacked" json:"F_Uint32_repeated_packed,omitempty"` - F_Uint64RepeatedPacked []uint64 `protobuf:"varint,56,rep,packed,name=F_Uint64_repeated_packed,json=FUint64RepeatedPacked" json:"F_Uint64_repeated_packed,omitempty"` - F_FloatRepeatedPacked []float32 `protobuf:"fixed32,57,rep,packed,name=F_Float_repeated_packed,json=FFloatRepeatedPacked" json:"F_Float_repeated_packed,omitempty"` - F_DoubleRepeatedPacked []float64 `protobuf:"fixed64,58,rep,packed,name=F_Double_repeated_packed,json=FDoubleRepeatedPacked" json:"F_Double_repeated_packed,omitempty"` - F_Sint32RepeatedPacked []int32 `protobuf:"zigzag32,502,rep,packed,name=F_Sint32_repeated_packed,json=FSint32RepeatedPacked" json:"F_Sint32_repeated_packed,omitempty"` - F_Sint64RepeatedPacked []int64 `protobuf:"zigzag64,503,rep,packed,name=F_Sint64_repeated_packed,json=FSint64RepeatedPacked" json:"F_Sint64_repeated_packed,omitempty"` - 
Requiredgroup *GoTest_RequiredGroup `protobuf:"group,70,req,name=RequiredGroup,json=requiredgroup" json:"requiredgroup,omitempty"` - Repeatedgroup []*GoTest_RepeatedGroup `protobuf:"group,80,rep,name=RepeatedGroup,json=repeatedgroup" json:"repeatedgroup,omitempty"` - Optionalgroup *GoTest_OptionalGroup `protobuf:"group,90,opt,name=OptionalGroup,json=optionalgroup" json:"optionalgroup,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest) Reset() { *m = GoTest{} } -func (m *GoTest) String() string { return proto.CompactTextString(m) } -func (*GoTest) ProtoMessage() {} -func (*GoTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -const Default_GoTest_F_BoolDefaulted bool = true -const Default_GoTest_F_Int32Defaulted int32 = 32 -const Default_GoTest_F_Int64Defaulted int64 = 64 -const Default_GoTest_F_Fixed32Defaulted uint32 = 320 -const Default_GoTest_F_Fixed64Defaulted uint64 = 640 -const Default_GoTest_F_Uint32Defaulted uint32 = 3200 -const Default_GoTest_F_Uint64Defaulted uint64 = 6400 -const Default_GoTest_F_FloatDefaulted float32 = 314159 -const Default_GoTest_F_DoubleDefaulted float64 = 271828 -const Default_GoTest_F_StringDefaulted string = "hello, \"world!\"\n" - -var Default_GoTest_F_BytesDefaulted []byte = []byte("Bignose") - -const Default_GoTest_F_Sint32Defaulted int32 = -32 -const Default_GoTest_F_Sint64Defaulted int64 = -64 - -func (m *GoTest) GetKind() GoTest_KIND { - if m != nil && m.Kind != nil { - return *m.Kind - } - return GoTest_VOID -} - -func (m *GoTest) GetTable() string { - if m != nil && m.Table != nil { - return *m.Table - } - return "" -} - -func (m *GoTest) GetParam() int32 { - if m != nil && m.Param != nil { - return *m.Param - } - return 0 -} - -func (m *GoTest) GetRequiredField() *GoTestField { - if m != nil { - return m.RequiredField - } - return nil -} - -func (m *GoTest) GetRepeatedField() []*GoTestField { - if m != nil { - return m.RepeatedField - } - return nil -} - -func (m *GoTest) GetOptionalField() *GoTestField { - if m != nil { - return m.OptionalField - } - return nil -} - -func (m *GoTest) GetF_BoolRequired() bool { - if m != nil && m.F_BoolRequired != nil { - return *m.F_BoolRequired - } - return false -} - -func (m *GoTest) GetF_Int32Required() int32 { - if m != nil && m.F_Int32Required != nil { - return *m.F_Int32Required - } - return 0 -} - -func (m *GoTest) GetF_Int64Required() int64 { - if m != nil && m.F_Int64Required != nil { - return *m.F_Int64Required - } - return 0 -} - -func (m *GoTest) GetF_Fixed32Required() uint32 { - if m != nil && m.F_Fixed32Required != nil { - return *m.F_Fixed32Required - } - return 0 -} - -func (m *GoTest) GetF_Fixed64Required() uint64 { - if m != nil && m.F_Fixed64Required != nil { - return *m.F_Fixed64Required - } - return 0 -} - -func (m *GoTest) GetF_Uint32Required() uint32 { - if m != nil && m.F_Uint32Required != nil { - return *m.F_Uint32Required - } - return 0 -} - -func (m *GoTest) GetF_Uint64Required() uint64 { - if m != nil && m.F_Uint64Required != nil { - return *m.F_Uint64Required - } - return 0 -} - -func (m *GoTest) GetF_FloatRequired() float32 { - if m != nil && m.F_FloatRequired != nil { - return *m.F_FloatRequired - } - return 0 -} - -func (m *GoTest) GetF_DoubleRequired() float64 { - if m != nil && m.F_DoubleRequired != nil { - return *m.F_DoubleRequired - } - return 0 -} - -func (m *GoTest) GetF_StringRequired() string { - if m != nil && m.F_StringRequired != nil { - return *m.F_StringRequired - } - return "" -} - -func (m *GoTest) GetF_BytesRequired() 
[]byte { - if m != nil { - return m.F_BytesRequired - } - return nil -} - -func (m *GoTest) GetF_Sint32Required() int32 { - if m != nil && m.F_Sint32Required != nil { - return *m.F_Sint32Required - } - return 0 -} - -func (m *GoTest) GetF_Sint64Required() int64 { - if m != nil && m.F_Sint64Required != nil { - return *m.F_Sint64Required - } - return 0 -} - -func (m *GoTest) GetF_BoolRepeated() []bool { - if m != nil { - return m.F_BoolRepeated - } - return nil -} - -func (m *GoTest) GetF_Int32Repeated() []int32 { - if m != nil { - return m.F_Int32Repeated - } - return nil -} - -func (m *GoTest) GetF_Int64Repeated() []int64 { - if m != nil { - return m.F_Int64Repeated - } - return nil -} - -func (m *GoTest) GetF_Fixed32Repeated() []uint32 { - if m != nil { - return m.F_Fixed32Repeated - } - return nil -} - -func (m *GoTest) GetF_Fixed64Repeated() []uint64 { - if m != nil { - return m.F_Fixed64Repeated - } - return nil -} - -func (m *GoTest) GetF_Uint32Repeated() []uint32 { - if m != nil { - return m.F_Uint32Repeated - } - return nil -} - -func (m *GoTest) GetF_Uint64Repeated() []uint64 { - if m != nil { - return m.F_Uint64Repeated - } - return nil -} - -func (m *GoTest) GetF_FloatRepeated() []float32 { - if m != nil { - return m.F_FloatRepeated - } - return nil -} - -func (m *GoTest) GetF_DoubleRepeated() []float64 { - if m != nil { - return m.F_DoubleRepeated - } - return nil -} - -func (m *GoTest) GetF_StringRepeated() []string { - if m != nil { - return m.F_StringRepeated - } - return nil -} - -func (m *GoTest) GetF_BytesRepeated() [][]byte { - if m != nil { - return m.F_BytesRepeated - } - return nil -} - -func (m *GoTest) GetF_Sint32Repeated() []int32 { - if m != nil { - return m.F_Sint32Repeated - } - return nil -} - -func (m *GoTest) GetF_Sint64Repeated() []int64 { - if m != nil { - return m.F_Sint64Repeated - } - return nil -} - -func (m *GoTest) GetF_BoolOptional() bool { - if m != nil && m.F_BoolOptional != nil { - return *m.F_BoolOptional - } - return false -} - -func (m *GoTest) GetF_Int32Optional() int32 { - if m != nil && m.F_Int32Optional != nil { - return *m.F_Int32Optional - } - return 0 -} - -func (m *GoTest) GetF_Int64Optional() int64 { - if m != nil && m.F_Int64Optional != nil { - return *m.F_Int64Optional - } - return 0 -} - -func (m *GoTest) GetF_Fixed32Optional() uint32 { - if m != nil && m.F_Fixed32Optional != nil { - return *m.F_Fixed32Optional - } - return 0 -} - -func (m *GoTest) GetF_Fixed64Optional() uint64 { - if m != nil && m.F_Fixed64Optional != nil { - return *m.F_Fixed64Optional - } - return 0 -} - -func (m *GoTest) GetF_Uint32Optional() uint32 { - if m != nil && m.F_Uint32Optional != nil { - return *m.F_Uint32Optional - } - return 0 -} - -func (m *GoTest) GetF_Uint64Optional() uint64 { - if m != nil && m.F_Uint64Optional != nil { - return *m.F_Uint64Optional - } - return 0 -} - -func (m *GoTest) GetF_FloatOptional() float32 { - if m != nil && m.F_FloatOptional != nil { - return *m.F_FloatOptional - } - return 0 -} - -func (m *GoTest) GetF_DoubleOptional() float64 { - if m != nil && m.F_DoubleOptional != nil { - return *m.F_DoubleOptional - } - return 0 -} - -func (m *GoTest) GetF_StringOptional() string { - if m != nil && m.F_StringOptional != nil { - return *m.F_StringOptional - } - return "" -} - -func (m *GoTest) GetF_BytesOptional() []byte { - if m != nil { - return m.F_BytesOptional - } - return nil -} - -func (m *GoTest) GetF_Sint32Optional() int32 { - if m != nil && m.F_Sint32Optional != nil { - return *m.F_Sint32Optional - } - return 0 -} - -func (m 
*GoTest) GetF_Sint64Optional() int64 { - if m != nil && m.F_Sint64Optional != nil { - return *m.F_Sint64Optional - } - return 0 -} - -func (m *GoTest) GetF_BoolDefaulted() bool { - if m != nil && m.F_BoolDefaulted != nil { - return *m.F_BoolDefaulted - } - return Default_GoTest_F_BoolDefaulted -} - -func (m *GoTest) GetF_Int32Defaulted() int32 { - if m != nil && m.F_Int32Defaulted != nil { - return *m.F_Int32Defaulted - } - return Default_GoTest_F_Int32Defaulted -} - -func (m *GoTest) GetF_Int64Defaulted() int64 { - if m != nil && m.F_Int64Defaulted != nil { - return *m.F_Int64Defaulted - } - return Default_GoTest_F_Int64Defaulted -} - -func (m *GoTest) GetF_Fixed32Defaulted() uint32 { - if m != nil && m.F_Fixed32Defaulted != nil { - return *m.F_Fixed32Defaulted - } - return Default_GoTest_F_Fixed32Defaulted -} - -func (m *GoTest) GetF_Fixed64Defaulted() uint64 { - if m != nil && m.F_Fixed64Defaulted != nil { - return *m.F_Fixed64Defaulted - } - return Default_GoTest_F_Fixed64Defaulted -} - -func (m *GoTest) GetF_Uint32Defaulted() uint32 { - if m != nil && m.F_Uint32Defaulted != nil { - return *m.F_Uint32Defaulted - } - return Default_GoTest_F_Uint32Defaulted -} - -func (m *GoTest) GetF_Uint64Defaulted() uint64 { - if m != nil && m.F_Uint64Defaulted != nil { - return *m.F_Uint64Defaulted - } - return Default_GoTest_F_Uint64Defaulted -} - -func (m *GoTest) GetF_FloatDefaulted() float32 { - if m != nil && m.F_FloatDefaulted != nil { - return *m.F_FloatDefaulted - } - return Default_GoTest_F_FloatDefaulted -} - -func (m *GoTest) GetF_DoubleDefaulted() float64 { - if m != nil && m.F_DoubleDefaulted != nil { - return *m.F_DoubleDefaulted - } - return Default_GoTest_F_DoubleDefaulted -} - -func (m *GoTest) GetF_StringDefaulted() string { - if m != nil && m.F_StringDefaulted != nil { - return *m.F_StringDefaulted - } - return Default_GoTest_F_StringDefaulted -} - -func (m *GoTest) GetF_BytesDefaulted() []byte { - if m != nil && m.F_BytesDefaulted != nil { - return m.F_BytesDefaulted - } - return append([]byte(nil), Default_GoTest_F_BytesDefaulted...) 
-} - -func (m *GoTest) GetF_Sint32Defaulted() int32 { - if m != nil && m.F_Sint32Defaulted != nil { - return *m.F_Sint32Defaulted - } - return Default_GoTest_F_Sint32Defaulted -} - -func (m *GoTest) GetF_Sint64Defaulted() int64 { - if m != nil && m.F_Sint64Defaulted != nil { - return *m.F_Sint64Defaulted - } - return Default_GoTest_F_Sint64Defaulted -} - -func (m *GoTest) GetF_BoolRepeatedPacked() []bool { - if m != nil { - return m.F_BoolRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Int32RepeatedPacked() []int32 { - if m != nil { - return m.F_Int32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Int64RepeatedPacked() []int64 { - if m != nil { - return m.F_Int64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Fixed32RepeatedPacked() []uint32 { - if m != nil { - return m.F_Fixed32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Fixed64RepeatedPacked() []uint64 { - if m != nil { - return m.F_Fixed64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Uint32RepeatedPacked() []uint32 { - if m != nil { - return m.F_Uint32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Uint64RepeatedPacked() []uint64 { - if m != nil { - return m.F_Uint64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_FloatRepeatedPacked() []float32 { - if m != nil { - return m.F_FloatRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_DoubleRepeatedPacked() []float64 { - if m != nil { - return m.F_DoubleRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Sint32RepeatedPacked() []int32 { - if m != nil { - return m.F_Sint32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Sint64RepeatedPacked() []int64 { - if m != nil { - return m.F_Sint64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetRequiredgroup() *GoTest_RequiredGroup { - if m != nil { - return m.Requiredgroup - } - return nil -} - -func (m *GoTest) GetRepeatedgroup() []*GoTest_RepeatedGroup { - if m != nil { - return m.Repeatedgroup - } - return nil -} - -func (m *GoTest) GetOptionalgroup() *GoTest_OptionalGroup { - if m != nil { - return m.Optionalgroup - } - return nil -} - -// Required, repeated, and optional groups. 
-type GoTest_RequiredGroup struct { - RequiredField *string `protobuf:"bytes,71,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_RequiredGroup) Reset() { *m = GoTest_RequiredGroup{} } -func (m *GoTest_RequiredGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_RequiredGroup) ProtoMessage() {} -func (*GoTest_RequiredGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } - -func (m *GoTest_RequiredGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -type GoTest_RepeatedGroup struct { - RequiredField *string `protobuf:"bytes,81,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_RepeatedGroup) Reset() { *m = GoTest_RepeatedGroup{} } -func (m *GoTest_RepeatedGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_RepeatedGroup) ProtoMessage() {} -func (*GoTest_RepeatedGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 1} } - -func (m *GoTest_RepeatedGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -type GoTest_OptionalGroup struct { - RequiredField *string `protobuf:"bytes,91,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_OptionalGroup) Reset() { *m = GoTest_OptionalGroup{} } -func (m *GoTest_OptionalGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_OptionalGroup) ProtoMessage() {} -func (*GoTest_OptionalGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 2} } - -func (m *GoTest_OptionalGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -// For testing a group containing a required field. -type GoTestRequiredGroupField struct { - Group *GoTestRequiredGroupField_Group `protobuf:"group,1,req,name=Group,json=group" json:"group,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestRequiredGroupField) Reset() { *m = GoTestRequiredGroupField{} } -func (m *GoTestRequiredGroupField) String() string { return proto.CompactTextString(m) } -func (*GoTestRequiredGroupField) ProtoMessage() {} -func (*GoTestRequiredGroupField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *GoTestRequiredGroupField) GetGroup() *GoTestRequiredGroupField_Group { - if m != nil { - return m.Group - } - return nil -} - -type GoTestRequiredGroupField_Group struct { - Field *int32 `protobuf:"varint,2,req,name=Field" json:"Field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestRequiredGroupField_Group) Reset() { *m = GoTestRequiredGroupField_Group{} } -func (m *GoTestRequiredGroupField_Group) String() string { return proto.CompactTextString(m) } -func (*GoTestRequiredGroupField_Group) ProtoMessage() {} -func (*GoTestRequiredGroupField_Group) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{3, 0} -} - -func (m *GoTestRequiredGroupField_Group) GetField() int32 { - if m != nil && m.Field != nil { - return *m.Field - } - return 0 -} - -// For testing skipping of unrecognized fields. -// Numbers are all big, larger than tag numbers in GoTestField, -// the message used in the corresponding test. 
-type GoSkipTest struct { - SkipInt32 *int32 `protobuf:"varint,11,req,name=skip_int32,json=skipInt32" json:"skip_int32,omitempty"` - SkipFixed32 *uint32 `protobuf:"fixed32,12,req,name=skip_fixed32,json=skipFixed32" json:"skip_fixed32,omitempty"` - SkipFixed64 *uint64 `protobuf:"fixed64,13,req,name=skip_fixed64,json=skipFixed64" json:"skip_fixed64,omitempty"` - SkipString *string `protobuf:"bytes,14,req,name=skip_string,json=skipString" json:"skip_string,omitempty"` - Skipgroup *GoSkipTest_SkipGroup `protobuf:"group,15,req,name=SkipGroup,json=skipgroup" json:"skipgroup,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoSkipTest) Reset() { *m = GoSkipTest{} } -func (m *GoSkipTest) String() string { return proto.CompactTextString(m) } -func (*GoSkipTest) ProtoMessage() {} -func (*GoSkipTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } - -func (m *GoSkipTest) GetSkipInt32() int32 { - if m != nil && m.SkipInt32 != nil { - return *m.SkipInt32 - } - return 0 -} - -func (m *GoSkipTest) GetSkipFixed32() uint32 { - if m != nil && m.SkipFixed32 != nil { - return *m.SkipFixed32 - } - return 0 -} - -func (m *GoSkipTest) GetSkipFixed64() uint64 { - if m != nil && m.SkipFixed64 != nil { - return *m.SkipFixed64 - } - return 0 -} - -func (m *GoSkipTest) GetSkipString() string { - if m != nil && m.SkipString != nil { - return *m.SkipString - } - return "" -} - -func (m *GoSkipTest) GetSkipgroup() *GoSkipTest_SkipGroup { - if m != nil { - return m.Skipgroup - } - return nil -} - -type GoSkipTest_SkipGroup struct { - GroupInt32 *int32 `protobuf:"varint,16,req,name=group_int32,json=groupInt32" json:"group_int32,omitempty"` - GroupString *string `protobuf:"bytes,17,req,name=group_string,json=groupString" json:"group_string,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoSkipTest_SkipGroup) Reset() { *m = GoSkipTest_SkipGroup{} } -func (m *GoSkipTest_SkipGroup) String() string { return proto.CompactTextString(m) } -func (*GoSkipTest_SkipGroup) ProtoMessage() {} -func (*GoSkipTest_SkipGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} } - -func (m *GoSkipTest_SkipGroup) GetGroupInt32() int32 { - if m != nil && m.GroupInt32 != nil { - return *m.GroupInt32 - } - return 0 -} - -func (m *GoSkipTest_SkipGroup) GetGroupString() string { - if m != nil && m.GroupString != nil { - return *m.GroupString - } - return "" -} - -// For testing packed/non-packed decoder switching. -// A serialized instance of one should be deserializable as the other. 
-type NonPackedTest struct {
-	A []int32 `protobuf:"varint,1,rep,name=a" json:"a,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *NonPackedTest) Reset() { *m = NonPackedTest{} }
-func (m *NonPackedTest) String() string { return proto.CompactTextString(m) }
-func (*NonPackedTest) ProtoMessage() {}
-func (*NonPackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
-
-func (m *NonPackedTest) GetA() []int32 {
-	if m != nil {
-		return m.A
-	}
-	return nil
-}
-
-type PackedTest struct {
-	B []int32 `protobuf:"varint,1,rep,packed,name=b" json:"b,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PackedTest) Reset() { *m = PackedTest{} }
-func (m *PackedTest) String() string { return proto.CompactTextString(m) }
-func (*PackedTest) ProtoMessage() {}
-func (*PackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
-
-func (m *PackedTest) GetB() []int32 {
-	if m != nil {
-		return m.B
-	}
-	return nil
-}
-
-type MaxTag struct {
-	// Maximum possible tag number.
-	LastField *string `protobuf:"bytes,536870911,opt,name=last_field,json=lastField" json:"last_field,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MaxTag) Reset() { *m = MaxTag{} }
-func (m *MaxTag) String() string { return proto.CompactTextString(m) }
-func (*MaxTag) ProtoMessage() {}
-func (*MaxTag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
-
-func (m *MaxTag) GetLastField() string {
-	if m != nil && m.LastField != nil {
-		return *m.LastField
-	}
-	return ""
-}
-
-type OldMessage struct {
-	Nested *OldMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"`
-	Num *int32 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *OldMessage) Reset() { *m = OldMessage{} }
-func (m *OldMessage) String() string { return proto.CompactTextString(m) }
-func (*OldMessage) ProtoMessage() {}
-func (*OldMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
-
-func (m *OldMessage) GetNested() *OldMessage_Nested {
-	if m != nil {
-		return m.Nested
-	}
-	return nil
-}
-
-func (m *OldMessage) GetNum() int32 {
-	if m != nil && m.Num != nil {
-		return *m.Num
-	}
-	return 0
-}
-
-type OldMessage_Nested struct {
-	Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *OldMessage_Nested) Reset() { *m = OldMessage_Nested{} }
-func (m *OldMessage_Nested) String() string { return proto.CompactTextString(m) }
-func (*OldMessage_Nested) ProtoMessage() {}
-func (*OldMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8, 0} }
-
-func (m *OldMessage_Nested) GetName() string {
-	if m != nil && m.Name != nil {
-		return *m.Name
-	}
-	return ""
-}
-
-// NewMessage is wire compatible with OldMessage;
-// imagine it as a future version.
-type NewMessage struct {
-	Nested *NewMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"`
-	// This is an int32 in OldMessage.
- Num *int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *NewMessage) Reset() { *m = NewMessage{} } -func (m *NewMessage) String() string { return proto.CompactTextString(m) } -func (*NewMessage) ProtoMessage() {} -func (*NewMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } - -func (m *NewMessage) GetNested() *NewMessage_Nested { - if m != nil { - return m.Nested - } - return nil -} - -func (m *NewMessage) GetNum() int64 { - if m != nil && m.Num != nil { - return *m.Num - } - return 0 -} - -type NewMessage_Nested struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - FoodGroup *string `protobuf:"bytes,2,opt,name=food_group,json=foodGroup" json:"food_group,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *NewMessage_Nested) Reset() { *m = NewMessage_Nested{} } -func (m *NewMessage_Nested) String() string { return proto.CompactTextString(m) } -func (*NewMessage_Nested) ProtoMessage() {} -func (*NewMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } - -func (m *NewMessage_Nested) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *NewMessage_Nested) GetFoodGroup() string { - if m != nil && m.FoodGroup != nil { - return *m.FoodGroup - } - return "" -} - -type InnerMessage struct { - Host *string `protobuf:"bytes,1,req,name=host" json:"host,omitempty"` - Port *int32 `protobuf:"varint,2,opt,name=port,def=4000" json:"port,omitempty"` - Connected *bool `protobuf:"varint,3,opt,name=connected" json:"connected,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *InnerMessage) Reset() { *m = InnerMessage{} } -func (m *InnerMessage) String() string { return proto.CompactTextString(m) } -func (*InnerMessage) ProtoMessage() {} -func (*InnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } - -const Default_InnerMessage_Port int32 = 4000 - -func (m *InnerMessage) GetHost() string { - if m != nil && m.Host != nil { - return *m.Host - } - return "" -} - -func (m *InnerMessage) GetPort() int32 { - if m != nil && m.Port != nil { - return *m.Port - } - return Default_InnerMessage_Port -} - -func (m *InnerMessage) GetConnected() bool { - if m != nil && m.Connected != nil { - return *m.Connected - } - return false -} - -type OtherMessage struct { - Key *int64 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - Value []byte `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` - Weight *float32 `protobuf:"fixed32,3,opt,name=weight" json:"weight,omitempty"` - Inner *InnerMessage `protobuf:"bytes,4,opt,name=inner" json:"inner,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherMessage) Reset() { *m = OtherMessage{} } -func (m *OtherMessage) String() string { return proto.CompactTextString(m) } -func (*OtherMessage) ProtoMessage() {} -func (*OtherMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } - -var extRange_OtherMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*OtherMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OtherMessage -} - -func (m *OtherMessage) GetKey() int64 { - if m != nil && m.Key != nil { - return *m.Key - } - return 0 -} - -func (m *OtherMessage) GetValue() []byte { - if m != nil { - return m.Value - } - return nil -} - -func (m *OtherMessage) GetWeight() float32 { - if m != nil && m.Weight != nil { - return *m.Weight 
-	}
-	return 0
-}
-
-func (m *OtherMessage) GetInner() *InnerMessage {
-	if m != nil {
-		return m.Inner
-	}
-	return nil
-}
-
-type RequiredInnerMessage struct {
-	LeoFinallyWonAnOscar *InnerMessage `protobuf:"bytes,1,req,name=leo_finally_won_an_oscar,json=leoFinallyWonAnOscar" json:"leo_finally_won_an_oscar,omitempty"`
-	XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *RequiredInnerMessage) Reset() { *m = RequiredInnerMessage{} }
-func (m *RequiredInnerMessage) String() string { return proto.CompactTextString(m) }
-func (*RequiredInnerMessage) ProtoMessage() {}
-func (*RequiredInnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
-
-func (m *RequiredInnerMessage) GetLeoFinallyWonAnOscar() *InnerMessage {
-	if m != nil {
-		return m.LeoFinallyWonAnOscar
-	}
-	return nil
-}
-
-type MyMessage struct {
-	Count *int32 `protobuf:"varint,1,req,name=count" json:"count,omitempty"`
-	Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"`
-	Quote *string `protobuf:"bytes,3,opt,name=quote" json:"quote,omitempty"`
-	Pet []string `protobuf:"bytes,4,rep,name=pet" json:"pet,omitempty"`
-	Inner *InnerMessage `protobuf:"bytes,5,opt,name=inner" json:"inner,omitempty"`
-	Others []*OtherMessage `protobuf:"bytes,6,rep,name=others" json:"others,omitempty"`
-	WeMustGoDeeper *RequiredInnerMessage `protobuf:"bytes,13,opt,name=we_must_go_deeper,json=weMustGoDeeper" json:"we_must_go_deeper,omitempty"`
-	RepInner []*InnerMessage `protobuf:"bytes,12,rep,name=rep_inner,json=repInner" json:"rep_inner,omitempty"`
-	Bikeshed *MyMessage_Color `protobuf:"varint,7,opt,name=bikeshed,enum=testdata.MyMessage_Color" json:"bikeshed,omitempty"`
-	Somegroup *MyMessage_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"`
-	// This field becomes [][]byte in the generated code.
- RepBytes [][]byte `protobuf:"bytes,10,rep,name=rep_bytes,json=repBytes" json:"rep_bytes,omitempty"` - Bigfloat *float64 `protobuf:"fixed64,11,opt,name=bigfloat" json:"bigfloat,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessage) Reset() { *m = MyMessage{} } -func (m *MyMessage) String() string { return proto.CompactTextString(m) } -func (*MyMessage) ProtoMessage() {} -func (*MyMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } - -var extRange_MyMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*MyMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MyMessage -} - -func (m *MyMessage) GetCount() int32 { - if m != nil && m.Count != nil { - return *m.Count - } - return 0 -} - -func (m *MyMessage) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *MyMessage) GetQuote() string { - if m != nil && m.Quote != nil { - return *m.Quote - } - return "" -} - -func (m *MyMessage) GetPet() []string { - if m != nil { - return m.Pet - } - return nil -} - -func (m *MyMessage) GetInner() *InnerMessage { - if m != nil { - return m.Inner - } - return nil -} - -func (m *MyMessage) GetOthers() []*OtherMessage { - if m != nil { - return m.Others - } - return nil -} - -func (m *MyMessage) GetWeMustGoDeeper() *RequiredInnerMessage { - if m != nil { - return m.WeMustGoDeeper - } - return nil -} - -func (m *MyMessage) GetRepInner() []*InnerMessage { - if m != nil { - return m.RepInner - } - return nil -} - -func (m *MyMessage) GetBikeshed() MyMessage_Color { - if m != nil && m.Bikeshed != nil { - return *m.Bikeshed - } - return MyMessage_RED -} - -func (m *MyMessage) GetSomegroup() *MyMessage_SomeGroup { - if m != nil { - return m.Somegroup - } - return nil -} - -func (m *MyMessage) GetRepBytes() [][]byte { - if m != nil { - return m.RepBytes - } - return nil -} - -func (m *MyMessage) GetBigfloat() float64 { - if m != nil && m.Bigfloat != nil { - return *m.Bigfloat - } - return 0 -} - -type MyMessage_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessage_SomeGroup) Reset() { *m = MyMessage_SomeGroup{} } -func (m *MyMessage_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*MyMessage_SomeGroup) ProtoMessage() {} -func (*MyMessage_SomeGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } - -func (m *MyMessage_SomeGroup) GetGroupField() int32 { - if m != nil && m.GroupField != nil { - return *m.GroupField - } - return 0 -} - -type Ext struct { - Data *string `protobuf:"bytes,1,opt,name=data" json:"data,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Ext) Reset() { *m = Ext{} } -func (m *Ext) String() string { return proto.CompactTextString(m) } -func (*Ext) ProtoMessage() {} -func (*Ext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } - -func (m *Ext) GetData() string { - if m != nil && m.Data != nil { - return *m.Data - } - return "" -} - -var E_Ext_More = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*Ext)(nil), - Field: 103, - Name: "testdata.Ext.more", - Tag: "bytes,103,opt,name=more", - Filename: "test.proto", -} - -var E_Ext_Text = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*string)(nil), - Field: 104, - Name: "testdata.Ext.text", - Tag: "bytes,104,opt,name=text", - 
Filename: "test.proto", -} - -var E_Ext_Number = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 105, - Name: "testdata.Ext.number", - Tag: "varint,105,opt,name=number", - Filename: "test.proto", -} - -type ComplexExtension struct { - First *int32 `protobuf:"varint,1,opt,name=first" json:"first,omitempty"` - Second *int32 `protobuf:"varint,2,opt,name=second" json:"second,omitempty"` - Third []int32 `protobuf:"varint,3,rep,name=third" json:"third,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *ComplexExtension) Reset() { *m = ComplexExtension{} } -func (m *ComplexExtension) String() string { return proto.CompactTextString(m) } -func (*ComplexExtension) ProtoMessage() {} -func (*ComplexExtension) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } - -func (m *ComplexExtension) GetFirst() int32 { - if m != nil && m.First != nil { - return *m.First - } - return 0 -} - -func (m *ComplexExtension) GetSecond() int32 { - if m != nil && m.Second != nil { - return *m.Second - } - return 0 -} - -func (m *ComplexExtension) GetThird() []int32 { - if m != nil { - return m.Third - } - return nil -} - -type DefaultsMessage struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *DefaultsMessage) Reset() { *m = DefaultsMessage{} } -func (m *DefaultsMessage) String() string { return proto.CompactTextString(m) } -func (*DefaultsMessage) ProtoMessage() {} -func (*DefaultsMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } - -var extRange_DefaultsMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*DefaultsMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_DefaultsMessage -} - -type MyMessageSet struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessageSet) Reset() { *m = MyMessageSet{} } -func (m *MyMessageSet) String() string { return proto.CompactTextString(m) } -func (*MyMessageSet) ProtoMessage() {} -func (*MyMessageSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } - -func (m *MyMessageSet) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *MyMessageSet) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} -func (m *MyMessageSet) MarshalJSON() ([]byte, error) { - return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) -} -func (m *MyMessageSet) UnmarshalJSON(buf []byte) error { - return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) -} - -// ensure MyMessageSet satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*MyMessageSet)(nil) -var _ proto.Unmarshaler = (*MyMessageSet)(nil) - -var extRange_MyMessageSet = []proto.ExtensionRange{ - {100, 2147483646}, -} - -func (*MyMessageSet) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MyMessageSet -} - -type Empty struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *Empty) Reset() { *m = Empty{} } -func (m *Empty) String() string { return proto.CompactTextString(m) } -func (*Empty) ProtoMessage() {} -func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } - -type MessageList struct { - Message []*MessageList_Message `protobuf:"group,1,rep,name=Message,json=message" json:"message,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageList) Reset() { *m = MessageList{} } -func (m *MessageList) String() 
string { return proto.CompactTextString(m) } -func (*MessageList) ProtoMessage() {} -func (*MessageList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } - -func (m *MessageList) GetMessage() []*MessageList_Message { - if m != nil { - return m.Message - } - return nil -} - -type MessageList_Message struct { - Name *string `protobuf:"bytes,2,req,name=name" json:"name,omitempty"` - Count *int32 `protobuf:"varint,3,req,name=count" json:"count,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageList_Message) Reset() { *m = MessageList_Message{} } -func (m *MessageList_Message) String() string { return proto.CompactTextString(m) } -func (*MessageList_Message) ProtoMessage() {} -func (*MessageList_Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} } - -func (m *MessageList_Message) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *MessageList_Message) GetCount() int32 { - if m != nil && m.Count != nil { - return *m.Count - } - return 0 -} - -type Strings struct { - StringField *string `protobuf:"bytes,1,opt,name=string_field,json=stringField" json:"string_field,omitempty"` - BytesField []byte `protobuf:"bytes,2,opt,name=bytes_field,json=bytesField" json:"bytes_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Strings) Reset() { *m = Strings{} } -func (m *Strings) String() string { return proto.CompactTextString(m) } -func (*Strings) ProtoMessage() {} -func (*Strings) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } - -func (m *Strings) GetStringField() string { - if m != nil && m.StringField != nil { - return *m.StringField - } - return "" -} - -func (m *Strings) GetBytesField() []byte { - if m != nil { - return m.BytesField - } - return nil -} - -type Defaults struct { - // Default-valued fields of all basic types. - // Same as GoTest, but copied here to make testing easier. - F_Bool *bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,def=1" json:"F_Bool,omitempty"` - F_Int32 *int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,def=32" json:"F_Int32,omitempty"` - F_Int64 *int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,def=64" json:"F_Int64,omitempty"` - F_Fixed32 *uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,def=320" json:"F_Fixed32,omitempty"` - F_Fixed64 *uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,def=640" json:"F_Fixed64,omitempty"` - F_Uint32 *uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,def=3200" json:"F_Uint32,omitempty"` - F_Uint64 *uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,def=6400" json:"F_Uint64,omitempty"` - F_Float *float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,def=314159" json:"F_Float,omitempty"` - F_Double *float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,def=271828" json:"F_Double,omitempty"` - F_String *string `protobuf:"bytes,10,opt,name=F_String,json=FString,def=hello, \"world!\"\n" json:"F_String,omitempty"` - F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,def=Bignose" json:"F_Bytes,omitempty"` - F_Sint32 *int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,def=-32" json:"F_Sint32,omitempty"` - F_Sint64 *int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,def=-64" json:"F_Sint64,omitempty"` - F_Enum *Defaults_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.Defaults_Color,def=1" json:"F_Enum,omitempty"` - // More fields with crazy defaults. 
- F_Pinf *float32 `protobuf:"fixed32,15,opt,name=F_Pinf,json=FPinf,def=inf" json:"F_Pinf,omitempty"` - F_Ninf *float32 `protobuf:"fixed32,16,opt,name=F_Ninf,json=FNinf,def=-inf" json:"F_Ninf,omitempty"` - F_Nan *float32 `protobuf:"fixed32,17,opt,name=F_Nan,json=FNan,def=nan" json:"F_Nan,omitempty"` - // Sub-message. - Sub *SubDefaults `protobuf:"bytes,18,opt,name=sub" json:"sub,omitempty"` - // Redundant but explicit defaults. - StrZero *string `protobuf:"bytes,19,opt,name=str_zero,json=strZero,def=" json:"str_zero,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Defaults) Reset() { *m = Defaults{} } -func (m *Defaults) String() string { return proto.CompactTextString(m) } -func (*Defaults) ProtoMessage() {} -func (*Defaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } - -const Default_Defaults_F_Bool bool = true -const Default_Defaults_F_Int32 int32 = 32 -const Default_Defaults_F_Int64 int64 = 64 -const Default_Defaults_F_Fixed32 uint32 = 320 -const Default_Defaults_F_Fixed64 uint64 = 640 -const Default_Defaults_F_Uint32 uint32 = 3200 -const Default_Defaults_F_Uint64 uint64 = 6400 -const Default_Defaults_F_Float float32 = 314159 -const Default_Defaults_F_Double float64 = 271828 -const Default_Defaults_F_String string = "hello, \"world!\"\n" - -var Default_Defaults_F_Bytes []byte = []byte("Bignose") - -const Default_Defaults_F_Sint32 int32 = -32 -const Default_Defaults_F_Sint64 int64 = -64 -const Default_Defaults_F_Enum Defaults_Color = Defaults_GREEN - -var Default_Defaults_F_Pinf float32 = float32(math.Inf(1)) -var Default_Defaults_F_Ninf float32 = float32(math.Inf(-1)) -var Default_Defaults_F_Nan float32 = float32(math.NaN()) - -func (m *Defaults) GetF_Bool() bool { - if m != nil && m.F_Bool != nil { - return *m.F_Bool - } - return Default_Defaults_F_Bool -} - -func (m *Defaults) GetF_Int32() int32 { - if m != nil && m.F_Int32 != nil { - return *m.F_Int32 - } - return Default_Defaults_F_Int32 -} - -func (m *Defaults) GetF_Int64() int64 { - if m != nil && m.F_Int64 != nil { - return *m.F_Int64 - } - return Default_Defaults_F_Int64 -} - -func (m *Defaults) GetF_Fixed32() uint32 { - if m != nil && m.F_Fixed32 != nil { - return *m.F_Fixed32 - } - return Default_Defaults_F_Fixed32 -} - -func (m *Defaults) GetF_Fixed64() uint64 { - if m != nil && m.F_Fixed64 != nil { - return *m.F_Fixed64 - } - return Default_Defaults_F_Fixed64 -} - -func (m *Defaults) GetF_Uint32() uint32 { - if m != nil && m.F_Uint32 != nil { - return *m.F_Uint32 - } - return Default_Defaults_F_Uint32 -} - -func (m *Defaults) GetF_Uint64() uint64 { - if m != nil && m.F_Uint64 != nil { - return *m.F_Uint64 - } - return Default_Defaults_F_Uint64 -} - -func (m *Defaults) GetF_Float() float32 { - if m != nil && m.F_Float != nil { - return *m.F_Float - } - return Default_Defaults_F_Float -} - -func (m *Defaults) GetF_Double() float64 { - if m != nil && m.F_Double != nil { - return *m.F_Double - } - return Default_Defaults_F_Double -} - -func (m *Defaults) GetF_String() string { - if m != nil && m.F_String != nil { - return *m.F_String - } - return Default_Defaults_F_String -} - -func (m *Defaults) GetF_Bytes() []byte { - if m != nil && m.F_Bytes != nil { - return m.F_Bytes - } - return append([]byte(nil), Default_Defaults_F_Bytes...) 
-} - -func (m *Defaults) GetF_Sint32() int32 { - if m != nil && m.F_Sint32 != nil { - return *m.F_Sint32 - } - return Default_Defaults_F_Sint32 -} - -func (m *Defaults) GetF_Sint64() int64 { - if m != nil && m.F_Sint64 != nil { - return *m.F_Sint64 - } - return Default_Defaults_F_Sint64 -} - -func (m *Defaults) GetF_Enum() Defaults_Color { - if m != nil && m.F_Enum != nil { - return *m.F_Enum - } - return Default_Defaults_F_Enum -} - -func (m *Defaults) GetF_Pinf() float32 { - if m != nil && m.F_Pinf != nil { - return *m.F_Pinf - } - return Default_Defaults_F_Pinf -} - -func (m *Defaults) GetF_Ninf() float32 { - if m != nil && m.F_Ninf != nil { - return *m.F_Ninf - } - return Default_Defaults_F_Ninf -} - -func (m *Defaults) GetF_Nan() float32 { - if m != nil && m.F_Nan != nil { - return *m.F_Nan - } - return Default_Defaults_F_Nan -} - -func (m *Defaults) GetSub() *SubDefaults { - if m != nil { - return m.Sub - } - return nil -} - -func (m *Defaults) GetStrZero() string { - if m != nil && m.StrZero != nil { - return *m.StrZero - } - return "" -} - -type SubDefaults struct { - N *int64 `protobuf:"varint,1,opt,name=n,def=7" json:"n,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *SubDefaults) Reset() { *m = SubDefaults{} } -func (m *SubDefaults) String() string { return proto.CompactTextString(m) } -func (*SubDefaults) ProtoMessage() {} -func (*SubDefaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } - -const Default_SubDefaults_N int64 = 7 - -func (m *SubDefaults) GetN() int64 { - if m != nil && m.N != nil { - return *m.N - } - return Default_SubDefaults_N -} - -type RepeatedEnum struct { - Color []RepeatedEnum_Color `protobuf:"varint,1,rep,name=color,enum=testdata.RepeatedEnum_Color" json:"color,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *RepeatedEnum) Reset() { *m = RepeatedEnum{} } -func (m *RepeatedEnum) String() string { return proto.CompactTextString(m) } -func (*RepeatedEnum) ProtoMessage() {} -func (*RepeatedEnum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } - -func (m *RepeatedEnum) GetColor() []RepeatedEnum_Color { - if m != nil { - return m.Color - } - return nil -} - -type MoreRepeated struct { - Bools []bool `protobuf:"varint,1,rep,name=bools" json:"bools,omitempty"` - BoolsPacked []bool `protobuf:"varint,2,rep,packed,name=bools_packed,json=boolsPacked" json:"bools_packed,omitempty"` - Ints []int32 `protobuf:"varint,3,rep,name=ints" json:"ints,omitempty"` - IntsPacked []int32 `protobuf:"varint,4,rep,packed,name=ints_packed,json=intsPacked" json:"ints_packed,omitempty"` - Int64SPacked []int64 `protobuf:"varint,7,rep,packed,name=int64s_packed,json=int64sPacked" json:"int64s_packed,omitempty"` - Strings []string `protobuf:"bytes,5,rep,name=strings" json:"strings,omitempty"` - Fixeds []uint32 `protobuf:"fixed32,6,rep,name=fixeds" json:"fixeds,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MoreRepeated) Reset() { *m = MoreRepeated{} } -func (m *MoreRepeated) String() string { return proto.CompactTextString(m) } -func (*MoreRepeated) ProtoMessage() {} -func (*MoreRepeated) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } - -func (m *MoreRepeated) GetBools() []bool { - if m != nil { - return m.Bools - } - return nil -} - -func (m *MoreRepeated) GetBoolsPacked() []bool { - if m != nil { - return m.BoolsPacked - } - return nil -} - -func (m *MoreRepeated) GetInts() []int32 { - if m != nil { - return m.Ints - } - return nil -} - -func (m *MoreRepeated) GetIntsPacked() 
[]int32 { - if m != nil { - return m.IntsPacked - } - return nil -} - -func (m *MoreRepeated) GetInt64SPacked() []int64 { - if m != nil { - return m.Int64SPacked - } - return nil -} - -func (m *MoreRepeated) GetStrings() []string { - if m != nil { - return m.Strings - } - return nil -} - -func (m *MoreRepeated) GetFixeds() []uint32 { - if m != nil { - return m.Fixeds - } - return nil -} - -type GroupOld struct { - G *GroupOld_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupOld) Reset() { *m = GroupOld{} } -func (m *GroupOld) String() string { return proto.CompactTextString(m) } -func (*GroupOld) ProtoMessage() {} -func (*GroupOld) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } - -func (m *GroupOld) GetG() *GroupOld_G { - if m != nil { - return m.G - } - return nil -} - -type GroupOld_G struct { - X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupOld_G) Reset() { *m = GroupOld_G{} } -func (m *GroupOld_G) String() string { return proto.CompactTextString(m) } -func (*GroupOld_G) ProtoMessage() {} -func (*GroupOld_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25, 0} } - -func (m *GroupOld_G) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -type GroupNew struct { - G *GroupNew_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupNew) Reset() { *m = GroupNew{} } -func (m *GroupNew) String() string { return proto.CompactTextString(m) } -func (*GroupNew) ProtoMessage() {} -func (*GroupNew) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } - -func (m *GroupNew) GetG() *GroupNew_G { - if m != nil { - return m.G - } - return nil -} - -type GroupNew_G struct { - X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` - Y *int32 `protobuf:"varint,3,opt,name=y" json:"y,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupNew_G) Reset() { *m = GroupNew_G{} } -func (m *GroupNew_G) String() string { return proto.CompactTextString(m) } -func (*GroupNew_G) ProtoMessage() {} -func (*GroupNew_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26, 0} } - -func (m *GroupNew_G) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -func (m *GroupNew_G) GetY() int32 { - if m != nil && m.Y != nil { - return *m.Y - } - return 0 -} - -type FloatingPoint struct { - F *float64 `protobuf:"fixed64,1,req,name=f" json:"f,omitempty"` - Exact *bool `protobuf:"varint,2,opt,name=exact" json:"exact,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FloatingPoint) Reset() { *m = FloatingPoint{} } -func (m *FloatingPoint) String() string { return proto.CompactTextString(m) } -func (*FloatingPoint) ProtoMessage() {} -func (*FloatingPoint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } - -func (m *FloatingPoint) GetF() float64 { - if m != nil && m.F != nil { - return *m.F - } - return 0 -} - -func (m *FloatingPoint) GetExact() bool { - if m != nil && m.Exact != nil { - return *m.Exact - } - return false -} - -type MessageWithMap struct { - NameMapping map[int32]string `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - MsgMapping map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" 
protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - ByteMapping map[bool][]byte `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - StrToStr map[string]string `protobuf:"bytes,4,rep,name=str_to_str,json=strToStr" json:"str_to_str,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } -func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } -func (*MessageWithMap) ProtoMessage() {} -func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } - -func (m *MessageWithMap) GetNameMapping() map[int32]string { - if m != nil { - return m.NameMapping - } - return nil -} - -func (m *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint { - if m != nil { - return m.MsgMapping - } - return nil -} - -func (m *MessageWithMap) GetByteMapping() map[bool][]byte { - if m != nil { - return m.ByteMapping - } - return nil -} - -func (m *MessageWithMap) GetStrToStr() map[string]string { - if m != nil { - return m.StrToStr - } - return nil -} - -type Oneof struct { - // Types that are valid to be assigned to Union: - // *Oneof_F_Bool - // *Oneof_F_Int32 - // *Oneof_F_Int64 - // *Oneof_F_Fixed32 - // *Oneof_F_Fixed64 - // *Oneof_F_Uint32 - // *Oneof_F_Uint64 - // *Oneof_F_Float - // *Oneof_F_Double - // *Oneof_F_String - // *Oneof_F_Bytes - // *Oneof_F_Sint32 - // *Oneof_F_Sint64 - // *Oneof_F_Enum - // *Oneof_F_Message - // *Oneof_FGroup - // *Oneof_F_Largest_Tag - Union isOneof_Union `protobuf_oneof:"union"` - // Types that are valid to be assigned to Tormato: - // *Oneof_Value - Tormato isOneof_Tormato `protobuf_oneof:"tormato"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Oneof) Reset() { *m = Oneof{} } -func (m *Oneof) String() string { return proto.CompactTextString(m) } -func (*Oneof) ProtoMessage() {} -func (*Oneof) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } - -type isOneof_Union interface { - isOneof_Union() -} -type isOneof_Tormato interface { - isOneof_Tormato() -} - -type Oneof_F_Bool struct { - F_Bool bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,oneof"` -} -type Oneof_F_Int32 struct { - F_Int32 int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,oneof"` -} -type Oneof_F_Int64 struct { - F_Int64 int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,oneof"` -} -type Oneof_F_Fixed32 struct { - F_Fixed32 uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,oneof"` -} -type Oneof_F_Fixed64 struct { - F_Fixed64 uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,oneof"` -} -type Oneof_F_Uint32 struct { - F_Uint32 uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,oneof"` -} -type Oneof_F_Uint64 struct { - F_Uint64 uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,oneof"` -} -type Oneof_F_Float struct { - F_Float float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,oneof"` -} -type Oneof_F_Double struct { - F_Double float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,oneof"` -} -type Oneof_F_String struct { - F_String string `protobuf:"bytes,10,opt,name=F_String,json=FString,oneof"` -} -type Oneof_F_Bytes struct { - F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,oneof"` -} -type Oneof_F_Sint32 struct { - F_Sint32 int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,oneof"` 
-} -type Oneof_F_Sint64 struct { - F_Sint64 int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,oneof"` -} -type Oneof_F_Enum struct { - F_Enum MyMessage_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.MyMessage_Color,oneof"` -} -type Oneof_F_Message struct { - F_Message *GoTestField `protobuf:"bytes,15,opt,name=F_Message,json=FMessage,oneof"` -} -type Oneof_FGroup struct { - FGroup *Oneof_F_Group `protobuf:"group,16,opt,name=F_Group,json=fGroup,oneof"` -} -type Oneof_F_Largest_Tag struct { - F_Largest_Tag int32 `protobuf:"varint,536870911,opt,name=F_Largest_Tag,json=FLargestTag,oneof"` -} -type Oneof_Value struct { - Value int32 `protobuf:"varint,100,opt,name=value,oneof"` -} - -func (*Oneof_F_Bool) isOneof_Union() {} -func (*Oneof_F_Int32) isOneof_Union() {} -func (*Oneof_F_Int64) isOneof_Union() {} -func (*Oneof_F_Fixed32) isOneof_Union() {} -func (*Oneof_F_Fixed64) isOneof_Union() {} -func (*Oneof_F_Uint32) isOneof_Union() {} -func (*Oneof_F_Uint64) isOneof_Union() {} -func (*Oneof_F_Float) isOneof_Union() {} -func (*Oneof_F_Double) isOneof_Union() {} -func (*Oneof_F_String) isOneof_Union() {} -func (*Oneof_F_Bytes) isOneof_Union() {} -func (*Oneof_F_Sint32) isOneof_Union() {} -func (*Oneof_F_Sint64) isOneof_Union() {} -func (*Oneof_F_Enum) isOneof_Union() {} -func (*Oneof_F_Message) isOneof_Union() {} -func (*Oneof_FGroup) isOneof_Union() {} -func (*Oneof_F_Largest_Tag) isOneof_Union() {} -func (*Oneof_Value) isOneof_Tormato() {} - -func (m *Oneof) GetUnion() isOneof_Union { - if m != nil { - return m.Union - } - return nil -} -func (m *Oneof) GetTormato() isOneof_Tormato { - if m != nil { - return m.Tormato - } - return nil -} - -func (m *Oneof) GetF_Bool() bool { - if x, ok := m.GetUnion().(*Oneof_F_Bool); ok { - return x.F_Bool - } - return false -} - -func (m *Oneof) GetF_Int32() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Int32); ok { - return x.F_Int32 - } - return 0 -} - -func (m *Oneof) GetF_Int64() int64 { - if x, ok := m.GetUnion().(*Oneof_F_Int64); ok { - return x.F_Int64 - } - return 0 -} - -func (m *Oneof) GetF_Fixed32() uint32 { - if x, ok := m.GetUnion().(*Oneof_F_Fixed32); ok { - return x.F_Fixed32 - } - return 0 -} - -func (m *Oneof) GetF_Fixed64() uint64 { - if x, ok := m.GetUnion().(*Oneof_F_Fixed64); ok { - return x.F_Fixed64 - } - return 0 -} - -func (m *Oneof) GetF_Uint32() uint32 { - if x, ok := m.GetUnion().(*Oneof_F_Uint32); ok { - return x.F_Uint32 - } - return 0 -} - -func (m *Oneof) GetF_Uint64() uint64 { - if x, ok := m.GetUnion().(*Oneof_F_Uint64); ok { - return x.F_Uint64 - } - return 0 -} - -func (m *Oneof) GetF_Float() float32 { - if x, ok := m.GetUnion().(*Oneof_F_Float); ok { - return x.F_Float - } - return 0 -} - -func (m *Oneof) GetF_Double() float64 { - if x, ok := m.GetUnion().(*Oneof_F_Double); ok { - return x.F_Double - } - return 0 -} - -func (m *Oneof) GetF_String() string { - if x, ok := m.GetUnion().(*Oneof_F_String); ok { - return x.F_String - } - return "" -} - -func (m *Oneof) GetF_Bytes() []byte { - if x, ok := m.GetUnion().(*Oneof_F_Bytes); ok { - return x.F_Bytes - } - return nil -} - -func (m *Oneof) GetF_Sint32() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Sint32); ok { - return x.F_Sint32 - } - return 0 -} - -func (m *Oneof) GetF_Sint64() int64 { - if x, ok := m.GetUnion().(*Oneof_F_Sint64); ok { - return x.F_Sint64 - } - return 0 -} - -func (m *Oneof) GetF_Enum() MyMessage_Color { - if x, ok := m.GetUnion().(*Oneof_F_Enum); ok { - return x.F_Enum - } - return MyMessage_RED -} - -func (m *Oneof) 
GetF_Message() *GoTestField { - if x, ok := m.GetUnion().(*Oneof_F_Message); ok { - return x.F_Message - } - return nil -} - -func (m *Oneof) GetFGroup() *Oneof_F_Group { - if x, ok := m.GetUnion().(*Oneof_FGroup); ok { - return x.FGroup - } - return nil -} - -func (m *Oneof) GetF_Largest_Tag() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Largest_Tag); ok { - return x.F_Largest_Tag - } - return 0 -} - -func (m *Oneof) GetValue() int32 { - if x, ok := m.GetTormato().(*Oneof_Value); ok { - return x.Value - } - return 0 -} - -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Oneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Oneof_OneofMarshaler, _Oneof_OneofUnmarshaler, _Oneof_OneofSizer, []interface{}{ - (*Oneof_F_Bool)(nil), - (*Oneof_F_Int32)(nil), - (*Oneof_F_Int64)(nil), - (*Oneof_F_Fixed32)(nil), - (*Oneof_F_Fixed64)(nil), - (*Oneof_F_Uint32)(nil), - (*Oneof_F_Uint64)(nil), - (*Oneof_F_Float)(nil), - (*Oneof_F_Double)(nil), - (*Oneof_F_String)(nil), - (*Oneof_F_Bytes)(nil), - (*Oneof_F_Sint32)(nil), - (*Oneof_F_Sint64)(nil), - (*Oneof_F_Enum)(nil), - (*Oneof_F_Message)(nil), - (*Oneof_FGroup)(nil), - (*Oneof_F_Largest_Tag)(nil), - (*Oneof_Value)(nil), - } -} - -func _Oneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Oneof) - // union - switch x := m.Union.(type) { - case *Oneof_F_Bool: - t := uint64(0) - if x.F_Bool { - t = 1 - } - b.EncodeVarint(1<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Oneof_F_Int32: - b.EncodeVarint(2<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Int32)) - case *Oneof_F_Int64: - b.EncodeVarint(3<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Int64)) - case *Oneof_F_Fixed32: - b.EncodeVarint(4<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(x.F_Fixed32)) - case *Oneof_F_Fixed64: - b.EncodeVarint(5<<3 | proto.WireFixed64) - b.EncodeFixed64(uint64(x.F_Fixed64)) - case *Oneof_F_Uint32: - b.EncodeVarint(6<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Uint32)) - case *Oneof_F_Uint64: - b.EncodeVarint(7<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Uint64)) - case *Oneof_F_Float: - b.EncodeVarint(8<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.F_Float))) - case *Oneof_F_Double: - b.EncodeVarint(9<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.F_Double)) - case *Oneof_F_String: - b.EncodeVarint(10<<3 | proto.WireBytes) - b.EncodeStringBytes(x.F_String) - case *Oneof_F_Bytes: - b.EncodeVarint(11<<3 | proto.WireBytes) - b.EncodeRawBytes(x.F_Bytes) - case *Oneof_F_Sint32: - b.EncodeVarint(12<<3 | proto.WireVarint) - b.EncodeZigzag32(uint64(x.F_Sint32)) - case *Oneof_F_Sint64: - b.EncodeVarint(13<<3 | proto.WireVarint) - b.EncodeZigzag64(uint64(x.F_Sint64)) - case *Oneof_F_Enum: - b.EncodeVarint(14<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Enum)) - case *Oneof_F_Message: - b.EncodeVarint(15<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.F_Message); err != nil { - return err - } - case *Oneof_FGroup: - b.EncodeVarint(16<<3 | proto.WireStartGroup) - if err := b.Marshal(x.FGroup); err != nil { - return err - } - b.EncodeVarint(16<<3 | proto.WireEndGroup) - case *Oneof_F_Largest_Tag: - b.EncodeVarint(536870911<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Largest_Tag)) - case nil: - default: - return fmt.Errorf("Oneof.Union has unexpected type %T", x) - } - // tormato - switch x := m.Tormato.(type) 
{ - case *Oneof_Value: - b.EncodeVarint(100<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Value)) - case nil: - default: - return fmt.Errorf("Oneof.Tormato has unexpected type %T", x) - } - return nil -} - -func _Oneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Oneof) - switch tag { - case 1: // union.F_Bool - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Bool{x != 0} - return true, err - case 2: // union.F_Int32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Int32{int32(x)} - return true, err - case 3: // union.F_Int64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Int64{int64(x)} - return true, err - case 4: // union.F_Fixed32 - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Oneof_F_Fixed32{uint32(x)} - return true, err - case 5: // union.F_Fixed64 - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Oneof_F_Fixed64{x} - return true, err - case 6: // union.F_Uint32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Uint32{uint32(x)} - return true, err - case 7: // union.F_Uint64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Uint64{x} - return true, err - case 8: // union.F_Float - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Oneof_F_Float{math.Float32frombits(uint32(x))} - return true, err - case 9: // union.F_Double - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Oneof_F_Double{math.Float64frombits(x)} - return true, err - case 10: // union.F_String - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Oneof_F_String{x} - return true, err - case 11: // union.F_Bytes - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Oneof_F_Bytes{x} - return true, err - case 12: // union.F_Sint32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag32() - m.Union = &Oneof_F_Sint32{int32(x)} - return true, err - case 13: // union.F_Sint64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag64() - m.Union = &Oneof_F_Sint64{int64(x)} - return true, err - case 14: // union.F_Enum - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Enum{MyMessage_Color(x)} - return true, err - case 15: // union.F_Message - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(GoTestField) - err := b.DecodeMessage(msg) - m.Union = &Oneof_F_Message{msg} - return true, err - case 16: // union.f_group - if wire != proto.WireStartGroup { - return true, proto.ErrInternalBadWireType - } - msg := new(Oneof_F_Group) - err := b.DecodeGroup(msg) - m.Union = &Oneof_FGroup{msg} - return true, err - case 536870911: // 
union.F_Largest_Tag - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Largest_Tag{int32(x)} - return true, err - case 100: // tormato.value - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Tormato = &Oneof_Value{int32(x)} - return true, err - default: - return false, nil - } -} - -func _Oneof_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Oneof) - // union - switch x := m.Union.(type) { - case *Oneof_F_Bool: - n += proto.SizeVarint(1<<3 | proto.WireVarint) - n += 1 - case *Oneof_F_Int32: - n += proto.SizeVarint(2<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Int32)) - case *Oneof_F_Int64: - n += proto.SizeVarint(3<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Int64)) - case *Oneof_F_Fixed32: - n += proto.SizeVarint(4<<3 | proto.WireFixed32) - n += 4 - case *Oneof_F_Fixed64: - n += proto.SizeVarint(5<<3 | proto.WireFixed64) - n += 8 - case *Oneof_F_Uint32: - n += proto.SizeVarint(6<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Uint32)) - case *Oneof_F_Uint64: - n += proto.SizeVarint(7<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Uint64)) - case *Oneof_F_Float: - n += proto.SizeVarint(8<<3 | proto.WireFixed32) - n += 4 - case *Oneof_F_Double: - n += proto.SizeVarint(9<<3 | proto.WireFixed64) - n += 8 - case *Oneof_F_String: - n += proto.SizeVarint(10<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.F_String))) - n += len(x.F_String) - case *Oneof_F_Bytes: - n += proto.SizeVarint(11<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.F_Bytes))) - n += len(x.F_Bytes) - case *Oneof_F_Sint32: - n += proto.SizeVarint(12<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64((uint32(x.F_Sint32) << 1) ^ uint32((int32(x.F_Sint32) >> 31)))) - case *Oneof_F_Sint64: - n += proto.SizeVarint(13<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(uint64(x.F_Sint64<<1) ^ uint64((int64(x.F_Sint64) >> 63)))) - case *Oneof_F_Enum: - n += proto.SizeVarint(14<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Enum)) - case *Oneof_F_Message: - s := proto.Size(x.F_Message) - n += proto.SizeVarint(15<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Oneof_FGroup: - n += proto.SizeVarint(16<<3 | proto.WireStartGroup) - n += proto.Size(x.FGroup) - n += proto.SizeVarint(16<<3 | proto.WireEndGroup) - case *Oneof_F_Largest_Tag: - n += proto.SizeVarint(536870911<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Largest_Tag)) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - // tormato - switch x := m.Tormato.(type) { - case *Oneof_Value: - n += proto.SizeVarint(100<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Value)) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type Oneof_F_Group struct { - X *int32 `protobuf:"varint,17,opt,name=x" json:"x,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Oneof_F_Group) Reset() { *m = Oneof_F_Group{} } -func (m *Oneof_F_Group) String() string { return proto.CompactTextString(m) } -func (*Oneof_F_Group) ProtoMessage() {} -func (*Oneof_F_Group) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29, 0} } - -func (m *Oneof_F_Group) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -type Communique struct { - MakeMeCry *bool 
`protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` - // This is a oneof, called "union". - // - // Types that are valid to be assigned to Union: - // *Communique_Number - // *Communique_Name - // *Communique_Data - // *Communique_TempC - // *Communique_Col - // *Communique_Msg - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique) Reset() { *m = Communique{} } -func (m *Communique) String() string { return proto.CompactTextString(m) } -func (*Communique) ProtoMessage() {} -func (*Communique) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } - -type isCommunique_Union interface { - isCommunique_Union() -} - -type Communique_Number struct { - Number int32 `protobuf:"varint,5,opt,name=number,oneof"` -} -type Communique_Name struct { - Name string `protobuf:"bytes,6,opt,name=name,oneof"` -} -type Communique_Data struct { - Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` -} -type Communique_TempC struct { - TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` -} -type Communique_Col struct { - Col MyMessage_Color `protobuf:"varint,9,opt,name=col,enum=testdata.MyMessage_Color,oneof"` -} -type Communique_Msg struct { - Msg *Strings `protobuf:"bytes,10,opt,name=msg,oneof"` -} - -func (*Communique_Number) isCommunique_Union() {} -func (*Communique_Name) isCommunique_Union() {} -func (*Communique_Data) isCommunique_Union() {} -func (*Communique_TempC) isCommunique_Union() {} -func (*Communique_Col) isCommunique_Union() {} -func (*Communique_Msg) isCommunique_Union() {} - -func (m *Communique) GetUnion() isCommunique_Union { - if m != nil { - return m.Union - } - return nil -} - -func (m *Communique) GetMakeMeCry() bool { - if m != nil && m.MakeMeCry != nil { - return *m.MakeMeCry - } - return false -} - -func (m *Communique) GetNumber() int32 { - if x, ok := m.GetUnion().(*Communique_Number); ok { - return x.Number - } - return 0 -} - -func (m *Communique) GetName() string { - if x, ok := m.GetUnion().(*Communique_Name); ok { - return x.Name - } - return "" -} - -func (m *Communique) GetData() []byte { - if x, ok := m.GetUnion().(*Communique_Data); ok { - return x.Data - } - return nil -} - -func (m *Communique) GetTempC() float64 { - if x, ok := m.GetUnion().(*Communique_TempC); ok { - return x.TempC - } - return 0 -} - -func (m *Communique) GetCol() MyMessage_Color { - if x, ok := m.GetUnion().(*Communique_Col); ok { - return x.Col - } - return MyMessage_RED -} - -func (m *Communique) GetMsg() *Strings { - if x, ok := m.GetUnion().(*Communique_Msg); ok { - return x.Msg - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
[The rest of the deleted, machine-generated test.pb.go code is summarized here rather than reproduced line by line. The removed block (all `-` lines) contained: the Communique oneof plumbing (XXX_OneofFuncs, _Communique_OneofMarshaler, _Communique_OneofUnmarshaler and _Communique_OneofSizer); the testdata extension descriptors (E_Greeting, E_Complex, E_RComplex, the no_default_*/default_* extensions on DefaultsMessage for fields 101-116 and 201-216, and the MyMessageSet extensions E_X201 through E_X250); the init() functions registering every testdata message, enum and extension and calling proto.RegisterFile("test.proto", fileDescriptor0); and the body of fileDescriptor0 itself, a 4453-byte gzipped FileDescriptorProto emitted as a hex byte literal, which closes just below.]
-}
diff --git a/vendor/github.com/golang/protobuf/proto/text.go b/vendor/github.com/golang/protobuf/proto/text.go
index 965876bf..2205fdaa 100644
--- a/vendor/github.com/golang/protobuf/proto/text.go
+++ b/vendor/github.com/golang/protobuf/proto/text.go
@@ -50,7 +50,6 @@ import (
 var (
 	newline         = []byte("\n")
 	spaces          = []byte("                                        ")
-	gtNewline       = []byte(">\n")
 	endBraceNewline = []byte("}\n")
 	backslashN      = []byte{'\\', 'n'}
 	backslashR      = []byte{'\\', 'r'}
@@ -170,11 +169,6 @@ func writeName(w *textWriter, props *Properties) error {
 	return nil
 }
 
-// raw is the interface satisfied by RawMessage.
-type raw interface {
-	Bytes() []byte
-}
-
 func requiresQuotes(u string) bool {
 	// When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted.
 	for _, ch := range u {
@@ -269,6 +263,10 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
 		props := sprops.Prop[i]
 		name := st.Field(i).Name
 
+		if name == "XXX_NoUnkeyedLiteral" {
+			continue
+		}
+
 		if strings.HasPrefix(name, "XXX_") {
 			// There are two XXX_ fields:
 			//   XXX_unrecognized []byte
@@ -436,12 +434,6 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
 				return err
 			}
 		}
-		if b, ok := fv.Interface().(raw); ok {
-			if err := writeRaw(w, b.Bytes()); err != nil {
-				return err
-			}
-			continue
-		}
 
 		// Enums have a String method, so writeAny will work fine.
 		if err := tm.writeAny(w, fv, props); err != nil {
@@ -455,7 +447,7 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
 
 	// Extensions (the XXX_extensions field).
 	pv := sv.Addr()
-	if _, ok := extendable(pv.Interface()); ok {
+	if _, err := extendable(pv.Interface()); err == nil {
 		if err := tm.writeExtensions(w, pv); err != nil {
 			return err
 		}
@@ -464,27 +456,6 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
 	return nil
 }
 
-// writeRaw writes an uninterpreted raw message.
-func writeRaw(w *textWriter, b []byte) error {
-	if err := w.WriteByte('<'); err != nil {
-		return err
-	}
-	if !w.compact {
-		if err := w.WriteByte('\n'); err != nil {
-			return err
-		}
-	}
-	w.indent()
-	if err := writeUnknownStruct(w, b); err != nil {
-		return err
-	}
-	w.unindent()
-	if err := w.WriteByte('>'); err != nil {
-		return err
-	}
-	return nil
-}
-
 // writeAny writes an arbitrary field.
 func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
 	v = reflect.Indirect(v)
@@ -535,6 +506,19 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
 		}
 	}
 	w.indent()
+	if v.CanAddr() {
+		// Calling v.Interface on a struct causes the reflect package to
+		// copy the entire struct. This is racy with the new Marshaler
+		// since we atomically update the XXX_sizecache.
+		//
+		// Thus, we retrieve a pointer to the struct if possible to avoid
+		// a race since v.Interface on the pointer doesn't copy the struct.
+		//
+		// If v is not addressable, then we are not worried about a race
+		// since it implies that the binary Marshaler cannot possibly be
+		// mutating this value.
+		v = v.Addr()
+	}
 	if etm, ok := v.Interface().(encoding.TextMarshaler); ok {
 		text, err := etm.MarshalText()
 		if err != nil {
@@ -543,8 +527,13 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
 		if _, err = w.Write(text); err != nil {
 			return err
 		}
-	} else if err := tm.writeStruct(w, v); err != nil {
-		return err
+	} else {
+		if v.Kind() == reflect.Ptr {
+			v = v.Elem()
+		}
+		if err := tm.writeStruct(w, v); err != nil {
+			return err
+		}
 	}
 	w.unindent()
 	if err := w.WriteByte(ket); err != nil {
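Aside (not part of the patch): the writeAny change above relies on a reflect detail -- Interface() on an addressable struct Value copies the whole struct, while Interface() on the value's address just wraps the existing pointer. A minimal standalone sketch of that behavior; the msg type and its sizeCache field are illustrative stand-ins for a generated message and its XXX_sizecache:

package main

import (
	"fmt"
	"reflect"
)

// msg stands in for a generated message with a mutable cache field.
type msg struct{ sizeCache int32 }

func main() {
	m := &msg{sizeCache: 1}
	v := reflect.ValueOf(m).Elem() // addressable struct value

	copied := v.Interface()     // copies the struct
	ptr := v.Addr().Interface() // wraps the existing *msg, no copy

	m.sizeCache = 2 // stand-in for a concurrent marshal updating the cache

	fmt.Println(copied.(msg).sizeCache) // 1: the copy is already stale
	fmt.Println(ptr.(*msg).sizeCache)   // 2: the pointer sees the update
}

Because the new code only takes the address when v.CanAddr() reports true, non-addressable values keep the old copying path, which the diff's own comment notes is safe since nothing else can be mutating them.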
diff --git a/vendor/github.com/golang/protobuf/proto/text_parser.go b/vendor/github.com/golang/protobuf/proto/text_parser.go
index 61f83c1e..0685bae3 100644
--- a/vendor/github.com/golang/protobuf/proto/text_parser.go
+++ b/vendor/github.com/golang/protobuf/proto/text_parser.go
@@ -206,7 +206,6 @@ func (p *textParser) advance() {
 
 var (
 	errBadUTF8 = errors.New("proto: bad UTF-8")
-	errBadHex  = errors.New("proto: bad hexadecimal")
 )
 
 func unquoteC(s string, quote rune) (string, error) {
@@ -277,60 +276,47 @@ func unescape(s string) (ch string, tail string, err error) {
 		return "?", s, nil // trigraph workaround
 	case '\'', '"', '\\':
 		return string(r), s, nil
-	case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X':
+	case '0', '1', '2', '3', '4', '5', '6', '7':
 		if len(s) < 2 {
 			return "", "", fmt.Errorf(`\%c requires 2 following digits`, r)
 		}
-		base := 8
-		ss := s[:2]
+		ss := string(r) + s[:2]
 		s = s[2:]
-		if r == 'x' || r == 'X' {
-			base = 16
-		} else {
-			ss = string(r) + ss
-		}
-		i, err := strconv.ParseUint(ss, base, 8)
+		i, err := strconv.ParseUint(ss, 8, 8)
 		if err != nil {
-			return "", "", err
+			return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss)
 		}
 		return string([]byte{byte(i)}), s, nil
-	case 'u', 'U':
-		n := 4
-		if r == 'U' {
+	case 'x', 'X', 'u', 'U':
+		var n int
+		switch r {
+		case 'x', 'X':
+			n = 2
+		case 'u':
+			n = 4
+		case 'U':
 			n = 8
 		}
 		if len(s) < n {
-			return "", "", fmt.Errorf(`\%c requires %d digits`, r, n)
-		}
-
-		bs := make([]byte, n/2)
-		for i := 0; i < n; i += 2 {
-			a, ok1 := unhex(s[i])
-			b, ok2 := unhex(s[i+1])
-			if !ok1 || !ok2 {
-				return "", "", errBadHex
-			}
-			bs[i/2] = a<<4 | b
+			return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n)
 		}
+		ss := s[:n]
 		s = s[n:]
-		return string(bs), s, nil
+		i, err := strconv.ParseUint(ss, 16, 64)
+		if err != nil {
+			return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss)
+		}
+		if r == 'x' || r == 'X' {
+			return string([]byte{byte(i)}), s, nil
+		}
+		if i > utf8.MaxRune {
+			return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss)
+		}
+		return string(i), s, nil
 	}
 	return "", "", fmt.Errorf(`unknown escape \%c`, r)
 }
 
-// Adapted from src/pkg/strconv/quote.go.
-func unhex(b byte) (v byte, ok bool) {
-	switch {
-	case '0' <= b && b <= '9':
-		return b - '0', true
-	case 'a' <= b && b <= 'f':
-		return b - 'a' + 10, true
-	case 'A' <= b && b <= 'F':
-		return b - 'A' + 10, true
-	}
-	return 0, false
-}
-
 // Back off the parser by one token. Can only be done between calls to next().
 // It makes the next advance() a no-op.
 func (p *textParser) back() { p.backed = true }
@@ -728,6 +714,9 @@ func (p *textParser) consumeExtName() (string, error) {
 		if tok.err != nil {
 			return "", p.errorf("unrecognized type_url or extension name: %s", tok.err)
 		}
+		if p.done && tok.value != "]" {
+			return "", p.errorf("unclosed type_url or extension name")
+		}
 	}
 	return strings.Join(parts, ""), nil
 }
@@ -883,13 +872,9 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error {
 // UnmarshalText returns *RequiredNotSetError.
 func UnmarshalText(s string, pb Message) error {
 	if um, ok := pb.(encoding.TextUnmarshaler); ok {
-		err := um.UnmarshalText([]byte(s))
-		return err
+		return um.UnmarshalText([]byte(s))
 	}
 	pb.Reset()
 	v := reflect.ValueOf(pb)
-	if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil {
-		return pe
-	}
-	return nil
+	return newTextParser(s).readStruct(v.Elem(), "")
 }
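Aside (not part of the patch): a standalone sketch of the decoding scheme the rewritten unescape uses for \x, \u and \U -- the digits are parsed with strconv.ParseUint in base 16, \x yields a single raw byte, and \u/\U are range-checked against utf8.MaxRune before being emitted as a rune. The decodeHexEscape helper below is an illustrative name, not something defined in the patch:

package main

import (
	"fmt"
	"strconv"
	"unicode/utf8"
)

// decodeHexEscape decodes the digits of a \x, \u or \U escape the same way
// the new unescape path does: one ParseUint call, then a byte or a rune.
func decodeHexEscape(kind rune, digits string) (string, error) {
	i, err := strconv.ParseUint(digits, 16, 64)
	if err != nil {
		return "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, kind, digits)
	}
	if kind == 'x' || kind == 'X' {
		return string([]byte{byte(i)}), nil // \xNN is a raw byte
	}
	if i > utf8.MaxRune {
		return "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, kind, digits)
	}
	return string(rune(i)), nil // \uNNNN / \UNNNNNNNN is a code point
}

func main() {
	s, _ := decodeHexEscape('u', "0047")
	fmt.Println(s) // G
	s, _ = decodeHexEscape('x', "AB")
	fmt.Printf("%q\n", s) // "\xab"
}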
+ { + in: `count: 42 name: "\u0047\U00000047\uffff\U0010ffff"`, + out: &MyMessage{ + Count: Int32(42), + Name: String("GG\uffff\U0010ffff"), }, }, @@ -180,6 +189,24 @@ var unMarshalTextTests = []UnmarshalTextTest{ err: `line 1.15: invalid quoted string "\0": \0 requires 2 following digits`, }, + // Bad \u escape + { + in: `count: 42 name: "\u000"`, + err: `line 1.16: invalid quoted string "\u000": \u requires 4 following digits`, + }, + + // Bad \U escape + { + in: `count: 42 name: "\U0000000"`, + err: `line 1.16: invalid quoted string "\U0000000": \U requires 8 following digits`, + }, + + // Bad \U escape + { + in: `count: 42 name: "\xxx"`, + err: `line 1.16: invalid quoted string "\xxx": \xxx contains non-hexadecimal digits`, + }, + // Number too large for int64 { in: "count: 1 others { key: 123456789012345678901 }", @@ -263,6 +290,12 @@ var unMarshalTextTests = []UnmarshalTextTest{ err: `line 1.17: invalid float32: "17.4"`, }, + // unclosed bracket doesn't cause infinite loop + { + in: `[`, + err: `line 1.0: unclosed type_url or extension name`, + }, + // Enum { in: `count:42 bikeshed: BLUE`, @@ -330,7 +363,7 @@ var unMarshalTextTests = []UnmarshalTextTest{ // Missing required field { in: `name: "Pawel"`, - err: `proto: required field "testdata.MyMessage.count" not set`, + err: fmt.Sprintf(`proto: required field "%T.count" not set`, MyMessage{}), out: &MyMessage{ Name: String("Pawel"), }, @@ -339,7 +372,7 @@ var unMarshalTextTests = []UnmarshalTextTest{ // Missing required field in a required submessage { in: `count: 42 we_must_go_deeper < leo_finally_won_an_oscar <> >`, - err: `proto: required field "testdata.InnerMessage.host" not set`, + err: fmt.Sprintf(`proto: required field "%T.host" not set`, InnerMessage{}), out: &MyMessage{ Count: Int32(42), WeMustGoDeeper: &RequiredInnerMessage{LeoFinallyWonAnOscar: &InnerMessage{}}, @@ -470,10 +503,10 @@ var unMarshalTextTests = []UnmarshalTextTest{ }, // Extension - buildExtStructTest(`count: 42 [testdata.Ext.more]:`), - buildExtStructTest(`count: 42 [testdata.Ext.more] {data:"Hello, world!"}`), - buildExtDataTest(`count: 42 [testdata.Ext.text]:"Hello, world!" [testdata.Ext.number]:1729`), - buildExtRepStringTest(`count: 42 [testdata.greeting]:"bula" [testdata.greeting]:"hola"`), + buildExtStructTest(`count: 42 [test_proto.Ext.more]:`), + buildExtStructTest(`count: 42 [test_proto.Ext.more] {data:"Hello, world!"}`), + buildExtDataTest(`count: 42 [test_proto.Ext.text]:"Hello, world!" [test_proto.Ext.number]:1729`), + buildExtRepStringTest(`count: 42 [test_proto.greeting]:"bula" [test_proto.greeting]:"hola"`), // Big all-in-one { @@ -534,7 +567,7 @@ func TestUnmarshalText(t *testing.T) { // We don't expect failure. 
if err != nil { t.Errorf("Test %d: Unexpected error: %v", i, err) - } else if !reflect.DeepEqual(pb, test.out) { + } else if !Equal(pb, test.out) { t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", i, pb, test.out) } @@ -545,7 +578,7 @@ func TestUnmarshalText(t *testing.T) { } else if err.Error() != test.err { t.Errorf("Test %d: Incorrect error.\nHave: %v\nWant: %v", i, err.Error(), test.err) - } else if _, ok := err.(*RequiredNotSetError); ok && test.out != nil && !reflect.DeepEqual(pb, test.out) { + } else if _, ok := err.(*RequiredNotSetError); ok && test.out != nil && !Equal(pb, test.out) { t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", i, pb, test.out) } diff --git a/vendor/github.com/golang/protobuf/proto/text_test.go b/vendor/github.com/golang/protobuf/proto/text_test.go index 3eabacac..3c8b033c 100644 --- a/vendor/github.com/golang/protobuf/proto/text_test.go +++ b/vendor/github.com/golang/protobuf/proto/text_test.go @@ -37,12 +37,14 @@ import ( "io/ioutil" "math" "strings" + "sync" "testing" "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" + anypb "github.com/golang/protobuf/ptypes/any" ) // textMessage implements the methods that allow it to marshal and unmarshal @@ -151,12 +153,12 @@ SomeGroup { } /* 2 unknown bytes */ 13: 4 -[testdata.Ext.more]: < +[test_proto.Ext.more]: < data: "Big gobs for big rats" > -[testdata.greeting]: "adg" -[testdata.greeting]: "easy" -[testdata.greeting]: "cow" +[test_proto.greeting]: "adg" +[test_proto.greeting]: "easy" +[test_proto.greeting]: "cow" /* 13 unknown bytes */ 201: "\t3G skiing" /* 3 unknown bytes */ @@ -472,3 +474,45 @@ func TestProto3Text(t *testing.T) { } } } + +func TestRacyMarshal(t *testing.T) { + // This test should be run with the race detector. + + any := &pb.MyMessage{Count: proto.Int32(47), Name: proto.String("David")} + proto.SetExtension(any, pb.E_Ext_Text, proto.String("bar")) + b, err := proto.Marshal(any) + if err != nil { + panic(err) + } + m := &proto3pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(any), Value: b}, + } + + wantText := proto.MarshalTextString(m) + wantBytes, err := proto.Marshal(m) + if err != nil { + t.Fatalf("proto.Marshal error: %v", err) + } + + var wg sync.WaitGroup + defer wg.Wait() + wg.Add(20) + for i := 0; i < 10; i++ { + go func() { + defer wg.Done() + got := proto.MarshalTextString(m) + if got != wantText { + t.Errorf("proto.MarshalTextString = %q, want %q", got, wantText) + } + }() + go func() { + defer wg.Done() + got, err := proto.Marshal(m) + if !bytes.Equal(got, wantBytes) || err != nil { + t.Errorf("proto.Marshal = (%x, %v), want (%x, nil)", got, err, wantBytes) + } + }() + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile deleted file mode 100644 index a42cc371..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. 
-# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -test: - cd testdata && make test diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile deleted file mode 100644 index 41a2d04d..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -# Not stored here, but descriptor.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/descriptor.proto -regenerate: - @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=../../../../.. -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go index 63cf2c80..e855b1f5 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go @@ -1,35 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/descriptor.proto -/* -Package descriptor is a generated protocol buffer package. - -It is generated from these files: - google/protobuf/descriptor.proto - -It has these top-level messages: - FileDescriptorSet - FileDescriptorProto - DescriptorProto - FieldDescriptorProto - OneofDescriptorProto - EnumDescriptorProto - EnumValueDescriptorProto - ServiceDescriptorProto - MethodDescriptorProto - FileOptions - MessageOptions - FieldOptions - OneofOptions - EnumOptions - EnumValueOptions - ServiceOptions - MethodOptions - UninterpretedOption - SourceCodeInfo - GeneratedCodeInfo -*/ -package descriptor +package descriptor // import "github.com/golang/protobuf/protoc-gen-go/descriptor" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -137,7 +109,9 @@ func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { *x = FieldDescriptorProto_Type(value) return nil } -func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{3, 0} } +func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{4, 0} +} type FieldDescriptorProto_Label int32 @@ -176,7 +150,7 @@ func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { return nil } func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{3, 1} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{4, 1} } // Generated classes can be optimized for speed or code size. 
@@ -216,7 +190,9 @@ func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { *x = FileOptions_OptimizeMode(value) return nil } -func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } +func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{10, 0} +} type FieldOptions_CType int32 @@ -254,7 +230,9 @@ func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { *x = FieldOptions_CType(value) return nil } -func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 0} } +func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{12, 0} +} type FieldOptions_JSType int32 @@ -294,7 +272,9 @@ func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { *x = FieldOptions_JSType(value) return nil } -func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 1} } +func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{12, 1} +} // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, // or neither? HTTP based RPC implementation may choose GET verb for safe @@ -335,20 +315,41 @@ func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error { return nil } func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{16, 0} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{17, 0} } // The protocol compiler can output a FileDescriptorSet containing the .proto // files it parses. type FileDescriptorSet struct { - File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` + File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } -func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorSet) ProtoMessage() {} -func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } +func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorSet) ProtoMessage() {} +func (*FileDescriptorSet) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{0} +} +func (m *FileDescriptorSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileDescriptorSet.Unmarshal(m, b) +} +func (m *FileDescriptorSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileDescriptorSet.Marshal(b, m, deterministic) +} +func (dst *FileDescriptorSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileDescriptorSet.Merge(dst, src) +} +func (m *FileDescriptorSet) XXX_Size() int { + return xxx_messageInfo_FileDescriptorSet.Size(m) +} +func (m *FileDescriptorSet) XXX_DiscardUnknown() { + xxx_messageInfo_FileDescriptorSet.DiscardUnknown(m) +} + +var xxx_messageInfo_FileDescriptorSet proto.InternalMessageInfo func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto { if m != nil { @@ -381,14 +382,35 @@ type FileDescriptorProto struct { SourceCodeInfo *SourceCodeInfo 
`protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` // The syntax of the proto file. // The supported values are "proto2" and "proto3". - Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` - XXX_unrecognized []byte `json:"-"` + Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } -func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorProto) ProtoMessage() {} -func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } +func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorProto) ProtoMessage() {} +func (*FileDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{1} +} +func (m *FileDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileDescriptorProto.Unmarshal(m, b) +} +func (m *FileDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *FileDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileDescriptorProto.Merge(dst, src) +} +func (m *FileDescriptorProto) XXX_Size() int { + return xxx_messageInfo_FileDescriptorProto.Size(m) +} +func (m *FileDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_FileDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_FileDescriptorProto proto.InternalMessageInfo func (m *FileDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -487,14 +509,35 @@ type DescriptorProto struct { ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved field names, which may not be used by fields in the same message. // A given name may only be reserved once. 
- ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` - XXX_unrecognized []byte `json:"-"` + ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } -func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } -func (*DescriptorProto) ProtoMessage() {} -func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } +func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto) ProtoMessage() {} +func (*DescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{2} +} +func (m *DescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto.Unmarshal(m, b) +} +func (m *DescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto.Marshal(b, m, deterministic) +} +func (dst *DescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto.Merge(dst, src) +} +func (m *DescriptorProto) XXX_Size() int { + return xxx_messageInfo_DescriptorProto.Size(m) +} +func (m *DescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_DescriptorProto proto.InternalMessageInfo func (m *DescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -567,17 +610,37 @@ func (m *DescriptorProto) GetReservedName() []string { } type DescriptorProto_ExtensionRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ExtensionRange) ProtoMessage() {} func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 0} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{2, 0} } +func (m *DescriptorProto_ExtensionRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Unmarshal(m, b) +} +func (m *DescriptorProto_ExtensionRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Marshal(b, m, deterministic) +} +func (dst *DescriptorProto_ExtensionRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto_ExtensionRange.Merge(dst, src) +} +func (m *DescriptorProto_ExtensionRange) XXX_Size() int { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Size(m) +} +func (m *DescriptorProto_ExtensionRange) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto_ExtensionRange.DiscardUnknown(m) +} + +var 
xxx_messageInfo_DescriptorProto_ExtensionRange proto.InternalMessageInfo func (m *DescriptorProto_ExtensionRange) GetStart() int32 { if m != nil && m.Start != nil { @@ -593,21 +656,47 @@ func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { return 0 } +func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { + if m != nil { + return m.Options + } + return nil +} + // Range of reserved tag numbers. Reserved tag numbers may not be used by // fields or extension ranges in the same message. Reserved ranges may // not overlap. type DescriptorProto_ReservedRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ReservedRange) ProtoMessage() {} func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 1} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{2, 1} } +func (m *DescriptorProto_ReservedRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto_ReservedRange.Unmarshal(m, b) +} +func (m *DescriptorProto_ReservedRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto_ReservedRange.Marshal(b, m, deterministic) +} +func (dst *DescriptorProto_ReservedRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto_ReservedRange.Merge(dst, src) +} +func (m *DescriptorProto_ReservedRange) XXX_Size() int { + return xxx_messageInfo_DescriptorProto_ReservedRange.Size(m) +} +func (m *DescriptorProto_ReservedRange) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto_ReservedRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DescriptorProto_ReservedRange proto.InternalMessageInfo func (m *DescriptorProto_ReservedRange) GetStart() int32 { if m != nil && m.Start != nil { @@ -623,6 +712,54 @@ func (m *DescriptorProto_ReservedRange) GetEnd() int32 { return 0 } +type ExtensionRangeOptions struct { + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionRangeOptions) Reset() { *m = ExtensionRangeOptions{} } +func (m *ExtensionRangeOptions) String() string { return proto.CompactTextString(m) } +func (*ExtensionRangeOptions) ProtoMessage() {} +func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{3} +} + +var extRange_ExtensionRangeOptions = []proto.ExtensionRange{ + {Start: 1000, End: 536870911}, +} + +func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ExtensionRangeOptions +} +func (m *ExtensionRangeOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionRangeOptions.Unmarshal(m, b) +} +func (m *ExtensionRangeOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionRangeOptions.Marshal(b, m, deterministic) +} +func (dst *ExtensionRangeOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionRangeOptions.Merge(dst, src) +} +func (m *ExtensionRangeOptions) XXX_Size() int { + return xxx_messageInfo_ExtensionRangeOptions.Size(m) +} +func (m *ExtensionRangeOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionRangeOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionRangeOptions proto.InternalMessageInfo + +func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + // Describes a field within a message. type FieldDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` @@ -653,15 +790,36 @@ type FieldDescriptorProto struct { // user has set a "json_name" option on this field, that option's value // will be used. Otherwise, it's deduced from the field's name by converting // it to camelCase. 
- JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` - Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` + Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } -func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FieldDescriptorProto) ProtoMessage() {} -func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } +func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FieldDescriptorProto) ProtoMessage() {} +func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{4} +} +func (m *FieldDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldDescriptorProto.Unmarshal(m, b) +} +func (m *FieldDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *FieldDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldDescriptorProto.Merge(dst, src) +} +func (m *FieldDescriptorProto) XXX_Size() int { + return xxx_messageInfo_FieldDescriptorProto.Size(m) +} +func (m *FieldDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_FieldDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldDescriptorProto proto.InternalMessageInfo func (m *FieldDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -735,15 +893,36 @@ func (m *FieldDescriptorProto) GetOptions() *FieldOptions { // Describes a oneof. 
type OneofDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } -func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*OneofDescriptorProto) ProtoMessage() {} -func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } +func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*OneofDescriptorProto) ProtoMessage() {} +func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{5} +} +func (m *OneofDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OneofDescriptorProto.Unmarshal(m, b) +} +func (m *OneofDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OneofDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *OneofDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_OneofDescriptorProto.Merge(dst, src) +} +func (m *OneofDescriptorProto) XXX_Size() int { + return xxx_messageInfo_OneofDescriptorProto.Size(m) +} +func (m *OneofDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_OneofDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_OneofDescriptorProto proto.InternalMessageInfo func (m *OneofDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -761,16 +940,44 @@ func (m *OneofDescriptorProto) GetOptions() *OneofOptions { // Describes an enum type. type EnumDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` - Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` + Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + ReservedRange []*EnumDescriptorProto_EnumReservedRange `protobuf:"bytes,4,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. 
+ ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } -func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumDescriptorProto) ProtoMessage() {} -func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } +func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } +func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto) ProtoMessage() {} +func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{6} +} +func (m *EnumDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumDescriptorProto.Unmarshal(m, b) +} +func (m *EnumDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *EnumDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumDescriptorProto.Merge(dst, src) +} +func (m *EnumDescriptorProto) XXX_Size() int { + return xxx_messageInfo_EnumDescriptorProto.Size(m) +} +func (m *EnumDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_EnumDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumDescriptorProto proto.InternalMessageInfo func (m *EnumDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -793,18 +1000,105 @@ func (m *EnumDescriptorProto) GetOptions() *EnumOptions { return nil } -// Describes a value within an enum. -type EnumValueDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` - Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` +func (m *EnumDescriptorProto) GetReservedRange() []*EnumDescriptorProto_EnumReservedRange { + if m != nil { + return m.ReservedRange + } + return nil } -func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } -func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumValueDescriptorProto) ProtoMessage() {} -func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } +func (m *EnumDescriptorProto) GetReservedName() []string { + if m != nil { + return m.ReservedName + } + return nil +} + +// Range of reserved numeric values. Reserved values may not be used by +// entries in the same enum. Reserved ranges may not overlap. +// +// Note that this is distinct from DescriptorProto.ReservedRange in that it +// is inclusive such that it can appropriately represent the entire int32 +// domain. 
+type EnumDescriptorProto_EnumReservedRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumDescriptorProto_EnumReservedRange) Reset() { *m = EnumDescriptorProto_EnumReservedRange{} } +func (m *EnumDescriptorProto_EnumReservedRange) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto_EnumReservedRange) ProtoMessage() {} +func (*EnumDescriptorProto_EnumReservedRange) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{6, 0} +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Unmarshal(m, b) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Marshal(b, m, deterministic) +} +func (dst *EnumDescriptorProto_EnumReservedRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Merge(dst, src) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Size() int { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Size(m) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_DiscardUnknown() { + xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumDescriptorProto_EnumReservedRange proto.InternalMessageInfo + +func (m *EnumDescriptorProto_EnumReservedRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *EnumDescriptorProto_EnumReservedRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +// Describes a value within an enum. 
+type EnumValueDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` + Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } +func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumValueDescriptorProto) ProtoMessage() {} +func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{7} +} +func (m *EnumValueDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValueDescriptorProto.Unmarshal(m, b) +} +func (m *EnumValueDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValueDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *EnumValueDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValueDescriptorProto.Merge(dst, src) +} +func (m *EnumValueDescriptorProto) XXX_Size() int { + return xxx_messageInfo_EnumValueDescriptorProto.Size(m) +} +func (m *EnumValueDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValueDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValueDescriptorProto proto.InternalMessageInfo func (m *EnumValueDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -829,16 +1123,37 @@ func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { // Describes a service. type ServiceDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` - Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` + Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } -func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*ServiceDescriptorProto) ProtoMessage() {} -func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } +func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } +func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*ServiceDescriptorProto) ProtoMessage() {} +func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{8} +} +func (m *ServiceDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceDescriptorProto.Unmarshal(m, b) +} +func (m *ServiceDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *ServiceDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceDescriptorProto.Merge(dst, src) +} +func (m *ServiceDescriptorProto) XXX_Size() int { + 
return xxx_messageInfo_ServiceDescriptorProto.Size(m) +} +func (m *ServiceDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceDescriptorProto proto.InternalMessageInfo func (m *ServiceDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -872,14 +1187,35 @@ type MethodDescriptorProto struct { // Identifies if client streams multiple client messages ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` // Identifies if server streams multiple server messages - ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` - XXX_unrecognized []byte `json:"-"` + ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } -func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*MethodDescriptorProto) ProtoMessage() {} -func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } +func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } +func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*MethodDescriptorProto) ProtoMessage() {} +func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{9} +} +func (m *MethodDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MethodDescriptorProto.Unmarshal(m, b) +} +func (m *MethodDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MethodDescriptorProto.Marshal(b, m, deterministic) +} +func (dst *MethodDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_MethodDescriptorProto.Merge(dst, src) +} +func (m *MethodDescriptorProto) XXX_Size() int { + return xxx_messageInfo_MethodDescriptorProto.Size(m) +} +func (m *MethodDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_MethodDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_MethodDescriptorProto proto.InternalMessageInfo const Default_MethodDescriptorProto_ClientStreaming bool = false const Default_MethodDescriptorProto_ServerStreaming bool = false @@ -946,7 +1282,7 @@ type FileOptions struct { // top-level extensions defined in the file. JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` // This option does nothing. - JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` + JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` // Deprecated: Do not use. // If set true, then the Java2 code generator will generate code that // throws an exception whenever an attempt is made to assign a non-UTF-8 // byte sequence to a string field. 
@@ -974,6 +1310,7 @@ type FileOptions struct { CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` + PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` // Is this file deprecated? // Depending on the target platform, this can emit Deprecated annotations // for everything in the file, or it will be completely ignored; in the very @@ -995,24 +1332,50 @@ type FileOptions struct { // Sets the php class prefix which is prepended to all php generated classes // from this .proto. Default is empty. PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` - // The parser stores options it doesn't recognize here. See above. + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FileOptions) Reset() { *m = FileOptions{} } -func (m *FileOptions) String() string { return proto.CompactTextString(m) } -func (*FileOptions) ProtoMessage() {} -func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } +func (m *FileOptions) Reset() { *m = FileOptions{} } +func (m *FileOptions) String() string { return proto.CompactTextString(m) } +func (*FileOptions) ProtoMessage() {} +func (*FileOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{10} +} var extRange_FileOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FileOptions } +func (m *FileOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileOptions.Unmarshal(m, b) +} +func (m *FileOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileOptions.Marshal(b, m, deterministic) +} +func (dst *FileOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileOptions.Merge(dst, src) +} +func (m *FileOptions) XXX_Size() int { + return xxx_messageInfo_FileOptions.Size(m) +} +func (m *FileOptions) XXX_DiscardUnknown() { + xxx_messageInfo_FileOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_FileOptions proto.InternalMessageInfo const Default_FileOptions_JavaMultipleFiles bool = false const Default_FileOptions_JavaStringCheckUtf8 bool = false @@ -1020,6 +1383,7 @@ const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPE const Default_FileOptions_CcGenericServices bool = false const 
Default_FileOptions_JavaGenericServices bool = false const Default_FileOptions_PyGenericServices bool = false +const Default_FileOptions_PhpGenericServices bool = false const Default_FileOptions_Deprecated bool = false const Default_FileOptions_CcEnableArenas bool = false @@ -1044,6 +1408,7 @@ func (m *FileOptions) GetJavaMultipleFiles() bool { return Default_FileOptions_JavaMultipleFiles } +// Deprecated: Do not use. func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { if m != nil && m.JavaGenerateEqualsAndHash != nil { return *m.JavaGenerateEqualsAndHash @@ -1093,6 +1458,13 @@ func (m *FileOptions) GetPyGenericServices() bool { return Default_FileOptions_PyGenericServices } +func (m *FileOptions) GetPhpGenericServices() bool { + if m != nil && m.PhpGenericServices != nil { + return *m.PhpGenericServices + } + return Default_FileOptions_PhpGenericServices +} + func (m *FileOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated @@ -1135,6 +1507,13 @@ func (m *FileOptions) GetPhpClassPrefix() string { return "" } +func (m *FileOptions) GetPhpNamespace() string { + if m != nil && m.PhpNamespace != nil { + return *m.PhpNamespace + } + return "" +} + func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption @@ -1195,22 +1574,43 @@ type MessageOptions struct { MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MessageOptions) Reset() { *m = MessageOptions{} } -func (m *MessageOptions) String() string { return proto.CompactTextString(m) } -func (*MessageOptions) ProtoMessage() {} -func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } +func (m *MessageOptions) Reset() { *m = MessageOptions{} } +func (m *MessageOptions) String() string { return proto.CompactTextString(m) } +func (*MessageOptions) ProtoMessage() {} +func (*MessageOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{11} +} var extRange_MessageOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MessageOptions } +func (m *MessageOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageOptions.Unmarshal(m, b) +} +func (m *MessageOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageOptions.Marshal(b, m, deterministic) +} +func (dst *MessageOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageOptions.Merge(dst, src) +} +func (m *MessageOptions) XXX_Size() int { + return xxx_messageInfo_MessageOptions.Size(m) +} +func (m *MessageOptions) XXX_DiscardUnknown() { + xxx_messageInfo_MessageOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageOptions proto.InternalMessageInfo const Default_MessageOptions_MessageSetWireFormat bool = false const Default_MessageOptions_NoStandardDescriptorAccessor bool = false @@ -1265,13 +1665,15 @@ type FieldOptions struct { Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` // The 
jstype option determines the JavaScript type used for values of the // field. The option is permitted only for 64 bit integral and fixed types - // (int64, uint64, sint64, fixed64, sfixed64). By default these types are - // represented as JavaScript strings. This avoids loss of precision that can - // happen when a large value is converted to a floating point JavaScript - // numbers. Specifying JS_NUMBER for the jstype causes the generated - // JavaScript code to use the JavaScript "number" type instead of strings. - // This option is an enum to permit additional types to be added, - // e.g. goog.math.Integer. + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` // Should this field be parsed lazily? Lazy applies only to message-type // fields. It means that when the outer message is initially parsed, the @@ -1311,22 +1713,43 @@ type FieldOptions struct { Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FieldOptions) Reset() { *m = FieldOptions{} } -func (m *FieldOptions) String() string { return proto.CompactTextString(m) } -func (*FieldOptions) ProtoMessage() {} -func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } +func (m *FieldOptions) Reset() { *m = FieldOptions{} } +func (m *FieldOptions) String() string { return proto.CompactTextString(m) } +func (*FieldOptions) ProtoMessage() {} +func (*FieldOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{12} +} var extRange_FieldOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FieldOptions } +func (m *FieldOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOptions.Unmarshal(m, b) +} +func (m *FieldOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOptions.Marshal(b, m, deterministic) +} +func (dst *FieldOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOptions.Merge(dst, src) +} +func (m *FieldOptions) XXX_Size() int { + return xxx_messageInfo_FieldOptions.Size(m) +} +func (m *FieldOptions) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOptions proto.InternalMessageInfo const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL @@ -1386,22 +1809,43 @@ func (m *FieldOptions) 
GetUninterpretedOption() []*UninterpretedOption { type OneofOptions struct { // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *OneofOptions) Reset() { *m = OneofOptions{} } -func (m *OneofOptions) String() string { return proto.CompactTextString(m) } -func (*OneofOptions) ProtoMessage() {} -func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } +func (m *OneofOptions) Reset() { *m = OneofOptions{} } +func (m *OneofOptions) String() string { return proto.CompactTextString(m) } +func (*OneofOptions) ProtoMessage() {} +func (*OneofOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{13} +} var extRange_OneofOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OneofOptions } +func (m *OneofOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OneofOptions.Unmarshal(m, b) +} +func (m *OneofOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OneofOptions.Marshal(b, m, deterministic) +} +func (dst *OneofOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_OneofOptions.Merge(dst, src) +} +func (m *OneofOptions) XXX_Size() int { + return xxx_messageInfo_OneofOptions.Size(m) +} +func (m *OneofOptions) XXX_DiscardUnknown() { + xxx_messageInfo_OneofOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_OneofOptions proto.InternalMessageInfo func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { @@ -1421,22 +1865,43 @@ type EnumOptions struct { Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *EnumOptions) Reset() { *m = EnumOptions{} } -func (m *EnumOptions) String() string { return proto.CompactTextString(m) } -func (*EnumOptions) ProtoMessage() {} -func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } +func (m *EnumOptions) Reset() { *m = EnumOptions{} } +func (m *EnumOptions) String() string { return proto.CompactTextString(m) } +func (*EnumOptions) ProtoMessage() {} +func (*EnumOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{14} +} var extRange_EnumOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumOptions } +func (m *EnumOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumOptions.Unmarshal(m, b) +} +func (m *EnumOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumOptions.Marshal(b, m, deterministic) +} +func (dst *EnumOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumOptions.Merge(dst, src) +} +func (m *EnumOptions) XXX_Size() int { + return xxx_messageInfo_EnumOptions.Size(m) +} +func (m *EnumOptions) XXX_DiscardUnknown() { + xxx_messageInfo_EnumOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumOptions proto.InternalMessageInfo const Default_EnumOptions_Deprecated bool = false @@ -1469,22 +1934,43 @@ type EnumValueOptions struct { Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } -func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } -func (*EnumValueOptions) ProtoMessage() {} -func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } +func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } +func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } +func (*EnumValueOptions) ProtoMessage() {} +func (*EnumValueOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{15} +} var extRange_EnumValueOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumValueOptions } +func (m *EnumValueOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValueOptions.Unmarshal(m, b) +} +func (m *EnumValueOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValueOptions.Marshal(b, m, deterministic) +} +func (dst *EnumValueOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValueOptions.Merge(dst, src) +} +func (m *EnumValueOptions) XXX_Size() int { + return xxx_messageInfo_EnumValueOptions.Size(m) +} +func (m *EnumValueOptions) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValueOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValueOptions proto.InternalMessageInfo const Default_EnumValueOptions_Deprecated bool = false @@ -1510,22 +1996,43 @@ type ServiceOptions struct { Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } -func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } -func (*ServiceOptions) ProtoMessage() {} -func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } +func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } +func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } +func (*ServiceOptions) ProtoMessage() {} +func (*ServiceOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{16} +} var extRange_ServiceOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_ServiceOptions } +func (m *ServiceOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceOptions.Unmarshal(m, b) +} +func (m *ServiceOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceOptions.Marshal(b, m, deterministic) +} +func (dst *ServiceOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceOptions.Merge(dst, src) +} +func (m *ServiceOptions) XXX_Size() int { + return xxx_messageInfo_ServiceOptions.Size(m) +} +func (m *ServiceOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceOptions proto.InternalMessageInfo const Default_ServiceOptions_Deprecated bool = false @@ -1552,22 +2059,43 @@ type MethodOptions struct { IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MethodOptions) Reset() { *m = MethodOptions{} } -func (m *MethodOptions) String() string { return proto.CompactTextString(m) } -func (*MethodOptions) ProtoMessage() {} -func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } +func (m *MethodOptions) Reset() { *m = MethodOptions{} } +func (m *MethodOptions) String() string { return proto.CompactTextString(m) } +func (*MethodOptions) ProtoMessage() {} +func (*MethodOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{17} +} var extRange_MethodOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MethodOptions } +func (m *MethodOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MethodOptions.Unmarshal(m, b) +} +func (m *MethodOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MethodOptions.Marshal(b, m, deterministic) +} +func (dst *MethodOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_MethodOptions.Merge(dst, src) +} +func (m *MethodOptions) XXX_Size() int { + return xxx_messageInfo_MethodOptions.Size(m) +} +func (m *MethodOptions) XXX_DiscardUnknown() { + xxx_messageInfo_MethodOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_MethodOptions proto.InternalMessageInfo const Default_MethodOptions_Deprecated bool = false const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN @@ -1603,19 +2131,40 @@ type UninterpretedOption struct { Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` // The value of the uninterpreted option, in whatever type the tokenizer // identified it as during parsing. Exactly one of these should be set. 
- IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` - PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` - NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` - DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` - StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` - AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` - XXX_unrecognized []byte `json:"-"` + IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` + PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` + NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } -func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } -func (*UninterpretedOption) ProtoMessage() {} -func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } +func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } +func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption) ProtoMessage() {} +func (*UninterpretedOption) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{18} +} +func (m *UninterpretedOption) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UninterpretedOption.Unmarshal(m, b) +} +func (m *UninterpretedOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UninterpretedOption.Marshal(b, m, deterministic) +} +func (dst *UninterpretedOption) XXX_Merge(src proto.Message) { + xxx_messageInfo_UninterpretedOption.Merge(dst, src) +} +func (m *UninterpretedOption) XXX_Size() int { + return xxx_messageInfo_UninterpretedOption.Size(m) +} +func (m *UninterpretedOption) XXX_DiscardUnknown() { + xxx_messageInfo_UninterpretedOption.DiscardUnknown(m) +} + +var xxx_messageInfo_UninterpretedOption proto.InternalMessageInfo func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { if m != nil { @@ -1672,17 +2221,36 @@ func (m *UninterpretedOption) GetAggregateValue() string { // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents // "foo.(bar.baz).qux". 
type UninterpretedOption_NamePart struct { - NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` - IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` - XXX_unrecognized []byte `json:"-"` + NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` + IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } func (*UninterpretedOption_NamePart) ProtoMessage() {} func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{17, 0} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{18, 0} } +func (m *UninterpretedOption_NamePart) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UninterpretedOption_NamePart.Unmarshal(m, b) +} +func (m *UninterpretedOption_NamePart) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UninterpretedOption_NamePart.Marshal(b, m, deterministic) +} +func (dst *UninterpretedOption_NamePart) XXX_Merge(src proto.Message) { + xxx_messageInfo_UninterpretedOption_NamePart.Merge(dst, src) +} +func (m *UninterpretedOption_NamePart) XXX_Size() int { + return xxx_messageInfo_UninterpretedOption_NamePart.Size(m) +} +func (m *UninterpretedOption_NamePart) XXX_DiscardUnknown() { + xxx_messageInfo_UninterpretedOption_NamePart.DiscardUnknown(m) +} + +var xxx_messageInfo_UninterpretedOption_NamePart proto.InternalMessageInfo func (m *UninterpretedOption_NamePart) GetNamePart() string { if m != nil && m.NamePart != nil { @@ -1744,14 +2312,35 @@ type SourceCodeInfo struct { // - Code which tries to interpret locations should probably be designed to // ignore those that it doesn't understand, as more types of locations could // be recorded in the future. 
- Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` - XXX_unrecognized []byte `json:"-"` + Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } -func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo) ProtoMessage() {} -func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } +func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } +func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo) ProtoMessage() {} +func (*SourceCodeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{19} +} +func (m *SourceCodeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceCodeInfo.Unmarshal(m, b) +} +func (m *SourceCodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceCodeInfo.Marshal(b, m, deterministic) +} +func (dst *SourceCodeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceCodeInfo.Merge(dst, src) +} +func (m *SourceCodeInfo) XXX_Size() int { + return xxx_messageInfo_SourceCodeInfo.Size(m) +} +func (m *SourceCodeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceCodeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceCodeInfo proto.InternalMessageInfo func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { if m != nil { @@ -1841,13 +2430,34 @@ type SourceCodeInfo_Location struct { LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } -func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo_Location) ProtoMessage() {} -func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18, 0} } +func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } +func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo_Location) ProtoMessage() {} +func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{19, 0} +} +func (m *SourceCodeInfo_Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceCodeInfo_Location.Unmarshal(m, b) +} +func (m *SourceCodeInfo_Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceCodeInfo_Location.Marshal(b, m, deterministic) +} +func (dst *SourceCodeInfo_Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceCodeInfo_Location.Merge(dst, src) +} +func (m *SourceCodeInfo_Location) XXX_Size() int { + return xxx_messageInfo_SourceCodeInfo_Location.Size(m) +} +func (m *SourceCodeInfo_Location) XXX_DiscardUnknown() { + 
xxx_messageInfo_SourceCodeInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceCodeInfo_Location proto.InternalMessageInfo func (m *SourceCodeInfo_Location) GetPath() []int32 { if m != nil { @@ -1890,14 +2500,35 @@ func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { type GeneratedCodeInfo struct { // An Annotation connects some span of text in generated code to an element // of its generating .proto file. - Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` - XXX_unrecognized []byte `json:"-"` + Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } -func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } -func (*GeneratedCodeInfo) ProtoMessage() {} -func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } +func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } +func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo) ProtoMessage() {} +func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{20} +} +func (m *GeneratedCodeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeneratedCodeInfo.Unmarshal(m, b) +} +func (m *GeneratedCodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeneratedCodeInfo.Marshal(b, m, deterministic) +} +func (dst *GeneratedCodeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeneratedCodeInfo.Merge(dst, src) +} +func (m *GeneratedCodeInfo) XXX_Size() int { + return xxx_messageInfo_GeneratedCodeInfo.Size(m) +} +func (m *GeneratedCodeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GeneratedCodeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GeneratedCodeInfo proto.InternalMessageInfo func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { if m != nil { @@ -1918,16 +2549,35 @@ type GeneratedCodeInfo_Annotation struct { // Identifies the ending offset in bytes in the generated code that // relates to the identified offset. The end offset should be one past // the last relevant byte (so the length of the text = end - begin). 
- End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{19, 0} + return fileDescriptor_descriptor_4df4cb5f42392df6, []int{20, 0} } +func (m *GeneratedCodeInfo_Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Unmarshal(m, b) +} +func (m *GeneratedCodeInfo_Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Marshal(b, m, deterministic) +} +func (dst *GeneratedCodeInfo_Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeneratedCodeInfo_Annotation.Merge(dst, src) +} +func (m *GeneratedCodeInfo_Annotation) XXX_Size() int { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Size(m) +} +func (m *GeneratedCodeInfo_Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_GeneratedCodeInfo_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_GeneratedCodeInfo_Annotation proto.InternalMessageInfo func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { if m != nil { @@ -1963,9 +2613,11 @@ func init() { proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") + proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions") proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") + proto.RegisterType((*EnumDescriptorProto_EnumReservedRange)(nil), "google.protobuf.EnumDescriptorProto.EnumReservedRange") proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto") proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") @@ -1991,162 +2643,170 @@ func init() { proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) } -func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 2460 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0x5b, 0x6f, 0xdb, 0xc8, - 0x15, 0x5e, 0x5d, 0x2d, 0x1d, 0xc9, 0xf2, 0x78, 0xec, 0x4d, 0x18, 0xef, 0x25, 0x8e, 0xf6, 0x12, - 0x6f, 0xd2, 0xc8, 0x0b, 0xe7, 0xb2, 0x59, 0xa7, 0x48, 0x21, 0x4b, 0x8c, 0x57, 0xa9, 0x2c, 0xa9, - 0x94, 0xdc, 0x4d, 0xf6, 0x85, 0x18, 0x93, 0x23, 0x99, 0x09, 0x45, 0x72, 0x49, 0x2a, 0x89, 0xf7, - 0x29, 0x40, 0x9f, 0x0a, 0xf4, 0x07, 0x14, 0x45, 0xd1, 0x87, 0x7d, 0x59, 0xa0, 0x3f, 0xa0, 0xcf, - 0xfd, 0x05, 0x05, 0xf6, 0xb9, 0x2f, 
0x45, 0x51, 0xa0, 0xfd, 0x07, 0x7d, 0x2d, 0x66, 0x86, 0xa4, - 0x48, 0x5d, 0x12, 0x77, 0x81, 0xec, 0x3e, 0xd9, 0x73, 0xce, 0x77, 0x0e, 0xcf, 0x9c, 0xf9, 0x66, - 0xce, 0x99, 0x11, 0x6c, 0x8f, 0x6c, 0x7b, 0x64, 0xd2, 0x5d, 0xc7, 0xb5, 0x7d, 0xfb, 0x64, 0x32, - 0xdc, 0xd5, 0xa9, 0xa7, 0xb9, 0x86, 0xe3, 0xdb, 0x6e, 0x8d, 0xcb, 0xf0, 0x9a, 0x40, 0xd4, 0x42, - 0x44, 0xf5, 0x08, 0xd6, 0x1f, 0x18, 0x26, 0x6d, 0x46, 0xc0, 0x3e, 0xf5, 0xf1, 0x5d, 0xc8, 0x0e, - 0x0d, 0x93, 0x4a, 0xa9, 0xed, 0xcc, 0x4e, 0x69, 0xef, 0xc3, 0xda, 0x8c, 0x51, 0x2d, 0x69, 0xd1, - 0x63, 0x62, 0x85, 0x5b, 0x54, 0xff, 0x95, 0x85, 0x8d, 0x05, 0x5a, 0x8c, 0x21, 0x6b, 0x91, 0x31, - 0xf3, 0x98, 0xda, 0x29, 0x2a, 0xfc, 0x7f, 0x2c, 0xc1, 0x8a, 0x43, 0xb4, 0xa7, 0x64, 0x44, 0xa5, - 0x34, 0x17, 0x87, 0x43, 0xfc, 0x3e, 0x80, 0x4e, 0x1d, 0x6a, 0xe9, 0xd4, 0xd2, 0xce, 0xa4, 0xcc, - 0x76, 0x66, 0xa7, 0xa8, 0xc4, 0x24, 0xf8, 0x3a, 0xac, 0x3b, 0x93, 0x13, 0xd3, 0xd0, 0xd4, 0x18, - 0x0c, 0xb6, 0x33, 0x3b, 0x39, 0x05, 0x09, 0x45, 0x73, 0x0a, 0xbe, 0x0a, 0x6b, 0xcf, 0x29, 0x79, - 0x1a, 0x87, 0x96, 0x38, 0xb4, 0xc2, 0xc4, 0x31, 0x60, 0x03, 0xca, 0x63, 0xea, 0x79, 0x64, 0x44, - 0x55, 0xff, 0xcc, 0xa1, 0x52, 0x96, 0xcf, 0x7e, 0x7b, 0x6e, 0xf6, 0xb3, 0x33, 0x2f, 0x05, 0x56, - 0x83, 0x33, 0x87, 0xe2, 0x3a, 0x14, 0xa9, 0x35, 0x19, 0x0b, 0x0f, 0xb9, 0x25, 0xf9, 0x93, 0xad, - 0xc9, 0x78, 0xd6, 0x4b, 0x81, 0x99, 0x05, 0x2e, 0x56, 0x3c, 0xea, 0x3e, 0x33, 0x34, 0x2a, 0xe5, - 0xb9, 0x83, 0xab, 0x73, 0x0e, 0xfa, 0x42, 0x3f, 0xeb, 0x23, 0xb4, 0xc3, 0x0d, 0x28, 0xd2, 0x17, - 0x3e, 0xb5, 0x3c, 0xc3, 0xb6, 0xa4, 0x15, 0xee, 0xe4, 0xa3, 0x05, 0xab, 0x48, 0x4d, 0x7d, 0xd6, - 0xc5, 0xd4, 0x0e, 0xdf, 0x81, 0x15, 0xdb, 0xf1, 0x0d, 0xdb, 0xf2, 0xa4, 0xc2, 0x76, 0x6a, 0xa7, - 0xb4, 0xf7, 0xee, 0x42, 0x22, 0x74, 0x05, 0x46, 0x09, 0xc1, 0xb8, 0x05, 0xc8, 0xb3, 0x27, 0xae, - 0x46, 0x55, 0xcd, 0xd6, 0xa9, 0x6a, 0x58, 0x43, 0x5b, 0x2a, 0x72, 0x07, 0x97, 0xe7, 0x27, 0xc2, - 0x81, 0x0d, 0x5b, 0xa7, 0x2d, 0x6b, 0x68, 0x2b, 0x15, 0x2f, 0x31, 0xc6, 0x17, 0x20, 0xef, 0x9d, - 0x59, 0x3e, 0x79, 0x21, 0x95, 0x39, 0x43, 0x82, 0x51, 0xf5, 0xbf, 0x39, 0x58, 0x3b, 0x0f, 0xc5, - 0xee, 0x41, 0x6e, 0xc8, 0x66, 0x29, 0xa5, 0xff, 0x9f, 0x1c, 0x08, 0x9b, 0x64, 0x12, 0xf3, 0x3f, - 0x30, 0x89, 0x75, 0x28, 0x59, 0xd4, 0xf3, 0xa9, 0x2e, 0x18, 0x91, 0x39, 0x27, 0xa7, 0x40, 0x18, - 0xcd, 0x53, 0x2a, 0xfb, 0x83, 0x28, 0xf5, 0x08, 0xd6, 0xa2, 0x90, 0x54, 0x97, 0x58, 0xa3, 0x90, - 0x9b, 0xbb, 0xaf, 0x8b, 0xa4, 0x26, 0x87, 0x76, 0x0a, 0x33, 0x53, 0x2a, 0x34, 0x31, 0xc6, 0x4d, - 0x00, 0xdb, 0xa2, 0xf6, 0x50, 0xd5, 0xa9, 0x66, 0x4a, 0x85, 0x25, 0x59, 0xea, 0x32, 0xc8, 0x5c, - 0x96, 0x6c, 0x21, 0xd5, 0x4c, 0xfc, 0xf9, 0x94, 0x6a, 0x2b, 0x4b, 0x98, 0x72, 0x24, 0x36, 0xd9, - 0x1c, 0xdb, 0x8e, 0xa1, 0xe2, 0x52, 0xc6, 0x7b, 0xaa, 0x07, 0x33, 0x2b, 0xf2, 0x20, 0x6a, 0xaf, - 0x9d, 0x99, 0x12, 0x98, 0x89, 0x89, 0xad, 0xba, 0xf1, 0x21, 0xfe, 0x00, 0x22, 0x81, 0xca, 0x69, - 0x05, 0xfc, 0x14, 0x2a, 0x87, 0xc2, 0x0e, 0x19, 0xd3, 0xad, 0xbb, 0x50, 0x49, 0xa6, 0x07, 0x6f, - 0x42, 0xce, 0xf3, 0x89, 0xeb, 0x73, 0x16, 0xe6, 0x14, 0x31, 0xc0, 0x08, 0x32, 0xd4, 0xd2, 0xf9, - 0x29, 0x97, 0x53, 0xd8, 0xbf, 0x5b, 0x9f, 0xc1, 0x6a, 0xe2, 0xf3, 0xe7, 0x35, 0xac, 0xfe, 0x3e, - 0x0f, 0x9b, 0x8b, 0x38, 0xb7, 0x90, 0xfe, 0x17, 0x20, 0x6f, 0x4d, 0xc6, 0x27, 0xd4, 0x95, 0x32, - 0xdc, 0x43, 0x30, 0xc2, 0x75, 0xc8, 0x99, 0xe4, 0x84, 0x9a, 0x52, 0x76, 0x3b, 0xb5, 0x53, 0xd9, - 0xbb, 0x7e, 0x2e, 0x56, 0xd7, 0xda, 0xcc, 0x44, 0x11, 0x96, 0xf8, 0x3e, 0x64, 0x83, 0x23, 0x8e, - 0x79, 0xb8, 0x76, 0x3e, 0x0f, 0x8c, 0x8b, 0x0a, 0xb7, 0xc3, 
0xef, 0x40, 0x91, 0xfd, 0x15, 0xb9, - 0xcd, 0xf3, 0x98, 0x0b, 0x4c, 0xc0, 0xf2, 0x8a, 0xb7, 0xa0, 0xc0, 0x69, 0xa6, 0xd3, 0xb0, 0x34, - 0x44, 0x63, 0xb6, 0x30, 0x3a, 0x1d, 0x92, 0x89, 0xe9, 0xab, 0xcf, 0x88, 0x39, 0xa1, 0x9c, 0x30, - 0x45, 0xa5, 0x1c, 0x08, 0x7f, 0xcd, 0x64, 0xf8, 0x32, 0x94, 0x04, 0x2b, 0x0d, 0x4b, 0xa7, 0x2f, - 0xf8, 0xe9, 0x93, 0x53, 0x04, 0x51, 0x5b, 0x4c, 0xc2, 0x3e, 0xff, 0xc4, 0xb3, 0xad, 0x70, 0x69, - 0xf9, 0x27, 0x98, 0x80, 0x7f, 0xfe, 0xb3, 0xd9, 0x83, 0xef, 0xbd, 0xc5, 0xd3, 0x9b, 0xe5, 0x62, - 0xf5, 0x2f, 0x69, 0xc8, 0xf2, 0xfd, 0xb6, 0x06, 0xa5, 0xc1, 0xe3, 0x9e, 0xac, 0x36, 0xbb, 0xc7, - 0x07, 0x6d, 0x19, 0xa5, 0x70, 0x05, 0x80, 0x0b, 0x1e, 0xb4, 0xbb, 0xf5, 0x01, 0x4a, 0x47, 0xe3, - 0x56, 0x67, 0x70, 0xe7, 0x16, 0xca, 0x44, 0x06, 0xc7, 0x42, 0x90, 0x8d, 0x03, 0x6e, 0xee, 0xa1, - 0x1c, 0x46, 0x50, 0x16, 0x0e, 0x5a, 0x8f, 0xe4, 0xe6, 0x9d, 0x5b, 0x28, 0x9f, 0x94, 0xdc, 0xdc, - 0x43, 0x2b, 0x78, 0x15, 0x8a, 0x5c, 0x72, 0xd0, 0xed, 0xb6, 0x51, 0x21, 0xf2, 0xd9, 0x1f, 0x28, - 0xad, 0xce, 0x21, 0x2a, 0x46, 0x3e, 0x0f, 0x95, 0xee, 0x71, 0x0f, 0x41, 0xe4, 0xe1, 0x48, 0xee, - 0xf7, 0xeb, 0x87, 0x32, 0x2a, 0x45, 0x88, 0x83, 0xc7, 0x03, 0xb9, 0x8f, 0xca, 0x89, 0xb0, 0x6e, - 0xee, 0xa1, 0xd5, 0xe8, 0x13, 0x72, 0xe7, 0xf8, 0x08, 0x55, 0xf0, 0x3a, 0xac, 0x8a, 0x4f, 0x84, - 0x41, 0xac, 0xcd, 0x88, 0xee, 0xdc, 0x42, 0x68, 0x1a, 0x88, 0xf0, 0xb2, 0x9e, 0x10, 0xdc, 0xb9, - 0x85, 0x70, 0xb5, 0x01, 0x39, 0xce, 0x2e, 0x8c, 0xa1, 0xd2, 0xae, 0x1f, 0xc8, 0x6d, 0xb5, 0xdb, - 0x1b, 0xb4, 0xba, 0x9d, 0x7a, 0x1b, 0xa5, 0xa6, 0x32, 0x45, 0xfe, 0xd5, 0x71, 0x4b, 0x91, 0x9b, - 0x28, 0x1d, 0x97, 0xf5, 0xe4, 0xfa, 0x40, 0x6e, 0xa2, 0x4c, 0x55, 0x83, 0xcd, 0x45, 0xe7, 0xcc, - 0xc2, 0x9d, 0x11, 0x5b, 0xe2, 0xf4, 0x92, 0x25, 0xe6, 0xbe, 0xe6, 0x96, 0xf8, 0xdb, 0x14, 0x6c, - 0x2c, 0x38, 0x6b, 0x17, 0x7e, 0xe4, 0x17, 0x90, 0x13, 0x14, 0x15, 0xd5, 0xe7, 0x93, 0x85, 0x87, - 0x36, 0x27, 0xec, 0x5c, 0x05, 0xe2, 0x76, 0xf1, 0x0a, 0x9c, 0x59, 0x52, 0x81, 0x99, 0x8b, 0xb9, - 0x20, 0x7f, 0x93, 0x02, 0x69, 0x99, 0xef, 0xd7, 0x1c, 0x14, 0xe9, 0xc4, 0x41, 0x71, 0x6f, 0x36, - 0x80, 0x2b, 0xcb, 0xe7, 0x30, 0x17, 0xc5, 0x77, 0x29, 0xb8, 0xb0, 0xb8, 0x51, 0x59, 0x18, 0xc3, - 0x7d, 0xc8, 0x8f, 0xa9, 0x7f, 0x6a, 0x87, 0xc5, 0xfa, 0xe3, 0x05, 0x25, 0x80, 0xa9, 0x67, 0x73, - 0x15, 0x58, 0xc5, 0x6b, 0x48, 0x66, 0x59, 0xb7, 0x21, 0xa2, 0x99, 0x8b, 0xf4, 0xb7, 0x69, 0x78, - 0x7b, 0xa1, 0xf3, 0x85, 0x81, 0xbe, 0x07, 0x60, 0x58, 0xce, 0xc4, 0x17, 0x05, 0x59, 0x9c, 0x4f, - 0x45, 0x2e, 0xe1, 0x7b, 0x9f, 0x9d, 0x3d, 0x13, 0x3f, 0xd2, 0x67, 0xb8, 0x1e, 0x84, 0x88, 0x03, - 0xee, 0x4e, 0x03, 0xcd, 0xf2, 0x40, 0xdf, 0x5f, 0x32, 0xd3, 0xb9, 0x5a, 0xf7, 0x29, 0x20, 0xcd, - 0x34, 0xa8, 0xe5, 0xab, 0x9e, 0xef, 0x52, 0x32, 0x36, 0xac, 0x11, 0x3f, 0x80, 0x0b, 0xfb, 0xb9, - 0x21, 0x31, 0x3d, 0xaa, 0xac, 0x09, 0x75, 0x3f, 0xd4, 0x32, 0x0b, 0x5e, 0x65, 0xdc, 0x98, 0x45, - 0x3e, 0x61, 0x21, 0xd4, 0x91, 0x45, 0xf5, 0xef, 0x2b, 0x50, 0x8a, 0xb5, 0x75, 0xf8, 0x0a, 0x94, - 0x9f, 0x90, 0x67, 0x44, 0x0d, 0x5b, 0x75, 0x91, 0x89, 0x12, 0x93, 0xf5, 0x82, 0x76, 0xfd, 0x53, - 0xd8, 0xe4, 0x10, 0x7b, 0xe2, 0x53, 0x57, 0xd5, 0x4c, 0xe2, 0x79, 0x3c, 0x69, 0x05, 0x0e, 0xc5, - 0x4c, 0xd7, 0x65, 0xaa, 0x46, 0xa8, 0xc1, 0xb7, 0x61, 0x83, 0x5b, 0x8c, 0x27, 0xa6, 0x6f, 0x38, - 0x26, 0x55, 0xd9, 0xe5, 0xc1, 0xe3, 0x07, 0x71, 0x14, 0xd9, 0x3a, 0x43, 0x1c, 0x05, 0x00, 0x16, - 0x91, 0x87, 0x9b, 0xf0, 0x1e, 0x37, 0x1b, 0x51, 0x8b, 0xba, 0xc4, 0xa7, 0x2a, 0xfd, 0x7a, 0x42, - 0x4c, 0x4f, 0x25, 0x96, 0xae, 0x9e, 0x12, 0xef, 0x54, 0xda, 0x64, 0x0e, 0x0e, 0xd2, 
0x52, 0x4a, - 0xb9, 0xc4, 0x80, 0x87, 0x01, 0x4e, 0xe6, 0xb0, 0xba, 0xa5, 0x7f, 0x41, 0xbc, 0x53, 0xbc, 0x0f, - 0x17, 0xb8, 0x17, 0xcf, 0x77, 0x0d, 0x6b, 0xa4, 0x6a, 0xa7, 0x54, 0x7b, 0xaa, 0x4e, 0xfc, 0xe1, - 0x5d, 0xe9, 0x9d, 0xf8, 0xf7, 0x79, 0x84, 0x7d, 0x8e, 0x69, 0x30, 0xc8, 0xb1, 0x3f, 0xbc, 0x8b, - 0xfb, 0x50, 0x66, 0x8b, 0x31, 0x36, 0xbe, 0xa1, 0xea, 0xd0, 0x76, 0x79, 0x65, 0xa9, 0x2c, 0xd8, - 0xd9, 0xb1, 0x0c, 0xd6, 0xba, 0x81, 0xc1, 0x91, 0xad, 0xd3, 0xfd, 0x5c, 0xbf, 0x27, 0xcb, 0x4d, - 0xa5, 0x14, 0x7a, 0x79, 0x60, 0xbb, 0x8c, 0x50, 0x23, 0x3b, 0x4a, 0x70, 0x49, 0x10, 0x6a, 0x64, - 0x87, 0xe9, 0xbd, 0x0d, 0x1b, 0x9a, 0x26, 0xe6, 0x6c, 0x68, 0x6a, 0xd0, 0xe2, 0x7b, 0x12, 0x4a, - 0x24, 0x4b, 0xd3, 0x0e, 0x05, 0x20, 0xe0, 0xb8, 0x87, 0x3f, 0x87, 0xb7, 0xa7, 0xc9, 0x8a, 0x1b, - 0xae, 0xcf, 0xcd, 0x72, 0xd6, 0xf4, 0x36, 0x6c, 0x38, 0x67, 0xf3, 0x86, 0x38, 0xf1, 0x45, 0xe7, - 0x6c, 0xd6, 0xec, 0x23, 0x7e, 0x6d, 0x73, 0xa9, 0x46, 0x7c, 0xaa, 0x4b, 0x17, 0xe3, 0xe8, 0x98, - 0x02, 0xef, 0x02, 0xd2, 0x34, 0x95, 0x5a, 0xe4, 0xc4, 0xa4, 0x2a, 0x71, 0xa9, 0x45, 0x3c, 0xe9, - 0x72, 0x1c, 0x5c, 0xd1, 0x34, 0x99, 0x6b, 0xeb, 0x5c, 0x89, 0xaf, 0xc1, 0xba, 0x7d, 0xf2, 0x44, - 0x13, 0xcc, 0x52, 0x1d, 0x97, 0x0e, 0x8d, 0x17, 0xd2, 0x87, 0x3c, 0x4d, 0x6b, 0x4c, 0xc1, 0x79, - 0xd5, 0xe3, 0x62, 0xfc, 0x09, 0x20, 0xcd, 0x3b, 0x25, 0xae, 0xc3, 0x4b, 0xbb, 0xe7, 0x10, 0x8d, - 0x4a, 0x1f, 0x09, 0xa8, 0x90, 0x77, 0x42, 0x31, 0x63, 0xb6, 0xf7, 0xdc, 0x18, 0xfa, 0xa1, 0xc7, - 0xab, 0x82, 0xd9, 0x5c, 0x16, 0x78, 0xdb, 0x01, 0xe4, 0x9c, 0x3a, 0xc9, 0x0f, 0xef, 0x70, 0x58, - 0xc5, 0x39, 0x75, 0xe2, 0xdf, 0x7d, 0x04, 0x9b, 0x13, 0xcb, 0xb0, 0x7c, 0xea, 0x3a, 0x2e, 0x65, - 0xed, 0xbe, 0xd8, 0xb3, 0xd2, 0xbf, 0x57, 0x96, 0x34, 0xec, 0xc7, 0x71, 0xb4, 0xa0, 0x8a, 0xb2, - 0x31, 0x99, 0x17, 0x56, 0xf7, 0xa1, 0x1c, 0x67, 0x10, 0x2e, 0x82, 0xe0, 0x10, 0x4a, 0xb1, 0x6a, - 0xdc, 0xe8, 0x36, 0x59, 0x1d, 0xfd, 0x4a, 0x46, 0x69, 0x56, 0xcf, 0xdb, 0xad, 0x81, 0xac, 0x2a, - 0xc7, 0x9d, 0x41, 0xeb, 0x48, 0x46, 0x99, 0x6b, 0xc5, 0xc2, 0x7f, 0x56, 0xd0, 0xcb, 0x97, 0x2f, - 0x5f, 0xa6, 0x1f, 0x66, 0x0b, 0x1f, 0xa3, 0xab, 0xd5, 0xef, 0xd3, 0x50, 0x49, 0x76, 0xd2, 0xf8, - 0xe7, 0x70, 0x31, 0xbc, 0xf6, 0x7a, 0xd4, 0x57, 0x9f, 0x1b, 0x2e, 0xa7, 0xf6, 0x98, 0x88, 0x5e, - 0x34, 0x5a, 0x95, 0xcd, 0x00, 0xd5, 0xa7, 0xfe, 0x97, 0x86, 0xcb, 0x88, 0x3b, 0x26, 0x3e, 0x6e, - 0xc3, 0x65, 0xcb, 0x56, 0x3d, 0x9f, 0x58, 0x3a, 0x71, 0x75, 0x75, 0xfa, 0xe0, 0xa0, 0x12, 0x4d, - 0xa3, 0x9e, 0x67, 0x8b, 0x92, 0x12, 0x79, 0x79, 0xd7, 0xb2, 0xfb, 0x01, 0x78, 0x7a, 0xd6, 0xd6, - 0x03, 0xe8, 0x0c, 0x83, 0x32, 0xcb, 0x18, 0xf4, 0x0e, 0x14, 0xc7, 0xc4, 0x51, 0xa9, 0xe5, 0xbb, - 0x67, 0xbc, 0xff, 0x2b, 0x28, 0x85, 0x31, 0x71, 0x64, 0x36, 0x7e, 0x73, 0x2b, 0x91, 0xcc, 0x66, - 0x01, 0x15, 0x1f, 0x66, 0x0b, 0x45, 0x04, 0xd5, 0x7f, 0x66, 0xa0, 0x1c, 0xef, 0x07, 0x59, 0x7b, - 0xad, 0xf1, 0xb3, 0x3f, 0xc5, 0x4f, 0x87, 0x0f, 0x5e, 0xd9, 0x3d, 0xd6, 0x1a, 0xac, 0x28, 0xec, - 0xe7, 0x45, 0x97, 0xa6, 0x08, 0x4b, 0x56, 0x90, 0xd9, 0x79, 0x40, 0x45, 0xef, 0x5f, 0x50, 0x82, - 0x11, 0x3e, 0x84, 0xfc, 0x13, 0x8f, 0xfb, 0xce, 0x73, 0xdf, 0x1f, 0xbe, 0xda, 0xf7, 0xc3, 0x3e, - 0x77, 0x5e, 0x7c, 0xd8, 0x57, 0x3b, 0x5d, 0xe5, 0xa8, 0xde, 0x56, 0x02, 0x73, 0x7c, 0x09, 0xb2, - 0x26, 0xf9, 0xe6, 0x2c, 0x59, 0x3e, 0xb8, 0xe8, 0xbc, 0x8b, 0x70, 0x09, 0xb2, 0xcf, 0x29, 0x79, - 0x9a, 0x3c, 0xb4, 0xb9, 0xe8, 0x0d, 0x6e, 0x86, 0x5d, 0xc8, 0xf1, 0x7c, 0x61, 0x80, 0x20, 0x63, - 0xe8, 0x2d, 0x5c, 0x80, 0x6c, 0xa3, 0xab, 0xb0, 0x0d, 0x81, 0xa0, 0x2c, 0xa4, 0x6a, 0xaf, 0x25, - 0x37, 0x64, 
0x94, 0xae, 0xde, 0x86, 0xbc, 0x48, 0x02, 0xdb, 0x2c, 0x51, 0x1a, 0xd0, 0x5b, 0xc1, - 0x30, 0xf0, 0x91, 0x0a, 0xb5, 0xc7, 0x47, 0x07, 0xb2, 0x82, 0xd2, 0xc9, 0xa5, 0xce, 0xa2, 0x5c, - 0xd5, 0x83, 0x72, 0xbc, 0x21, 0xfc, 0x51, 0x58, 0x56, 0xfd, 0x6b, 0x0a, 0x4a, 0xb1, 0x06, 0x8f, - 0xb5, 0x16, 0xc4, 0x34, 0xed, 0xe7, 0x2a, 0x31, 0x0d, 0xe2, 0x05, 0xd4, 0x00, 0x2e, 0xaa, 0x33, - 0xc9, 0x79, 0x97, 0xee, 0x47, 0xda, 0x22, 0x39, 0x94, 0xaf, 0xfe, 0x29, 0x05, 0x68, 0xb6, 0x45, - 0x9c, 0x09, 0x33, 0xf5, 0x53, 0x86, 0x59, 0xfd, 0x63, 0x0a, 0x2a, 0xc9, 0xbe, 0x70, 0x26, 0xbc, - 0x2b, 0x3f, 0x69, 0x78, 0xff, 0x48, 0xc3, 0x6a, 0xa2, 0x1b, 0x3c, 0x6f, 0x74, 0x5f, 0xc3, 0xba, - 0xa1, 0xd3, 0xb1, 0x63, 0xfb, 0xd4, 0xd2, 0xce, 0x54, 0x93, 0x3e, 0xa3, 0xa6, 0x54, 0xe5, 0x87, - 0xc6, 0xee, 0xab, 0xfb, 0xcd, 0x5a, 0x6b, 0x6a, 0xd7, 0x66, 0x66, 0xfb, 0x1b, 0xad, 0xa6, 0x7c, - 0xd4, 0xeb, 0x0e, 0xe4, 0x4e, 0xe3, 0xb1, 0x7a, 0xdc, 0xf9, 0x65, 0xa7, 0xfb, 0x65, 0x47, 0x41, - 0xc6, 0x0c, 0xec, 0x0d, 0x6e, 0xfb, 0x1e, 0xa0, 0xd9, 0xa0, 0xf0, 0x45, 0x58, 0x14, 0x16, 0x7a, - 0x0b, 0x6f, 0xc0, 0x5a, 0xa7, 0xab, 0xf6, 0x5b, 0x4d, 0x59, 0x95, 0x1f, 0x3c, 0x90, 0x1b, 0x83, - 0xbe, 0xb8, 0x80, 0x47, 0xe8, 0x41, 0x62, 0x83, 0x57, 0xff, 0x90, 0x81, 0x8d, 0x05, 0x91, 0xe0, - 0x7a, 0xd0, 0xfb, 0x8b, 0xeb, 0xc8, 0x8d, 0xf3, 0x44, 0x5f, 0x63, 0xdd, 0x45, 0x8f, 0xb8, 0x7e, - 0x70, 0x55, 0xf8, 0x04, 0x58, 0x96, 0x2c, 0xdf, 0x18, 0x1a, 0xd4, 0x0d, 0xde, 0x2b, 0xc4, 0x85, - 0x60, 0x6d, 0x2a, 0x17, 0x4f, 0x16, 0x3f, 0x03, 0xec, 0xd8, 0x9e, 0xe1, 0x1b, 0xcf, 0xa8, 0x6a, - 0x58, 0xe1, 0xe3, 0x06, 0xbb, 0x20, 0x64, 0x15, 0x14, 0x6a, 0x5a, 0x96, 0x1f, 0xa1, 0x2d, 0x3a, - 0x22, 0x33, 0x68, 0x76, 0x98, 0x67, 0x14, 0x14, 0x6a, 0x22, 0xf4, 0x15, 0x28, 0xeb, 0xf6, 0x84, - 0xb5, 0x5b, 0x02, 0xc7, 0x6a, 0x47, 0x4a, 0x29, 0x09, 0x59, 0x04, 0x09, 0xfa, 0xe1, 0xe9, 0xab, - 0x4a, 0x59, 0x29, 0x09, 0x99, 0x80, 0x5c, 0x85, 0x35, 0x32, 0x1a, 0xb9, 0xcc, 0x79, 0xe8, 0x48, - 0x74, 0xf8, 0x95, 0x48, 0xcc, 0x81, 0x5b, 0x0f, 0xa1, 0x10, 0xe6, 0x81, 0x95, 0x6a, 0x96, 0x09, - 0xd5, 0x11, 0x6f, 0x5b, 0xe9, 0x9d, 0xa2, 0x52, 0xb0, 0x42, 0xe5, 0x15, 0x28, 0x1b, 0x9e, 0x3a, - 0x7d, 0x64, 0x4d, 0x6f, 0xa7, 0x77, 0x0a, 0x4a, 0xc9, 0xf0, 0xa2, 0x57, 0xb5, 0xea, 0x77, 0x69, - 0xa8, 0x24, 0x1f, 0x89, 0x71, 0x13, 0x0a, 0xa6, 0xad, 0x11, 0x4e, 0x2d, 0xf1, 0x0b, 0xc5, 0xce, - 0x6b, 0xde, 0x95, 0x6b, 0xed, 0x00, 0xaf, 0x44, 0x96, 0x5b, 0x7f, 0x4b, 0x41, 0x21, 0x14, 0xe3, - 0x0b, 0x90, 0x75, 0x88, 0x7f, 0xca, 0xdd, 0xe5, 0x0e, 0xd2, 0x28, 0xa5, 0xf0, 0x31, 0x93, 0x7b, - 0x0e, 0xb1, 0x38, 0x05, 0x02, 0x39, 0x1b, 0xb3, 0x75, 0x35, 0x29, 0xd1, 0xf9, 0xf5, 0xc1, 0x1e, - 0x8f, 0xa9, 0xe5, 0x7b, 0xe1, 0xba, 0x06, 0xf2, 0x46, 0x20, 0xc6, 0xd7, 0x61, 0xdd, 0x77, 0x89, - 0x61, 0x26, 0xb0, 0x59, 0x8e, 0x45, 0xa1, 0x22, 0x02, 0xef, 0xc3, 0xa5, 0xd0, 0xaf, 0x4e, 0x7d, - 0xa2, 0x9d, 0x52, 0x7d, 0x6a, 0x94, 0xe7, 0x2f, 0x90, 0x17, 0x03, 0x40, 0x33, 0xd0, 0x87, 0xb6, - 0xd5, 0xef, 0x53, 0xb0, 0x1e, 0x5e, 0x78, 0xf4, 0x28, 0x59, 0x47, 0x00, 0xc4, 0xb2, 0x6c, 0x3f, - 0x9e, 0xae, 0x79, 0x2a, 0xcf, 0xd9, 0xd5, 0xea, 0x91, 0x91, 0x12, 0x73, 0xb0, 0x35, 0x06, 0x98, - 0x6a, 0x96, 0xa6, 0xed, 0x32, 0x94, 0x82, 0x5f, 0x00, 0xf8, 0xcf, 0x48, 0xe2, 0x8a, 0x0c, 0x42, - 0xc4, 0x6e, 0x46, 0x78, 0x13, 0x72, 0x27, 0x74, 0x64, 0x58, 0xc1, 0xbb, 0xa4, 0x18, 0x84, 0xaf, - 0x9d, 0xd9, 0xe8, 0xb5, 0xf3, 0xe0, 0x77, 0x29, 0xd8, 0xd0, 0xec, 0xf1, 0x6c, 0xbc, 0x07, 0x68, - 0xe6, 0x9e, 0xee, 0x7d, 0x91, 0xfa, 0xea, 0xfe, 0xc8, 0xf0, 0x4f, 0x27, 0x27, 0x35, 0xcd, 0x1e, - 0xef, 0x8e, 0x6c, 0x93, 0x58, 0xa3, 
0xe9, 0xef, 0x60, 0xfc, 0x1f, 0xed, 0xc6, 0x88, 0x5a, 0x37, - 0x46, 0x76, 0xec, 0x57, 0xb1, 0x7b, 0xd3, 0x7f, 0xbf, 0x4d, 0x67, 0x0e, 0x7b, 0x07, 0x7f, 0x4e, - 0x6f, 0x1d, 0x8a, 0x6f, 0xf5, 0xc2, 0xdc, 0x28, 0x74, 0x68, 0x52, 0x8d, 0xcd, 0xf7, 0x7f, 0x01, - 0x00, 0x00, 0xff, 0xff, 0x8e, 0x54, 0xe7, 0xef, 0x60, 0x1b, 0x00, 0x00, +func init() { + proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor_descriptor_4df4cb5f42392df6) +} + +var fileDescriptor_descriptor_4df4cb5f42392df6 = []byte{ + // 2555 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xdd, 0x6e, 0x1b, 0xc7, + 0xf5, 0xcf, 0xf2, 0x4b, 0xe4, 0x21, 0x45, 0x8d, 0x46, 0x8a, 0xbd, 0x56, 0x3e, 0x2c, 0x33, 0x1f, + 0x96, 0x9d, 0x7f, 0xa8, 0xc0, 0xb1, 0x1d, 0x47, 0xfe, 0x23, 0x2d, 0x45, 0xae, 0x15, 0xaa, 0x12, + 0xc9, 0x2e, 0xa9, 0xe6, 0x03, 0x28, 0x16, 0xa3, 0xdd, 0x21, 0xb9, 0xf6, 0x72, 0x77, 0xb3, 0xbb, + 0xb4, 0xad, 0xa0, 0x17, 0x06, 0x7a, 0xd5, 0xab, 0xde, 0x16, 0x45, 0xd1, 0x8b, 0xde, 0x04, 0xe8, + 0x03, 0x14, 0xc8, 0x5d, 0x9f, 0xa0, 0x40, 0xde, 0xa0, 0x68, 0x0b, 0xb4, 0x8f, 0xd0, 0xcb, 0x62, + 0x66, 0x76, 0x97, 0xbb, 0x24, 0x15, 0x2b, 0x01, 0xe2, 0x5c, 0x91, 0xf3, 0x9b, 0xdf, 0x39, 0x73, + 0xe6, 0xcc, 0x99, 0x33, 0x67, 0x66, 0x61, 0x7b, 0xe4, 0x38, 0x23, 0x8b, 0xee, 0xba, 0x9e, 0x13, + 0x38, 0xa7, 0xd3, 0xe1, 0xae, 0x41, 0x7d, 0xdd, 0x33, 0xdd, 0xc0, 0xf1, 0xea, 0x1c, 0xc3, 0x6b, + 0x82, 0x51, 0x8f, 0x18, 0xb5, 0x63, 0x58, 0x7f, 0x60, 0x5a, 0xb4, 0x15, 0x13, 0xfb, 0x34, 0xc0, + 0xf7, 0x20, 0x37, 0x34, 0x2d, 0x2a, 0x4b, 0xdb, 0xd9, 0x9d, 0xf2, 0xad, 0x37, 0xeb, 0x73, 0x42, + 0xf5, 0xb4, 0x44, 0x8f, 0xc1, 0x2a, 0x97, 0xa8, 0xfd, 0x2b, 0x07, 0x1b, 0x4b, 0x7a, 0x31, 0x86, + 0x9c, 0x4d, 0x26, 0x4c, 0xa3, 0xb4, 0x53, 0x52, 0xf9, 0x7f, 0x2c, 0xc3, 0x8a, 0x4b, 0xf4, 0x47, + 0x64, 0x44, 0xe5, 0x0c, 0x87, 0xa3, 0x26, 0x7e, 0x1d, 0xc0, 0xa0, 0x2e, 0xb5, 0x0d, 0x6a, 0xeb, + 0x67, 0x72, 0x76, 0x3b, 0xbb, 0x53, 0x52, 0x13, 0x08, 0x7e, 0x07, 0xd6, 0xdd, 0xe9, 0xa9, 0x65, + 0xea, 0x5a, 0x82, 0x06, 0xdb, 0xd9, 0x9d, 0xbc, 0x8a, 0x44, 0x47, 0x6b, 0x46, 0xbe, 0x0e, 0x6b, + 0x4f, 0x28, 0x79, 0x94, 0xa4, 0x96, 0x39, 0xb5, 0xca, 0xe0, 0x04, 0xb1, 0x09, 0x95, 0x09, 0xf5, + 0x7d, 0x32, 0xa2, 0x5a, 0x70, 0xe6, 0x52, 0x39, 0xc7, 0x67, 0xbf, 0xbd, 0x30, 0xfb, 0xf9, 0x99, + 0x97, 0x43, 0xa9, 0xc1, 0x99, 0x4b, 0x71, 0x03, 0x4a, 0xd4, 0x9e, 0x4e, 0x84, 0x86, 0xfc, 0x39, + 0xfe, 0x53, 0xec, 0xe9, 0x64, 0x5e, 0x4b, 0x91, 0x89, 0x85, 0x2a, 0x56, 0x7c, 0xea, 0x3d, 0x36, + 0x75, 0x2a, 0x17, 0xb8, 0x82, 0xeb, 0x0b, 0x0a, 0xfa, 0xa2, 0x7f, 0x5e, 0x47, 0x24, 0x87, 0x9b, + 0x50, 0xa2, 0x4f, 0x03, 0x6a, 0xfb, 0xa6, 0x63, 0xcb, 0x2b, 0x5c, 0xc9, 0x5b, 0x4b, 0x56, 0x91, + 0x5a, 0xc6, 0xbc, 0x8a, 0x99, 0x1c, 0xbe, 0x0b, 0x2b, 0x8e, 0x1b, 0x98, 0x8e, 0xed, 0xcb, 0xc5, + 0x6d, 0x69, 0xa7, 0x7c, 0xeb, 0xd5, 0xa5, 0x81, 0xd0, 0x15, 0x1c, 0x35, 0x22, 0xe3, 0x36, 0x20, + 0xdf, 0x99, 0x7a, 0x3a, 0xd5, 0x74, 0xc7, 0xa0, 0x9a, 0x69, 0x0f, 0x1d, 0xb9, 0xc4, 0x15, 0x5c, + 0x5d, 0x9c, 0x08, 0x27, 0x36, 0x1d, 0x83, 0xb6, 0xed, 0xa1, 0xa3, 0x56, 0xfd, 0x54, 0x1b, 0x5f, + 0x82, 0x82, 0x7f, 0x66, 0x07, 0xe4, 0xa9, 0x5c, 0xe1, 0x11, 0x12, 0xb6, 0x6a, 0x5f, 0x17, 0x60, + 0xed, 0x22, 0x21, 0x76, 0x1f, 0xf2, 0x43, 0x36, 0x4b, 0x39, 0xf3, 0x5d, 0x7c, 0x20, 0x64, 0xd2, + 0x4e, 0x2c, 0x7c, 0x4f, 0x27, 0x36, 0xa0, 0x6c, 0x53, 0x3f, 0xa0, 0x86, 0x88, 0x88, 0xec, 0x05, + 0x63, 0x0a, 0x84, 0xd0, 0x62, 0x48, 0xe5, 0xbe, 0x57, 0x48, 0x7d, 0x0a, 0x6b, 0xb1, 0x49, 0x9a, + 0x47, 0xec, 0x51, 0x14, 0x9b, 0xbb, 0xcf, 0xb3, 0xa4, 
0xae, 0x44, 0x72, 0x2a, 0x13, 0x53, 0xab, + 0x34, 0xd5, 0xc6, 0x2d, 0x00, 0xc7, 0xa6, 0xce, 0x50, 0x33, 0xa8, 0x6e, 0xc9, 0xc5, 0x73, 0xbc, + 0xd4, 0x65, 0x94, 0x05, 0x2f, 0x39, 0x02, 0xd5, 0x2d, 0xfc, 0xe1, 0x2c, 0xd4, 0x56, 0xce, 0x89, + 0x94, 0x63, 0xb1, 0xc9, 0x16, 0xa2, 0xed, 0x04, 0xaa, 0x1e, 0x65, 0x71, 0x4f, 0x8d, 0x70, 0x66, + 0x25, 0x6e, 0x44, 0xfd, 0xb9, 0x33, 0x53, 0x43, 0x31, 0x31, 0xb1, 0x55, 0x2f, 0xd9, 0xc4, 0x6f, + 0x40, 0x0c, 0x68, 0x3c, 0xac, 0x80, 0x67, 0xa1, 0x4a, 0x04, 0x76, 0xc8, 0x84, 0x6e, 0x7d, 0x09, + 0xd5, 0xb4, 0x7b, 0xf0, 0x26, 0xe4, 0xfd, 0x80, 0x78, 0x01, 0x8f, 0xc2, 0xbc, 0x2a, 0x1a, 0x18, + 0x41, 0x96, 0xda, 0x06, 0xcf, 0x72, 0x79, 0x95, 0xfd, 0xc5, 0x3f, 0x9d, 0x4d, 0x38, 0xcb, 0x27, + 0xfc, 0xf6, 0xe2, 0x8a, 0xa6, 0x34, 0xcf, 0xcf, 0x7b, 0xeb, 0x03, 0x58, 0x4d, 0x4d, 0xe0, 0xa2, + 0x43, 0xd7, 0x7e, 0x05, 0x2f, 0x2f, 0x55, 0x8d, 0x3f, 0x85, 0xcd, 0xa9, 0x6d, 0xda, 0x01, 0xf5, + 0x5c, 0x8f, 0xb2, 0x88, 0x15, 0x43, 0xc9, 0xff, 0x5e, 0x39, 0x27, 0xe6, 0x4e, 0x92, 0x6c, 0xa1, + 0x45, 0xdd, 0x98, 0x2e, 0x82, 0x37, 0x4b, 0xc5, 0xff, 0xac, 0xa0, 0x67, 0xcf, 0x9e, 0x3d, 0xcb, + 0xd4, 0x7e, 0x57, 0x80, 0xcd, 0x65, 0x7b, 0x66, 0xe9, 0xf6, 0xbd, 0x04, 0x05, 0x7b, 0x3a, 0x39, + 0xa5, 0x1e, 0x77, 0x52, 0x5e, 0x0d, 0x5b, 0xb8, 0x01, 0x79, 0x8b, 0x9c, 0x52, 0x4b, 0xce, 0x6d, + 0x4b, 0x3b, 0xd5, 0x5b, 0xef, 0x5c, 0x68, 0x57, 0xd6, 0x8f, 0x98, 0x88, 0x2a, 0x24, 0xf1, 0x47, + 0x90, 0x0b, 0x53, 0x34, 0xd3, 0x70, 0xf3, 0x62, 0x1a, 0xd8, 0x5e, 0x52, 0xb9, 0x1c, 0x7e, 0x05, + 0x4a, 0xec, 0x57, 0xc4, 0x46, 0x81, 0xdb, 0x5c, 0x64, 0x00, 0x8b, 0x0b, 0xbc, 0x05, 0x45, 0xbe, + 0x4d, 0x0c, 0x1a, 0x1d, 0x6d, 0x71, 0x9b, 0x05, 0x96, 0x41, 0x87, 0x64, 0x6a, 0x05, 0xda, 0x63, + 0x62, 0x4d, 0x29, 0x0f, 0xf8, 0x92, 0x5a, 0x09, 0xc1, 0x5f, 0x30, 0x0c, 0x5f, 0x85, 0xb2, 0xd8, + 0x55, 0xa6, 0x6d, 0xd0, 0xa7, 0x3c, 0x7b, 0xe6, 0x55, 0xb1, 0xd1, 0xda, 0x0c, 0x61, 0xc3, 0x3f, + 0xf4, 0x1d, 0x3b, 0x0a, 0x4d, 0x3e, 0x04, 0x03, 0xf8, 0xf0, 0x1f, 0xcc, 0x27, 0xee, 0xd7, 0x96, + 0x4f, 0x6f, 0x3e, 0xa6, 0x6a, 0x7f, 0xc9, 0x40, 0x8e, 0xe7, 0x8b, 0x35, 0x28, 0x0f, 0x3e, 0xeb, + 0x29, 0x5a, 0xab, 0x7b, 0xb2, 0x7f, 0xa4, 0x20, 0x09, 0x57, 0x01, 0x38, 0xf0, 0xe0, 0xa8, 0xdb, + 0x18, 0xa0, 0x4c, 0xdc, 0x6e, 0x77, 0x06, 0x77, 0x6f, 0xa3, 0x6c, 0x2c, 0x70, 0x22, 0x80, 0x5c, + 0x92, 0xf0, 0xfe, 0x2d, 0x94, 0xc7, 0x08, 0x2a, 0x42, 0x41, 0xfb, 0x53, 0xa5, 0x75, 0xf7, 0x36, + 0x2a, 0xa4, 0x91, 0xf7, 0x6f, 0xa1, 0x15, 0xbc, 0x0a, 0x25, 0x8e, 0xec, 0x77, 0xbb, 0x47, 0xa8, + 0x18, 0xeb, 0xec, 0x0f, 0xd4, 0x76, 0xe7, 0x00, 0x95, 0x62, 0x9d, 0x07, 0x6a, 0xf7, 0xa4, 0x87, + 0x20, 0xd6, 0x70, 0xac, 0xf4, 0xfb, 0x8d, 0x03, 0x05, 0x95, 0x63, 0xc6, 0xfe, 0x67, 0x03, 0xa5, + 0x8f, 0x2a, 0x29, 0xb3, 0xde, 0xbf, 0x85, 0x56, 0xe3, 0x21, 0x94, 0xce, 0xc9, 0x31, 0xaa, 0xe2, + 0x75, 0x58, 0x15, 0x43, 0x44, 0x46, 0xac, 0xcd, 0x41, 0x77, 0x6f, 0x23, 0x34, 0x33, 0x44, 0x68, + 0x59, 0x4f, 0x01, 0x77, 0x6f, 0x23, 0x5c, 0x6b, 0x42, 0x9e, 0x47, 0x17, 0xc6, 0x50, 0x3d, 0x6a, + 0xec, 0x2b, 0x47, 0x5a, 0xb7, 0x37, 0x68, 0x77, 0x3b, 0x8d, 0x23, 0x24, 0xcd, 0x30, 0x55, 0xf9, + 0xf9, 0x49, 0x5b, 0x55, 0x5a, 0x28, 0x93, 0xc4, 0x7a, 0x4a, 0x63, 0xa0, 0xb4, 0x50, 0xb6, 0xa6, + 0xc3, 0xe6, 0xb2, 0x3c, 0xb9, 0x74, 0x67, 0x24, 0x96, 0x38, 0x73, 0xce, 0x12, 0x73, 0x5d, 0x0b, + 0x4b, 0xfc, 0xcf, 0x0c, 0x6c, 0x2c, 0x39, 0x2b, 0x96, 0x0e, 0xf2, 0x13, 0xc8, 0x8b, 0x10, 0x15, + 0xa7, 0xe7, 0x8d, 0xa5, 0x87, 0x0e, 0x0f, 0xd8, 0x85, 0x13, 0x94, 0xcb, 0x25, 0x2b, 0x88, 0xec, + 0x39, 0x15, 0x04, 0x53, 0xb1, 0x90, 0xd3, 0x7f, 0xb9, 0x90, 0xd3, 0xc5, 0xb1, 
0x77, 0xf7, 0x22, + 0xc7, 0x1e, 0xc7, 0xbe, 0x5b, 0x6e, 0xcf, 0x2f, 0xc9, 0xed, 0xf7, 0x61, 0x7d, 0x41, 0xd1, 0x85, + 0x73, 0xec, 0xaf, 0x25, 0x90, 0xcf, 0x73, 0xce, 0x73, 0x32, 0x5d, 0x26, 0x95, 0xe9, 0xee, 0xcf, + 0x7b, 0xf0, 0xda, 0xf9, 0x8b, 0xb0, 0xb0, 0xd6, 0x5f, 0x49, 0x70, 0x69, 0x79, 0xa5, 0xb8, 0xd4, + 0x86, 0x8f, 0xa0, 0x30, 0xa1, 0xc1, 0xd8, 0x89, 0xaa, 0xa5, 0xb7, 0x97, 0x9c, 0xc1, 0xac, 0x7b, + 0x7e, 0xb1, 0x43, 0xa9, 0xe4, 0x21, 0x9e, 0x3d, 0xaf, 0xdc, 0x13, 0xd6, 0x2c, 0x58, 0xfa, 0x9b, + 0x0c, 0xbc, 0xbc, 0x54, 0xf9, 0x52, 0x43, 0x5f, 0x03, 0x30, 0x6d, 0x77, 0x1a, 0x88, 0x8a, 0x48, + 0x24, 0xd8, 0x12, 0x47, 0x78, 0xf2, 0x62, 0xc9, 0x73, 0x1a, 0xc4, 0xfd, 0x59, 0xde, 0x0f, 0x02, + 0xe2, 0x84, 0x7b, 0x33, 0x43, 0x73, 0xdc, 0xd0, 0xd7, 0xcf, 0x99, 0xe9, 0x42, 0x60, 0xbe, 0x07, + 0x48, 0xb7, 0x4c, 0x6a, 0x07, 0x9a, 0x1f, 0x78, 0x94, 0x4c, 0x4c, 0x7b, 0xc4, 0x4f, 0x90, 0xe2, + 0x5e, 0x7e, 0x48, 0x2c, 0x9f, 0xaa, 0x6b, 0xa2, 0xbb, 0x1f, 0xf5, 0x32, 0x09, 0x1e, 0x40, 0x5e, + 0x42, 0xa2, 0x90, 0x92, 0x10, 0xdd, 0xb1, 0x44, 0xed, 0xeb, 0x22, 0x94, 0x13, 0x75, 0x35, 0xbe, + 0x06, 0x95, 0x87, 0xe4, 0x31, 0xd1, 0xa2, 0xbb, 0x92, 0xf0, 0x44, 0x99, 0x61, 0xbd, 0xf0, 0xbe, + 0xf4, 0x1e, 0x6c, 0x72, 0x8a, 0x33, 0x0d, 0xa8, 0xa7, 0xe9, 0x16, 0xf1, 0x7d, 0xee, 0xb4, 0x22, + 0xa7, 0x62, 0xd6, 0xd7, 0x65, 0x5d, 0xcd, 0xa8, 0x07, 0xdf, 0x81, 0x0d, 0x2e, 0x31, 0x99, 0x5a, + 0x81, 0xe9, 0x5a, 0x54, 0x63, 0xb7, 0x37, 0x9f, 0x9f, 0x24, 0xb1, 0x65, 0xeb, 0x8c, 0x71, 0x1c, + 0x12, 0x98, 0x45, 0x3e, 0x6e, 0xc1, 0x6b, 0x5c, 0x6c, 0x44, 0x6d, 0xea, 0x91, 0x80, 0x6a, 0xf4, + 0x8b, 0x29, 0xb1, 0x7c, 0x8d, 0xd8, 0x86, 0x36, 0x26, 0xfe, 0x58, 0xde, 0x64, 0x0a, 0xf6, 0x33, + 0xb2, 0xa4, 0x5e, 0x61, 0xc4, 0x83, 0x90, 0xa7, 0x70, 0x5a, 0xc3, 0x36, 0x3e, 0x26, 0xfe, 0x18, + 0xef, 0xc1, 0x25, 0xae, 0xc5, 0x0f, 0x3c, 0xd3, 0x1e, 0x69, 0xfa, 0x98, 0xea, 0x8f, 0xb4, 0x69, + 0x30, 0xbc, 0x27, 0xbf, 0x92, 0x1c, 0x9f, 0x5b, 0xd8, 0xe7, 0x9c, 0x26, 0xa3, 0x9c, 0x04, 0xc3, + 0x7b, 0xb8, 0x0f, 0x15, 0xb6, 0x18, 0x13, 0xf3, 0x4b, 0xaa, 0x0d, 0x1d, 0x8f, 0x1f, 0x8d, 0xd5, + 0x25, 0xa9, 0x29, 0xe1, 0xc1, 0x7a, 0x37, 0x14, 0x38, 0x76, 0x0c, 0xba, 0x97, 0xef, 0xf7, 0x14, + 0xa5, 0xa5, 0x96, 0x23, 0x2d, 0x0f, 0x1c, 0x8f, 0x05, 0xd4, 0xc8, 0x89, 0x1d, 0x5c, 0x16, 0x01, + 0x35, 0x72, 0x22, 0xf7, 0xde, 0x81, 0x0d, 0x5d, 0x17, 0x73, 0x36, 0x75, 0x2d, 0xbc, 0x63, 0xf9, + 0x32, 0x4a, 0x39, 0x4b, 0xd7, 0x0f, 0x04, 0x21, 0x8c, 0x71, 0x1f, 0x7f, 0x08, 0x2f, 0xcf, 0x9c, + 0x95, 0x14, 0x5c, 0x5f, 0x98, 0xe5, 0xbc, 0xe8, 0x1d, 0xd8, 0x70, 0xcf, 0x16, 0x05, 0x71, 0x6a, + 0x44, 0xf7, 0x6c, 0x5e, 0xec, 0x03, 0xd8, 0x74, 0xc7, 0xee, 0xa2, 0xdc, 0xcd, 0xa4, 0x1c, 0x76, + 0xc7, 0xee, 0xbc, 0xe0, 0x5b, 0xfc, 0xc2, 0xed, 0x51, 0x9d, 0x04, 0xd4, 0x90, 0x2f, 0x27, 0xe9, + 0x89, 0x0e, 0xbc, 0x0b, 0x48, 0xd7, 0x35, 0x6a, 0x93, 0x53, 0x8b, 0x6a, 0xc4, 0xa3, 0x36, 0xf1, + 0xe5, 0xab, 0x49, 0x72, 0x55, 0xd7, 0x15, 0xde, 0xdb, 0xe0, 0x9d, 0xf8, 0x26, 0xac, 0x3b, 0xa7, + 0x0f, 0x75, 0x11, 0x92, 0x9a, 0xeb, 0xd1, 0xa1, 0xf9, 0x54, 0x7e, 0x93, 0xfb, 0x77, 0x8d, 0x75, + 0xf0, 0x80, 0xec, 0x71, 0x18, 0xdf, 0x00, 0xa4, 0xfb, 0x63, 0xe2, 0xb9, 0x3c, 0x27, 0xfb, 0x2e, + 0xd1, 0xa9, 0xfc, 0x96, 0xa0, 0x0a, 0xbc, 0x13, 0xc1, 0x6c, 0x4b, 0xf8, 0x4f, 0xcc, 0x61, 0x10, + 0x69, 0xbc, 0x2e, 0xb6, 0x04, 0xc7, 0x42, 0x6d, 0x3b, 0x80, 0x98, 0x2b, 0x52, 0x03, 0xef, 0x70, + 0x5a, 0xd5, 0x1d, 0xbb, 0xc9, 0x71, 0xdf, 0x80, 0x55, 0xc6, 0x9c, 0x0d, 0x7a, 0x43, 0x14, 0x64, + 0xee, 0x38, 0x31, 0xe2, 0x0f, 0x56, 0x1b, 0xd7, 0xf6, 0xa0, 0x92, 0x8c, 0x4f, 0x5c, 0x02, 0x11, + 0xa1, 
0x48, 0x62, 0xc5, 0x4a, 0xb3, 0xdb, 0x62, 0x65, 0xc6, 0xe7, 0x0a, 0xca, 0xb0, 0x72, 0xe7, + 0xa8, 0x3d, 0x50, 0x34, 0xf5, 0xa4, 0x33, 0x68, 0x1f, 0x2b, 0x28, 0x9b, 0xa8, 0xab, 0x0f, 0x73, + 0xc5, 0xb7, 0xd1, 0xf5, 0xda, 0x37, 0x19, 0xa8, 0xa6, 0x2f, 0x4a, 0xf8, 0xff, 0xe1, 0x72, 0xf4, + 0xaa, 0xe1, 0xd3, 0x40, 0x7b, 0x62, 0x7a, 0x7c, 0xe3, 0x4c, 0x88, 0x38, 0xc4, 0xe2, 0xa5, 0xdb, + 0x0c, 0x59, 0x7d, 0x1a, 0x7c, 0x62, 0x7a, 0x6c, 0x5b, 0x4c, 0x48, 0x80, 0x8f, 0xe0, 0xaa, 0xed, + 0x68, 0x7e, 0x40, 0x6c, 0x83, 0x78, 0x86, 0x36, 0x7b, 0x4f, 0xd2, 0x88, 0xae, 0x53, 0xdf, 0x77, + 0xc4, 0x81, 0x15, 0x6b, 0x79, 0xd5, 0x76, 0xfa, 0x21, 0x79, 0x96, 0xc9, 0x1b, 0x21, 0x75, 0x2e, + 0xcc, 0xb2, 0xe7, 0x85, 0xd9, 0x2b, 0x50, 0x9a, 0x10, 0x57, 0xa3, 0x76, 0xe0, 0x9d, 0xf1, 0xf2, + 0xb8, 0xa8, 0x16, 0x27, 0xc4, 0x55, 0x58, 0xfb, 0x85, 0xdc, 0x52, 0x0e, 0x73, 0xc5, 0x22, 0x2a, + 0x1d, 0xe6, 0x8a, 0x25, 0x04, 0xb5, 0x7f, 0x64, 0xa1, 0x92, 0x2c, 0x97, 0xd9, 0xed, 0x43, 0xe7, + 0x27, 0x8b, 0xc4, 0x73, 0xcf, 0x1b, 0xdf, 0x5a, 0x5c, 0xd7, 0x9b, 0xec, 0xc8, 0xd9, 0x2b, 0x88, + 0x22, 0x56, 0x15, 0x92, 0xec, 0xb8, 0x67, 0xd9, 0x86, 0x8a, 0xa2, 0xa1, 0xa8, 0x86, 0x2d, 0x7c, + 0x00, 0x85, 0x87, 0x3e, 0xd7, 0x5d, 0xe0, 0xba, 0xdf, 0xfc, 0x76, 0xdd, 0x87, 0x7d, 0xae, 0xbc, + 0x74, 0xd8, 0xd7, 0x3a, 0x5d, 0xf5, 0xb8, 0x71, 0xa4, 0x86, 0xe2, 0xf8, 0x0a, 0xe4, 0x2c, 0xf2, + 0xe5, 0x59, 0xfa, 0x70, 0xe2, 0xd0, 0x45, 0x17, 0xe1, 0x0a, 0xe4, 0x9e, 0x50, 0xf2, 0x28, 0x7d, + 0x24, 0x70, 0xe8, 0x07, 0xdc, 0x0c, 0xbb, 0x90, 0xe7, 0xfe, 0xc2, 0x00, 0xa1, 0xc7, 0xd0, 0x4b, + 0xb8, 0x08, 0xb9, 0x66, 0x57, 0x65, 0x1b, 0x02, 0x41, 0x45, 0xa0, 0x5a, 0xaf, 0xad, 0x34, 0x15, + 0x94, 0xa9, 0xdd, 0x81, 0x82, 0x70, 0x02, 0xdb, 0x2c, 0xb1, 0x1b, 0xd0, 0x4b, 0x61, 0x33, 0xd4, + 0x21, 0x45, 0xbd, 0x27, 0xc7, 0xfb, 0x8a, 0x8a, 0x32, 0xe9, 0xa5, 0xce, 0xa1, 0x7c, 0xcd, 0x87, + 0x4a, 0xb2, 0x5e, 0x7e, 0x31, 0x77, 0xe1, 0xbf, 0x4a, 0x50, 0x4e, 0xd4, 0xbf, 0xac, 0x70, 0x21, + 0x96, 0xe5, 0x3c, 0xd1, 0x88, 0x65, 0x12, 0x3f, 0x0c, 0x0d, 0xe0, 0x50, 0x83, 0x21, 0x17, 0x5d, + 0xba, 0x17, 0xb4, 0x45, 0xf2, 0xa8, 0x50, 0xfb, 0xa3, 0x04, 0x68, 0xbe, 0x00, 0x9d, 0x33, 0x53, + 0xfa, 0x31, 0xcd, 0xac, 0xfd, 0x41, 0x82, 0x6a, 0xba, 0xea, 0x9c, 0x33, 0xef, 0xda, 0x8f, 0x6a, + 0xde, 0xdf, 0x33, 0xb0, 0x9a, 0xaa, 0x35, 0x2f, 0x6a, 0xdd, 0x17, 0xb0, 0x6e, 0x1a, 0x74, 0xe2, + 0x3a, 0x01, 0xb5, 0xf5, 0x33, 0xcd, 0xa2, 0x8f, 0xa9, 0x25, 0xd7, 0x78, 0xd2, 0xd8, 0xfd, 0xf6, + 0x6a, 0xb6, 0xde, 0x9e, 0xc9, 0x1d, 0x31, 0xb1, 0xbd, 0x8d, 0x76, 0x4b, 0x39, 0xee, 0x75, 0x07, + 0x4a, 0xa7, 0xf9, 0x99, 0x76, 0xd2, 0xf9, 0x59, 0xa7, 0xfb, 0x49, 0x47, 0x45, 0xe6, 0x1c, 0xed, + 0x07, 0xdc, 0xf6, 0x3d, 0x40, 0xf3, 0x46, 0xe1, 0xcb, 0xb0, 0xcc, 0x2c, 0xf4, 0x12, 0xde, 0x80, + 0xb5, 0x4e, 0x57, 0xeb, 0xb7, 0x5b, 0x8a, 0xa6, 0x3c, 0x78, 0xa0, 0x34, 0x07, 0x7d, 0xf1, 0x3e, + 0x11, 0xb3, 0x07, 0xa9, 0x0d, 0x5e, 0xfb, 0x7d, 0x16, 0x36, 0x96, 0x58, 0x82, 0x1b, 0xe1, 0xcd, + 0x42, 0x5c, 0x76, 0xde, 0xbd, 0x88, 0xf5, 0x75, 0x56, 0x10, 0xf4, 0x88, 0x17, 0x84, 0x17, 0x91, + 0x1b, 0xc0, 0xbc, 0x64, 0x07, 0xe6, 0xd0, 0xa4, 0x5e, 0xf8, 0x9c, 0x23, 0xae, 0x1b, 0x6b, 0x33, + 0x5c, 0xbc, 0xe8, 0xfc, 0x1f, 0x60, 0xd7, 0xf1, 0xcd, 0xc0, 0x7c, 0x4c, 0x35, 0xd3, 0x8e, 0xde, + 0x7e, 0xd8, 0xf5, 0x23, 0xa7, 0xa2, 0xa8, 0xa7, 0x6d, 0x07, 0x31, 0xdb, 0xa6, 0x23, 0x32, 0xc7, + 0x66, 0xc9, 0x3c, 0xab, 0xa2, 0xa8, 0x27, 0x66, 0x5f, 0x83, 0x8a, 0xe1, 0x4c, 0x59, 0x4d, 0x26, + 0x78, 0xec, 0xec, 0x90, 0xd4, 0xb2, 0xc0, 0x62, 0x4a, 0x58, 0x6d, 0xcf, 0x1e, 0x9d, 0x2a, 0x6a, + 0x59, 0x60, 0x82, 0x72, 0x1d, 
0xd6, 0xc8, 0x68, 0xe4, 0x31, 0xe5, 0x91, 0x22, 0x71, 0x7f, 0xa8, + 0xc6, 0x30, 0x27, 0x6e, 0x1d, 0x42, 0x31, 0xf2, 0x03, 0x3b, 0xaa, 0x99, 0x27, 0x34, 0x57, 0x5c, + 0x8a, 0x33, 0x3b, 0x25, 0xb5, 0x68, 0x47, 0x9d, 0xd7, 0xa0, 0x62, 0xfa, 0xda, 0xec, 0x0d, 0x3d, + 0xb3, 0x9d, 0xd9, 0x29, 0xaa, 0x65, 0xd3, 0x8f, 0xdf, 0x1f, 0x6b, 0x5f, 0x65, 0xa0, 0x9a, 0xfe, + 0x06, 0x80, 0x5b, 0x50, 0xb4, 0x1c, 0x9d, 0xf0, 0xd0, 0x12, 0x1f, 0xa0, 0x76, 0x9e, 0xf3, 0xd9, + 0xa0, 0x7e, 0x14, 0xf2, 0xd5, 0x58, 0x72, 0xeb, 0x6f, 0x12, 0x14, 0x23, 0x18, 0x5f, 0x82, 0x9c, + 0x4b, 0x82, 0x31, 0x57, 0x97, 0xdf, 0xcf, 0x20, 0x49, 0xe5, 0x6d, 0x86, 0xfb, 0x2e, 0xb1, 0x79, + 0x08, 0x84, 0x38, 0x6b, 0xb3, 0x75, 0xb5, 0x28, 0x31, 0xf8, 0xe5, 0xc4, 0x99, 0x4c, 0xa8, 0x1d, + 0xf8, 0xd1, 0xba, 0x86, 0x78, 0x33, 0x84, 0xf1, 0x3b, 0xb0, 0x1e, 0x78, 0xc4, 0xb4, 0x52, 0xdc, + 0x1c, 0xe7, 0xa2, 0xa8, 0x23, 0x26, 0xef, 0xc1, 0x95, 0x48, 0xaf, 0x41, 0x03, 0xa2, 0x8f, 0xa9, + 0x31, 0x13, 0x2a, 0xf0, 0x47, 0x88, 0xcb, 0x21, 0xa1, 0x15, 0xf6, 0x47, 0xb2, 0xb5, 0x6f, 0x24, + 0x58, 0x8f, 0xae, 0x53, 0x46, 0xec, 0xac, 0x63, 0x00, 0x62, 0xdb, 0x4e, 0x90, 0x74, 0xd7, 0x62, + 0x28, 0x2f, 0xc8, 0xd5, 0x1b, 0xb1, 0x90, 0x9a, 0x50, 0xb0, 0x35, 0x01, 0x98, 0xf5, 0x9c, 0xeb, + 0xb6, 0xab, 0x50, 0x0e, 0x3f, 0xf0, 0xf0, 0xaf, 0x84, 0xe2, 0x02, 0x0e, 0x02, 0x62, 0xf7, 0x2e, + 0xbc, 0x09, 0xf9, 0x53, 0x3a, 0x32, 0xed, 0xf0, 0xd9, 0x56, 0x34, 0xa2, 0x67, 0x92, 0x5c, 0xfc, + 0x4c, 0xb2, 0xff, 0x5b, 0x09, 0x36, 0x74, 0x67, 0x32, 0x6f, 0xef, 0x3e, 0x9a, 0x7b, 0x05, 0xf0, + 0x3f, 0x96, 0x3e, 0xff, 0x68, 0x64, 0x06, 0xe3, 0xe9, 0x69, 0x5d, 0x77, 0x26, 0xbb, 0x23, 0xc7, + 0x22, 0xf6, 0x68, 0xf6, 0x99, 0x93, 0xff, 0xd1, 0xdf, 0x1d, 0x51, 0xfb, 0xdd, 0x91, 0x93, 0xf8, + 0xe8, 0x79, 0x7f, 0xf6, 0xf7, 0xbf, 0x92, 0xf4, 0xa7, 0x4c, 0xf6, 0xa0, 0xb7, 0xff, 0xe7, 0xcc, + 0xd6, 0x81, 0x18, 0xae, 0x17, 0xb9, 0x47, 0xa5, 0x43, 0x8b, 0xea, 0x6c, 0xca, 0xff, 0x0b, 0x00, + 0x00, 0xff, 0xff, 0x1a, 0x28, 0x25, 0x79, 0x42, 0x1d, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto new file mode 100644 index 00000000..8697a50d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto @@ -0,0 +1,872 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package google.protobuf; +option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; + optional int32 end = 2; + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. 
Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + }; + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + }; + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; +} + +// Describes a oneof. 
+message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default=false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default=false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. 
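// As an illustrative sketch (not part of this file), a single custom
// file-level option using a number from the experimental range above could be
// declared and used as:
//
//   import "google/protobuf/descriptor.proto";
//
//   extend google.protobuf.FileOptions {
//     optional string my_file_flag = 50001;
//   }
//
//   option (my_file_flag) = "enabled";
//
// Here "my_file_flag" and its value are hypothetical names chosen only for
// the example.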
You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default=false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default=false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default=SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. 
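// For example (hypothetical, not part of this file), such old code would opt
// back in explicitly with:
//
//   option cc_generic_services = true;
//   option java_generic_services = true;
//   option py_generic_services = true;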
+ optional bool cc_generic_services = 16 [default=false]; + optional bool java_generic_services = 17 [default=false]; + optional bool py_generic_services = 18 [default=false]; + optional bool php_generic_services = 42 [default=false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default=false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default=false]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default=false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default=false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. 
+  optional bool deprecated = 3 [default=false];
+
+  // Whether the message is an automatically generated map entry type for the
+  // maps field.
+  //
+  // For maps fields:
+  //     map<KeyType, ValueType> map_field = 1;
+  // The parsed descriptor looks like:
+  //     message MapFieldEntry {
+  //         option map_entry = true;
+  //         optional KeyType key = 1;
+  //         optional ValueType value = 2;
+  //     }
+  //     repeated MapFieldEntry map_field = 1;
+  //
+  // Implementations may choose not to generate the map_entry=true message, but
+  // use a native map in the target language to hold the keys and values.
+  // The reflection APIs in such implementations still need to work as
+  // if the field is a repeated message field.
+  //
+  // NOTE: Do not set the option in .proto files. Always use the maps syntax
+  // instead. The option should only be implicitly set by the proto compiler
+  // parser.
+  optional bool map_entry = 7;
+
+  reserved 8;  // javalite_serializable
+  reserved 9;  // javanano_as_lite
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message FieldOptions {
+  // The ctype option instructs the C++ code generator to use a different
+  // representation of the field than it normally would. See the specific
+  // options below. This option is not yet implemented in the open source
+  // release -- sorry, we'll try to include it in a future version!
+  optional CType ctype = 1 [default = STRING];
+  enum CType {
+    // Default mode.
+    STRING = 0;
+
+    CORD = 1;
+
+    STRING_PIECE = 2;
+  }
+  // The packed option can be enabled for repeated primitive fields to enable
+  // a more efficient representation on the wire. Rather than repeatedly
+  // writing the tag and type for each element, the entire array is encoded as
+  // a single length-delimited blob. In proto3, only explicitly setting it to
+  // false will avoid using packed encoding.
+  optional bool packed = 2;
+
+  // The jstype option determines the JavaScript type used for values of the
+  // field. The option is permitted only for 64 bit integral and fixed types
+  // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
+  // is represented as JavaScript string, which avoids loss of precision that
+  // can happen when a large value is converted to a floating point JavaScript.
+  // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+  // use the JavaScript "number" type. The behavior of the default option
+  // JS_NORMAL is implementation dependent.
+  //
+  // This option is an enum to permit additional types to be added, e.g.
+  // goog.math.Integer.
+  optional JSType jstype = 6 [default = JS_NORMAL];
+  enum JSType {
+    // Use the default type.
+    JS_NORMAL = 0;
+
+    // Use JavaScript strings.
+    JS_STRING = 1;
+
+    // Use JavaScript numbers.
+    JS_NUMBER = 2;
+  }
+
+  // Should this field be parsed lazily? Lazy applies only to message-type
+  // fields. It means that when the outer message is initially parsed, the
+  // inner message's contents will not be parsed but instead stored in encoded
+  // form. The inner message will actually be parsed when it is first accessed.
+  //
+  // This is only a hint. Implementations are free to choose whether to use
+  // eager or lazy parsing regardless of the value of this option.
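// As a sketch (again hypothetical, not part of this file), a message-typed
// field that opts into lazy parsing would be declared as:
//
//   optional InnerMessage payload = 1 [lazy = true];
//
// where "InnerMessage" and "payload" are example names.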
However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default=false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default=false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default=false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default=false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default=false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = + 34 [default=IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. 
+ optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. 
If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed=true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed=true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed=true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). 
+ optional int32 end = 4; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile deleted file mode 100644 index b5715c35..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -include $(GOROOT)/src/Make.inc - -TARG=github.com/golang/protobuf/compiler/generator -GOFILES=\ - generator.go\ - -DEPS=../descriptor ../plugin ../../proto - -include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go index 211ab5d3..e0aba85f 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go @@ -40,19 +40,24 @@ import ( "bufio" "bytes" "compress/gzip" + "crypto/sha256" + "encoding/hex" "fmt" + "go/build" "go/parser" "go/printer" "go/token" "log" "os" "path" + "sort" "strconv" "strings" "unicode" "unicode/utf8" "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/generator/internal/remap" "github.com/golang/protobuf/protoc-gen-go/descriptor" plugin "github.com/golang/protobuf/protoc-gen-go/plugin" @@ -88,6 +93,14 @@ func RegisterPlugin(p Plugin) { plugins = append(plugins, p) } +// A GoImportPath is the import path of a Go package. e.g., "google.golang.org/genproto/protobuf". +type GoImportPath string + +func (p GoImportPath) String() string { return strconv.Quote(string(p)) } + +// A GoPackageName is the name of a Go package. e.g., "protobuf". +type GoPackageName string + // Each type we import as a protocol buffer (other than FileDescriptorProto) needs // a pointer to the FileDescriptorProto that represents it. 
These types achieve that // wrapping by placing each Proto inside a struct with the pointer to its File. The @@ -96,19 +109,21 @@ func RegisterPlugin(p Plugin) { // The file and package name method are common to messages and enums. type common struct { - file *descriptor.FileDescriptorProto // File this object comes from. + file *FileDescriptor // File this object comes from. } -// PackageName is name in the package clause in the generated file. -func (c *common) PackageName() string { return uniquePackageOf(c.file) } +// GoImportPath is the import path of the Go package containing the type. +func (c *common) GoImportPath() GoImportPath { + return c.file.importPath +} -func (c *common) File() *descriptor.FileDescriptorProto { return c.file } +func (c *common) File() *FileDescriptor { return c.file } func fileIsProto3(file *descriptor.FileDescriptorProto) bool { return file.GetSyntax() == "proto3" } -func (c *common) proto3() bool { return fileIsProto3(c.file) } +func (c *common) proto3() bool { return fileIsProto3(c.file.FileDescriptorProto) } // Descriptor represents a protocol buffer message. type Descriptor struct { @@ -134,7 +149,7 @@ func (d *Descriptor) TypeName() []string { for parent := d; parent != nil; parent = parent.parent { n++ } - s := make([]string, n, n) + s := make([]string, n) for parent := d; parent != nil; parent = parent.parent { n-- s[n] = parent.GetName() @@ -256,77 +271,61 @@ type FileDescriptor struct { // This is used for supporting public imports. exported map[Object][]symbol - index int // The index of this file in the list of files to generate code for + fingerprint string // Fingerprint of this file's contents. + importPath GoImportPath // Import path of this file's package. + packageName GoPackageName // Name of this file's Go package. proto3 bool // whether to generate proto3 code for this file } -// PackageName is the package name we'll use in the generated code to refer to this file. -func (d *FileDescriptor) PackageName() string { return uniquePackageOf(d.FileDescriptorProto) } - // VarName is the variable name we'll use in the generated code to refer // to the compressed bytes of this descriptor. It is not exported, so // it is only valid inside the generated package. -func (d *FileDescriptor) VarName() string { return fmt.Sprintf("fileDescriptor%d", d.index) } +func (d *FileDescriptor) VarName() string { + name := strings.Map(badToUnderscore, baseName(d.GetName())) + return fmt.Sprintf("fileDescriptor_%s_%s", name, d.fingerprint) +} // goPackageOption interprets the file's go_package option. // If there is no go_package, it returns ("", "", false). // If there's a simple name, it returns ("", pkg, true). // If the option implies an import path, it returns (impPath, pkg, true). -func (d *FileDescriptor) goPackageOption() (impPath, pkg string, ok bool) { - pkg = d.GetOptions().GetGoPackage() - if pkg == "" { - return +func (d *FileDescriptor) goPackageOption() (impPath GoImportPath, pkg GoPackageName, ok bool) { + opt := d.GetOptions().GetGoPackage() + if opt == "" { + return "", "", false + } + // A semicolon-delimited suffix delimits the import path and package name. + sc := strings.Index(opt, ";") + if sc >= 0 { + return GoImportPath(opt[:sc]), cleanPackageName(opt[sc+1:]), true } - ok = true // The presence of a slash implies there's an import path. 
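	// Illustrative mappings for go_package values (example values, not taken
	// from this change; ok is true in each case):
	//
	//   go_package = "descriptor"              -> ("", "descriptor")
	//   go_package = "example.com/foo/bar"     -> ("example.com/foo/bar", "bar")
	//   go_package = "example.com/foo/bar;baz" -> ("example.com/foo/bar", "baz")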
- slash := strings.LastIndex(pkg, "/") - if slash < 0 { - return + slash := strings.LastIndex(opt, "/") + if slash >= 0 { + return GoImportPath(opt), cleanPackageName(opt[slash+1:]), true } - impPath, pkg = pkg, pkg[slash+1:] - // A semicolon-delimited suffix overrides the package name. - sc := strings.IndexByte(impPath, ';') - if sc < 0 { - return - } - impPath, pkg = impPath[:sc], impPath[sc+1:] - return -} - -// goPackageName returns the Go package name to use in the -// generated Go file. The result explicit reports whether the name -// came from an option go_package statement. If explicit is false, -// the name was derived from the protocol buffer's package statement -// or the input file name. -func (d *FileDescriptor) goPackageName() (name string, explicit bool) { - // Does the file have a "go_package" option? - if _, pkg, ok := d.goPackageOption(); ok { - return pkg, true - } - - // Does the file have a package clause? - if pkg := d.GetPackage(); pkg != "" { - return pkg, false - } - // Use the file base name. - return baseName(d.GetName()), false + return "", cleanPackageName(opt), true } // goFileName returns the output name for the generated Go file. -func (d *FileDescriptor) goFileName() string { +func (d *FileDescriptor) goFileName(pathType pathType) string { name := *d.Name if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" { name = name[:len(name)-len(ext)] } name += ".pb.go" + if pathType == pathTypeSourceRelative { + return name + } + // Does the file have a "go_package" option? // If it does, it may override the filename. if impPath, _, ok := d.goPackageOption(); ok && impPath != "" { // Replace the existing dirname with the declared import path. _, name = path.Split(name) - name = path.Join(impPath, name) + name = path.Join(string(impPath), name) return name } @@ -341,14 +340,13 @@ func (d *FileDescriptor) addExport(obj Object, sym symbol) { type symbol interface { // GenerateAlias should generate an appropriate alias // for the symbol from the named package. - GenerateAlias(g *Generator, pkg string) + GenerateAlias(g *Generator, pkg GoPackageName) } type messageSymbol struct { sym string hasExtensions, isMessageSet bool - hasOneof bool - getters []getterSymbol + oneofTypes []string } type getterSymbol struct { @@ -358,144 +356,11 @@ type getterSymbol struct { genType bool // whether typ contains a generated type (message/group/enum) } -func (ms *messageSymbol) GenerateAlias(g *Generator, pkg string) { - remoteSym := pkg + "." + ms.sym - - g.P("type ", ms.sym, " ", remoteSym) - g.P("func (m *", ms.sym, ") Reset() { (*", remoteSym, ")(m).Reset() }") - g.P("func (m *", ms.sym, ") String() string { return (*", remoteSym, ")(m).String() }") - g.P("func (*", ms.sym, ") ProtoMessage() {}") - if ms.hasExtensions { - g.P("func (*", ms.sym, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange ", - "{ return (*", remoteSym, ")(nil).ExtensionRangeArray() }") - if ms.isMessageSet { - g.P("func (m *", ms.sym, ") Marshal() ([]byte, error) ", - "{ return (*", remoteSym, ")(m).Marshal() }") - g.P("func (m *", ms.sym, ") Unmarshal(buf []byte) error ", - "{ return (*", remoteSym, ")(m).Unmarshal(buf) }") - } +func (ms *messageSymbol) GenerateAlias(g *Generator, pkg GoPackageName) { + g.P("type ", ms.sym, " = ", pkg, ".", ms.sym) + for _, name := range ms.oneofTypes { + g.P("type ", name, " = ", pkg, ".", name) } - if ms.hasOneof { - // Oneofs and public imports do not mix well. 
- // We can make them work okay for the binary format, - // but they're going to break weirdly for text/JSON. - enc := "_" + ms.sym + "_OneofMarshaler" - dec := "_" + ms.sym + "_OneofUnmarshaler" - size := "_" + ms.sym + "_OneofSizer" - encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" - decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" - sizeSig := "(msg " + g.Pkg["proto"] + ".Message) int" - g.P("func (m *", ms.sym, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") - g.P("return ", enc, ", ", dec, ", ", size, ", nil") - g.P("}") - - g.P("func ", enc, encSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("enc, _, _, _ := m0.XXX_OneofFuncs()") - g.P("return enc(m0, b)") - g.P("}") - - g.P("func ", dec, decSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, dec, _, _ := m0.XXX_OneofFuncs()") - g.P("return dec(m0, tag, wire, b)") - g.P("}") - - g.P("func ", size, sizeSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, _, size, _ := m0.XXX_OneofFuncs()") - g.P("return size(m0)") - g.P("}") - } - for _, get := range ms.getters { - - if get.typeName != "" { - g.RecordTypeUse(get.typeName) - } - typ := get.typ - val := "(*" + remoteSym + ")(m)." + get.name + "()" - if get.genType { - // typ will be "*pkg.T" (message/group) or "pkg.T" (enum) - // or "map[t]*pkg.T" (map to message/enum). - // The first two of those might have a "[]" prefix if it is repeated. - // Drop any package qualifier since we have hoisted the type into this package. - rep := strings.HasPrefix(typ, "[]") - if rep { - typ = typ[2:] - } - isMap := strings.HasPrefix(typ, "map[") - star := typ[0] == '*' - if !isMap { // map types handled lower down - typ = typ[strings.Index(typ, ".")+1:] - } - if star { - typ = "*" + typ - } - if rep { - // Go does not permit conversion between slice types where both - // element types are named. That means we need to generate a bit - // of code in this situation. - // typ is the element type. - // val is the expression to get the slice from the imported type. - - ctyp := typ // conversion type expression; "Foo" or "(*Foo)" - if star { - ctyp = "(" + typ + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() []", typ, " {") - g.In() - g.P("o := ", val) - g.P("if o == nil {") - g.In() - g.P("return nil") - g.Out() - g.P("}") - g.P("s := make([]", typ, ", len(o))") - g.P("for i, x := range o {") - g.In() - g.P("s[i] = ", ctyp, "(x)") - g.Out() - g.P("}") - g.P("return s") - g.Out() - g.P("}") - continue - } - if isMap { - // Split map[keyTyp]valTyp. - bra, ket := strings.Index(typ, "["), strings.Index(typ, "]") - keyTyp, valTyp := typ[bra+1:ket], typ[ket+1:] - // Drop any package qualifier. - // Only the value type may be foreign. - star := valTyp[0] == '*' - valTyp = valTyp[strings.Index(valTyp, ".")+1:] - if star { - valTyp = "*" + valTyp - } - - typ := "map[" + keyTyp + "]" + valTyp - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " {") - g.P("o := ", val) - g.P("if o == nil { return nil }") - g.P("s := make(", typ, ", len(o))") - g.P("for k, v := range o {") - g.P("s[k] = (", valTyp, ")(v)") - g.P("}") - g.P("return s") - g.P("}") - continue - } - // Convert imported type into the forwarding type. 
- val = "(" + typ + ")(" + val + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " { return ", val, " }") - } - } type enumSymbol struct { @@ -503,16 +368,11 @@ type enumSymbol struct { proto3 bool // Whether this came from a proto3 file. } -func (es enumSymbol) GenerateAlias(g *Generator, pkg string) { +func (es enumSymbol) GenerateAlias(g *Generator, pkg GoPackageName) { s := es.name - g.P("type ", s, " ", pkg, ".", s) + g.P("type ", s, " = ", pkg, ".", s) g.P("var ", s, "_name = ", pkg, ".", s, "_name") g.P("var ", s, "_value = ", pkg, ".", s, "_value") - g.P("func (x ", s, ") String() string { return (", pkg, ".", s, ")(x).String() }") - if !es.proto3 { - g.P("func (x ", s, ") Enum() *", s, "{ return (*", s, ")((", pkg, ".", s, ")(x).Enum()) }") - g.P("func (x *", s, ") UnmarshalJSON(data []byte) error { return (*", pkg, ".", s, ")(x).UnmarshalJSON(data) }") - } } type constOrVarSymbol struct { @@ -521,8 +381,8 @@ type constOrVarSymbol struct { cast string // if non-empty, a type cast is required (used for enums) } -func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { - v := pkg + "." + cs.sym +func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg GoPackageName) { + v := string(pkg) + "." + cs.sym if cs.cast != "" { v = cs.cast + "(" + v + ")" } @@ -531,21 +391,9 @@ func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { // Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects. type Object interface { - PackageName() string // The name we use in our output (a_b_c), possibly renamed for uniqueness. + GoImportPath() GoImportPath TypeName() []string - File() *descriptor.FileDescriptorProto -} - -// Each package name we generate must be unique. The package we're generating -// gets its own name but every other package must have a unique name that does -// not conflict in the code we generate. These names are chosen globally (although -// they don't have to be, it simplifies things to do them globally). -func uniquePackageOf(fd *descriptor.FileDescriptorProto) string { - s, ok := uniquePackageName[fd] - if !ok { - log.Fatal("internal error: no package name defined for " + fd.GetName()) - } - return s + File() *FileDescriptor } // Generator is the type whose methods generate the output, stored in the associated response structure. @@ -562,18 +410,30 @@ type Generator struct { Pkg map[string]string // The names under which we import support packages - packageName string // What we're calling ourselves. - allFiles []*FileDescriptor // All files in the tree - allFilesByName map[string]*FileDescriptor // All files by filename. - genFiles []*FileDescriptor // Those files we will generate output for. - file *FileDescriptor // The file we are compiling now. - usedPackages map[string]bool // Names of packages used in current file. - typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. - init []string // Lines to emit in the init function. + outputImportPath GoImportPath // Package we're generating code for. + allFiles []*FileDescriptor // All files in the tree + allFilesByName map[string]*FileDescriptor // All files by filename. + genFiles []*FileDescriptor // Those files we will generate output for. + file *FileDescriptor // The file we are compiling now. + packageNames map[GoImportPath]GoPackageName // Imported package names in the current file. + usedPackages map[GoImportPath]bool // Packages used in current file. 
+ usedPackageNames map[GoPackageName]bool // Package names used in the current file. + typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. + init []string // Lines to emit in the init function. indent string + pathType pathType // How to generate output filenames. writeOutput bool + annotateCode bool // whether to store annotations + annotations []*descriptor.GeneratedCodeInfo_Annotation // annotations to store } +type pathType int + +const ( + pathTypeImport pathType = iota + pathTypeSourceRelative +) + // New creates a new generator and allocates the request and response protobufs. func New() *Generator { g := new(Generator) @@ -618,8 +478,21 @@ func (g *Generator) CommandLineParameters(parameter string) { g.ImportPrefix = v case "import_path": g.PackageImportPath = v + case "paths": + switch v { + case "import": + g.pathType = pathTypeImport + case "source_relative": + g.pathType = pathTypeSourceRelative + default: + g.Fail(fmt.Sprintf(`Unknown path type %q: want "import" or "source_relative".`, v)) + } case "plugins": pluginList = v + case "annotate_code": + if v == "true" { + g.annotateCode = true + } default: if len(k) > 0 && k[0] == 'M' { g.ImportMap[k[1:]] = v @@ -646,37 +519,42 @@ func (g *Generator) CommandLineParameters(parameter string) { // If its file is in a different package, it returns the package name we're using for this file, plus ".". // Otherwise it returns the empty string. func (g *Generator) DefaultPackageName(obj Object) string { - pkg := obj.PackageName() - if pkg == g.packageName { + importPath := obj.GoImportPath() + if importPath == g.outputImportPath { return "" } - return pkg + "." + return string(g.GoPackageName(importPath)) + "." } -// For each input file, the unique package name to use, underscored. -var uniquePackageName = make(map[*descriptor.FileDescriptorProto]string) +// GoPackageName returns the name used for a package. +func (g *Generator) GoPackageName(importPath GoImportPath) GoPackageName { + if name, ok := g.packageNames[importPath]; ok { + return name + } + name := cleanPackageName(baseName(string(importPath))) + for i, orig := 1, name; g.usedPackageNames[name]; i++ { + name = orig + GoPackageName(strconv.Itoa(i)) + } + g.packageNames[importPath] = name + g.usedPackageNames[name] = true + return name +} -// Package names already registered. Key is the name from the .proto file; -// value is the name that appears in the generated code. -var pkgNamesInUse = make(map[string]bool) +var globalPackageNames = map[GoPackageName]bool{ + "fmt": true, + "math": true, + "proto": true, +} -// Create and remember a guaranteed unique package name for this file descriptor. -// Pkg is the candidate name. If f is nil, it's a builtin package like "proto" and -// has no file descriptor. +// Create and remember a guaranteed unique package name. Pkg is the candidate name. +// The FileDescriptor parameter is unused. func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { - // Convert dots to underscores before finding a unique alias. - pkg = strings.Map(badToUnderscore, pkg) - - for i, orig := 1, pkg; pkgNamesInUse[pkg]; i++ { - // It's a duplicate; must rename. - pkg = orig + strconv.Itoa(i) + name := cleanPackageName(pkg) + for i, orig := 1, name; globalPackageNames[name]; i++ { + name = orig + GoPackageName(strconv.Itoa(i)) } - // Install it. 
- pkgNamesInUse[pkg] = true - if f != nil { - uniquePackageName[f.FileDescriptorProto] = pkg - } - return pkg + globalPackageNames[name] = true + return string(name) } var isGoKeyword = map[string]bool{ @@ -707,97 +585,83 @@ var isGoKeyword = map[string]bool{ "var": true, } +func cleanPackageName(name string) GoPackageName { + name = strings.Map(badToUnderscore, name) + // Identifier must not be keyword: insert _. + if isGoKeyword[name] { + name = "_" + name + } + // Identifier must not begin with digit: insert _. + if r, _ := utf8.DecodeRuneInString(name); unicode.IsDigit(r) { + name = "_" + name + } + return GoPackageName(name) +} + // defaultGoPackage returns the package name to use, // derived from the import path of the package we're building code for. -func (g *Generator) defaultGoPackage() string { +func (g *Generator) defaultGoPackage() GoPackageName { p := g.PackageImportPath if i := strings.LastIndex(p, "/"); i >= 0 { p = p[i+1:] } - if p == "" { - return "" - } - - p = strings.Map(badToUnderscore, p) - // Identifier must not be keyword: insert _. - if isGoKeyword[p] { - p = "_" + p - } - // Identifier must not begin with digit: insert _. - if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) { - p = "_" + p - } - return p + return cleanPackageName(p) } // SetPackageNames sets the package name for this run. // The package name must agree across all files being generated. // It also defines unique package names for all imported files. func (g *Generator) SetPackageNames() { - // Register the name for this package. It will be the first name - // registered so is guaranteed to be unmodified. - pkg, explicit := g.genFiles[0].goPackageName() + g.outputImportPath = g.genFiles[0].importPath - // Check all files for an explicit go_package option. + defaultPackageNames := make(map[GoImportPath]GoPackageName) for _, f := range g.genFiles { - thisPkg, thisExplicit := f.goPackageName() - if thisExplicit { - if !explicit { - // Let this file's go_package option serve for all input files. - pkg, explicit = thisPkg, true - } else if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } + if _, p, ok := f.goPackageOption(); ok { + defaultPackageNames[f.importPath] = p + } + } + for _, f := range g.genFiles { + if _, p, ok := f.goPackageOption(); ok { + // Source file: option go_package = "quux/bar"; + f.packageName = p + } else if p, ok := defaultPackageNames[f.importPath]; ok { + // A go_package option in another file in the same package. + // + // This is a poor choice in general, since every source file should + // contain a go_package option. Supported mainly for historical + // compatibility. + f.packageName = p + } else if p := g.defaultGoPackage(); p != "" { + // Command-line: import_path=quux/bar. + // + // The import_path flag sets a package name for files which don't + // contain a go_package option. + f.packageName = p + } else if p := f.GetPackage(); p != "" { + // Source file: package quux.bar; + f.packageName = cleanPackageName(p) + } else { + // Source filename. + f.packageName = cleanPackageName(baseName(f.GetName())) } } - // If we don't have an explicit go_package option but we have an - // import path, use that. - if !explicit { - p := g.defaultGoPackage() - if p != "" { - pkg, explicit = p, true + // Check that all files have a consistent package name and import path. 
+ for _, f := range g.genFiles[1:] { + if a, b := g.genFiles[0].importPath, f.importPath; a != b { + g.Fail(fmt.Sprintf("inconsistent package import paths: %v, %v", a, b)) + } + if a, b := g.genFiles[0].packageName, f.packageName; a != b { + g.Fail(fmt.Sprintf("inconsistent package names: %v, %v", a, b)) } } - // If there was no go_package and no import path to use, - // double-check that all the inputs have the same implicit - // Go package name. - if !explicit { - for _, f := range g.genFiles { - thisPkg, _ := f.goPackageName() - if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } - } - } - - g.packageName = RegisterUniquePackageName(pkg, g.genFiles[0]) - - // Register the support package names. They might collide with the - // name of a package we import. + // Names of support packages. These never vary (if there are conflicts, + // we rename the conflicting package), so this could be removed someday. g.Pkg = map[string]string{ - "fmt": RegisterUniquePackageName("fmt", nil), - "math": RegisterUniquePackageName("math", nil), - "proto": RegisterUniquePackageName("proto", nil), - } - -AllFiles: - for _, f := range g.allFiles { - for _, genf := range g.genFiles { - if f == genf { - // In this package already. - uniquePackageName[f.FileDescriptorProto] = g.packageName - continue AllFiles - } - } - // The file is a dependency, so we want to ignore its go_package option - // because that is only relevant for its specific generated output. - pkg := f.GetPackage() - if pkg == "" { - pkg = baseName(*f.Name) - } - RegisterUniquePackageName(pkg, f) + "fmt": "fmt", + "math": "math", + "proto": "proto", } } @@ -807,27 +671,51 @@ AllFiles: func (g *Generator) WrapTypes() { g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile)) g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles)) + genFileNames := make(map[string]bool) + for _, n := range g.Request.FileToGenerate { + genFileNames[n] = true + } for _, f := range g.Request.ProtoFile { - // We must wrap the descriptors before we wrap the enums - descs := wrapDescriptors(f) - g.buildNestedDescriptors(descs) - enums := wrapEnumDescriptors(f, descs) - g.buildNestedEnums(descs, enums) - exts := wrapExtensions(f) fd := &FileDescriptor{ FileDescriptorProto: f, - desc: descs, - enum: enums, - ext: exts, exported: make(map[Object][]symbol), proto3: fileIsProto3(f), } + // The import path may be set in a number of ways. + if substitution, ok := g.ImportMap[f.GetName()]; ok { + // Command-line: M=foo.proto=quux/bar. + // + // Explicit mapping of source file to import path. + fd.importPath = GoImportPath(substitution) + } else if genFileNames[f.GetName()] && g.PackageImportPath != "" { + // Command-line: import_path=quux/bar. + // + // The import_path flag sets the import path for every file that + // we generate code for. + fd.importPath = GoImportPath(g.PackageImportPath) + } else if p, _, _ := fd.goPackageOption(); p != "" { + // Source file: option go_package = "quux/bar"; + // + // The go_package option sets the import path. Most users should use this. + fd.importPath = p + } else { + // Source filename. + // + // Last resort when nothing else is available. 
+ fd.importPath = GoImportPath(path.Dir(f.GetName())) + } + // We must wrap the descriptors before we wrap the enums + fd.desc = wrapDescriptors(fd) + g.buildNestedDescriptors(fd.desc) + fd.enum = wrapEnumDescriptors(fd, fd.desc) + g.buildNestedEnums(fd.desc, fd.enum) + fd.ext = wrapExtensions(fd) extractComments(fd) g.allFiles = append(g.allFiles, fd) g.allFilesByName[f.GetName()] = fd } for _, fd := range g.allFiles { - fd.imp = wrapImported(fd.FileDescriptorProto, g) + fd.imp = wrapImported(fd, g) } g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate)) @@ -836,11 +724,27 @@ func (g *Generator) WrapTypes() { if fd == nil { g.Fail("could not find file named", fileName) } - fd.index = len(g.genFiles) + fingerprint, err := fingerprintProto(fd.FileDescriptorProto) + if err != nil { + g.Error(err) + } + fd.fingerprint = fingerprint g.genFiles = append(g.genFiles, fd) } } +// fingerprintProto returns a fingerprint for a message. +// The fingerprint is intended to prevent conflicts between generated fileds, +// not to provide cryptographic security. +func fingerprintProto(m proto.Message) (string, error) { + b, err := proto.Marshal(m) + if err != nil { + return "", err + } + h := sha256.Sum256(b) + return hex.EncodeToString(h[:8]), nil +} + // Scan the descriptors in this file. For each one, build the slice of nested descriptors func (g *Generator) buildNestedDescriptors(descs []*Descriptor) { for _, desc := range descs { @@ -873,7 +777,7 @@ func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescripto } // Construct the Descriptor -func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *Descriptor { +func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *Descriptor { d := &Descriptor{ common: common{file}, DescriptorProto: desc, @@ -910,7 +814,7 @@ func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *d } // Return a slice of all the Descriptors defined within this file -func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { +func wrapDescriptors(file *FileDescriptor) []*Descriptor { sl := make([]*Descriptor, 0, len(file.MessageType)+10) for i, desc := range file.MessageType { sl = wrapThisDescriptor(sl, desc, nil, file, i) @@ -919,7 +823,7 @@ func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { } // Wrap this Descriptor, recursively -func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) []*Descriptor { +func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) []*Descriptor { sl = append(sl, newDescriptor(desc, parent, file, index)) me := sl[len(sl)-1] for i, nested := range desc.NestedType { @@ -929,7 +833,7 @@ func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, pare } // Construct the EnumDescriptor -func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *EnumDescriptor { +func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *EnumDescriptor { ed := &EnumDescriptor{ common: common{file}, EnumDescriptorProto: desc, @@ -945,7 +849,7 @@ func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, } // Return a slice of all the 
EnumDescriptors defined within this file -func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descriptor) []*EnumDescriptor { +func wrapEnumDescriptors(file *FileDescriptor, descs []*Descriptor) []*EnumDescriptor { sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10) // Top-level enums. for i, enum := range file.EnumType { @@ -961,7 +865,7 @@ func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descript } // Return a slice of all the top-level ExtensionDescriptors defined within this file. -func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor { +func wrapExtensions(file *FileDescriptor) []*ExtensionDescriptor { var sl []*ExtensionDescriptor for _, field := range file.Extension { sl = append(sl, &ExtensionDescriptor{common{file}, field, nil}) @@ -970,7 +874,7 @@ func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor } // Return a slice of all the types that are publicly imported into this file. -func wrapImported(file *descriptor.FileDescriptorProto, g *Generator) (sl []*ImportedDescriptor) { +func wrapImported(file *FileDescriptor, g *Generator) (sl []*ImportedDescriptor) { for _, index := range file.PublicDependency { df := g.fileByName(file.Dependency[index]) for _, d := range df.desc { @@ -1070,35 +974,84 @@ func (g *Generator) ObjectNamed(typeName string) Object { return o } +// AnnotatedAtoms is a list of atoms (as consumed by P) that records the file name and proto AST path from which they originated. +type AnnotatedAtoms struct { + source string + path string + atoms []interface{} +} + +// Annotate records the file name and proto AST path of a list of atoms +// so that a later call to P can emit a link from each atom to its origin. +func Annotate(file *FileDescriptor, path string, atoms ...interface{}) *AnnotatedAtoms { + return &AnnotatedAtoms{source: *file.Name, path: path, atoms: atoms} +} + +// printAtom prints the (atomic, non-annotation) argument to the generated output. +func (g *Generator) printAtom(v interface{}) { + switch v := v.(type) { + case string: + g.WriteString(v) + case *string: + g.WriteString(*v) + case bool: + fmt.Fprint(g, v) + case *bool: + fmt.Fprint(g, *v) + case int: + fmt.Fprint(g, v) + case *int32: + fmt.Fprint(g, *v) + case *int64: + fmt.Fprint(g, *v) + case float64: + fmt.Fprint(g, v) + case *float64: + fmt.Fprint(g, *v) + case GoPackageName: + g.WriteString(string(v)) + case GoImportPath: + g.WriteString(strconv.Quote(string(v))) + default: + g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) + } +} + // P prints the arguments to the generated output. It handles strings and int32s, plus -// handling indirections because they may be *string, etc. +// handling indirections because they may be *string, etc. Any inputs of type AnnotatedAtoms may emit +// annotations in a .meta file in addition to outputting the atoms themselves (if g.annotateCode +// is true). 
func (g *Generator) P(str ...interface{}) { if !g.writeOutput { return } g.WriteString(g.indent) for _, v := range str { - switch s := v.(type) { - case string: - g.WriteString(s) - case *string: - g.WriteString(*s) - case bool: - fmt.Fprintf(g, "%t", s) - case *bool: - fmt.Fprintf(g, "%t", *s) - case int: - fmt.Fprintf(g, "%d", s) - case *int32: - fmt.Fprintf(g, "%d", *s) - case *int64: - fmt.Fprintf(g, "%d", *s) - case float64: - fmt.Fprintf(g, "%g", s) - case *float64: - fmt.Fprintf(g, "%g", *s) + switch v := v.(type) { + case *AnnotatedAtoms: + begin := int32(g.Len()) + for _, v := range v.atoms { + g.printAtom(v) + } + if g.annotateCode { + end := int32(g.Len()) + var path []int32 + for _, token := range strings.Split(v.path, ",") { + val, err := strconv.ParseInt(token, 10, 32) + if err != nil { + g.Fail("could not parse proto AST path: ", err.Error()) + } + path = append(path, int32(val)) + } + g.annotations = append(g.annotations, &descriptor.GeneratedCodeInfo_Annotation{ + Path: path, + SourceFile: &v.source, + Begin: &begin, + End: &end, + }) + } default: - g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) + g.printAtom(v) } } g.WriteByte('\n') @@ -1135,15 +1088,25 @@ func (g *Generator) GenerateAllFiles() { } for _, file := range g.allFiles { g.Reset() + g.annotations = nil g.writeOutput = genFileMap[file] g.generate(file) if !g.writeOutput { continue } + fname := file.goFileName(g.pathType) g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ - Name: proto.String(file.goFileName()), + Name: proto.String(fname), Content: proto.String(g.String()), }) + if g.annotateCode { + // Store the generated code annotations in text, as the protoc plugin protocol requires that + // strings contain valid UTF-8. + g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ + Name: proto.String(file.goFileName(g.pathType) + ".meta"), + Content: proto.String(proto.CompactTextString(&descriptor.GeneratedCodeInfo{Annotation: g.annotations})), + }) + } } } @@ -1154,32 +1117,24 @@ func (g *Generator) runPlugins(file *FileDescriptor) { } } -// FileOf return the FileDescriptor for this FileDescriptorProto. -func (g *Generator) FileOf(fd *descriptor.FileDescriptorProto) *FileDescriptor { - for _, file := range g.allFiles { - if file.FileDescriptorProto == fd { - return file - } - } - g.Fail("could not find file in table:", fd.GetName()) - return nil -} - // Fill the response protocol buffer with the generated output for all the files we're // supposed to generate. func (g *Generator) generate(file *FileDescriptor) { - g.file = g.FileOf(file.FileDescriptorProto) - g.usedPackages = make(map[string]bool) - - if g.file.index == 0 { - // For one file in the package, assert version compatibility. 
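// When code annotation is enabled (g.annotateCode), GenerateAllFiles above
// emits a second response file next to each generated .pb.go, with the same
// name plus a ".meta" suffix. Its content is a text-format
// descriptor.GeneratedCodeInfo message: one Annotation per annotated
// identifier, recording the proto AST path, the source .proto file name, and
// the begin/end byte offsets of that identifier in the generated Go source.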
- g.P("// This is a compile-time assertion to ensure that this generated file") - g.P("// is compatible with the proto package it is being compiled against.") - g.P("// A compilation error at this line likely means your copy of the") - g.P("// proto package needs to be updated.") - g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") - g.P() + g.file = file + g.usedPackages = make(map[GoImportPath]bool) + g.packageNames = make(map[GoImportPath]GoPackageName) + g.usedPackageNames = make(map[GoPackageName]bool) + for name := range globalPackageNames { + g.usedPackageNames[name] = true } + + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the proto package it is being compiled against.") + g.P("// A compilation error at this line likely means your copy of the") + g.P("// proto package needs to be updated.") + g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") + g.P() + for _, td := range g.file.imp { g.generateImported(td) } @@ -1205,24 +1160,36 @@ func (g *Generator) generate(file *FileDescriptor) { // Generate header and imports last, though they appear first in the output. rem := g.Buffer + remAnno := g.annotations g.Buffer = new(bytes.Buffer) + g.annotations = nil g.generateHeader() g.generateImports() if !g.writeOutput { return } + // Adjust the offsets for annotations displaced by the header and imports. + for _, anno := range remAnno { + *anno.Begin += int32(g.Len()) + *anno.End += int32(g.Len()) + g.annotations = append(g.annotations, anno) + } g.Write(rem.Bytes()) - // Reformat generated code. + // Reformat generated code and patch annotation locations. fset := token.NewFileSet() - raw := g.Bytes() - ast, err := parser.ParseFile(fset, "", g, parser.ParseComments) + original := g.Bytes() + if g.annotateCode { + // make a copy independent of g; we'll need it after Reset. + original = append([]byte(nil), original...) + } + ast, err := parser.ParseFile(fset, "", original, parser.ParseComments) if err != nil { // Print out the bad code with line numbers. // This should never happen in practice, but it can while changing generated code, // so consider this a debugging aid. var src bytes.Buffer - s := bufio.NewScanner(bytes.NewReader(raw)) + s := bufio.NewScanner(bytes.NewReader(original)) for line := 1; s.Scan(); line++ { fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) } @@ -1233,55 +1200,59 @@ func (g *Generator) generate(file *FileDescriptor) { if err != nil { g.Fail("generated Go source code could not be reformatted:", err.Error()) } + if g.annotateCode { + m, err := remap.Compute(original, g.Bytes()) + if err != nil { + g.Fail("formatted generated Go source code could not be mapped back to the original code:", err.Error()) + } + for _, anno := range g.annotations { + new, ok := m.Find(int(*anno.Begin), int(*anno.End)) + if !ok { + g.Fail("span in formatted generated Go source code could not be mapped back to the original code") + } + *anno.Begin = int32(new.Pos) + *anno.End = int32(new.End) + } + } } // Generate the header, including package definition func (g *Generator) generateHeader() { g.P("// Code generated by protoc-gen-go. 
DO NOT EDIT.") - g.P("// source: ", g.file.Name) + if g.file.GetOptions().GetDeprecated() { + g.P("// ", g.file.Name, " is a deprecated file.") + } else { + g.P("// source: ", g.file.Name) + } g.P() - name := g.file.PackageName() + importPath, _, _ := g.file.goPackageOption() + if importPath == "" { + g.P("package ", g.file.packageName) + } else { + g.P("package ", g.file.packageName, " // import ", GoImportPath(g.ImportPrefix)+importPath) + } + g.P() - if g.file.index == 0 { - // Generate package docs for the first file in the package. + if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { g.P("/*") - g.P("Package ", name, " is a generated protocol buffer package.") - g.P() - if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { - // not using g.PrintComments because this is a /* */ comment block. - text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") - for _, line := range strings.Split(text, "\n") { - line = strings.TrimPrefix(line, " ") - // ensure we don't escape from the block comment - line = strings.Replace(line, "*/", "* /", -1) - g.P(line) - } - g.P() - } - var topMsgs []string - g.P("It is generated from these files:") - for _, f := range g.genFiles { - g.P("\t", f.Name) - for _, msg := range f.desc { - if msg.parent != nil { - continue - } - topMsgs = append(topMsgs, CamelCaseSlice(msg.TypeName())) - } - } - g.P() - g.P("It has these top-level messages:") - for _, msg := range topMsgs { - g.P("\t", msg) + // not using g.PrintComments because this is a /* */ comment block. + text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") + for _, line := range strings.Split(text, "\n") { + line = strings.TrimPrefix(line, " ") + // ensure we don't escape from the block comment + line = strings.Replace(line, "*/", "* /", -1) + g.P(line) } g.P("*/") + g.P() } - - g.P("package ", name) - g.P() } +// deprecationComment is the standard comment added to deprecated +// messages, fields, enums, and enum values. +var deprecationComment = "// Deprecated: Do not use." + // PrintComments prints any comments from the source .proto file. // The path is a comma-separated list of integers. // It returns an indication of whether any comments were printed. @@ -1319,35 +1290,46 @@ func (g *Generator) generateImports() { // We almost always need a proto import. Rather than computing when we // do, which is tricky when there's a plugin, just import it and // reference it later. The same argument applies to the fmt and math packages. - g.P("import " + g.Pkg["proto"] + " " + strconv.Quote(g.ImportPrefix+"github.com/golang/protobuf/proto")) + g.P("import "+g.Pkg["proto"]+" ", GoImportPath(g.ImportPrefix)+"github.com/golang/protobuf/proto") g.P("import " + g.Pkg["fmt"] + ` "fmt"`) g.P("import " + g.Pkg["math"] + ` "math"`) + var ( + imports = make(map[GoImportPath]bool) + strongImports = make(map[GoImportPath]bool) + importPaths []string + ) for i, s := range g.file.Dependency { fd := g.fileByName(s) + importPath := fd.importPath // Do not import our own package. - if fd.PackageName() == g.packageName { + if importPath == g.file.importPath { continue } - filename := fd.goFileName() - // By default, import path is the dirname of the Go filename. 
- importPath := path.Dir(filename) - if substitution, ok := g.ImportMap[s]; ok { - importPath = substitution + if !imports[importPath] { + importPaths = append(importPaths, string(importPath)) } - importPath = g.ImportPrefix + importPath + imports[importPath] = true + if !g.weak(int32(i)) { + strongImports[importPath] = true + } + } + sort.Strings(importPaths) + for i := range importPaths { + importPath := GoImportPath(importPaths[i]) + packageName := g.GoPackageName(importPath) + fullPath := GoImportPath(g.ImportPrefix) + importPath // Skip weak imports. - if g.weak(int32(i)) { - g.P("// skipping weak import ", fd.PackageName(), " ", strconv.Quote(importPath)) + if !strongImports[importPath] { + g.P("// skipping weak import ", packageName, " ", fullPath) continue } // We need to import all the dependencies, even if we don't reference them, // because other code and tools depend on having the full transitive closure // of protocol buffer types in the binary. - pname := fd.PackageName() - if _, ok := g.usedPackages[pname]; !ok { - pname = "_" + if _, ok := g.usedPackages[importPath]; !ok { + packageName = "_" } - g.P("import ", pname, " ", strconv.Quote(importPath)) + g.P("import ", packageName, " ", fullPath) } g.P() // TODO: may need to worry about uniqueness across plugins @@ -1363,26 +1345,24 @@ func (g *Generator) generateImports() { } func (g *Generator) generateImported(id *ImportedDescriptor) { - // Don't generate public import symbols for files that we are generating - // code for, since those symbols will already be in this package. - // We can't simply avoid creating the ImportedDescriptor objects, - // because g.genFiles isn't populated at that stage. tn := id.TypeName() sn := tn[len(tn)-1] - df := g.FileOf(id.o.File()) + df := id.o.File() filename := *df.Name - for _, fd := range g.genFiles { - if *fd.Name == filename { - g.P("// Ignoring public import of ", sn, " from ", filename) - g.P() - return - } + if df.importPath == g.file.importPath { + // Don't generate type aliases for files in the same Go package as this one. 
+ g.P("// Ignoring public import of ", sn, " from ", filename) + g.P() + return + } + if !supportTypeAliases { + g.Fail(fmt.Sprintf("%s: public imports require at least go1.9", filename)) } g.P("// ", sn, " from public import ", filename) - g.usedPackages[df.PackageName()] = true + g.usedPackages[df.importPath] = true for _, sym := range df.exported[id.o] { - sym.GenerateAlias(g, df.PackageName()) + sym.GenerateAlias(g, g.GoPackageName(df.importPath)) } g.P() @@ -1396,16 +1376,26 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) { ccTypeName := CamelCaseSlice(typeName) ccPrefix := enum.prefix() + deprecatedEnum := "" + if enum.GetOptions().GetDeprecated() { + deprecatedEnum = deprecationComment + } g.PrintComments(enum.path) - g.P("type ", ccTypeName, " int32") + g.P("type ", Annotate(enum.file, enum.path, ccTypeName), " int32", deprecatedEnum) g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()}) g.P("const (") g.In() for i, e := range enum.Value { - g.PrintComments(fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)) + etorPath := fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i) + g.PrintComments(etorPath) + + deprecatedValue := "" + if e.GetOptions().GetDeprecated() { + deprecatedValue = deprecationComment + } name := ccPrefix + *e.Name - g.P(name, " ", ccTypeName, " = ", e.Number) + g.P(Annotate(enum.file, etorPath, name), " ", ccTypeName, " = ", e.Number, " ", deprecatedValue) g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName}) } g.Out() @@ -1468,7 +1458,11 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) { indexes = append([]string{strconv.Itoa(m.index)}, indexes...) } indexes = append(indexes, strconv.Itoa(enum.index)) - g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) {") + g.In() + g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}") + g.Out() + g.P("}") if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" { g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`) } @@ -1535,7 +1529,7 @@ func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptor } enum := "" if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM { - // We avoid using obj.PackageName(), because we want to use the + // We avoid using obj.GoPackageName(), because we want to use the // original (proto-world) package name. obj := g.ObjectNamed(field.GetTypeName()) if id, ok := obj.(*ImportedDescriptor); ok { @@ -1617,12 +1611,6 @@ func (g *Generator) TypeName(obj Object) string { return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName()) } -// TypeNameWithPackage is like TypeName, but always includes the package -// name even if the object is in our own package. -func (g *Generator) TypeNameWithPackage(obj Object) string { - return obj.PackageName() + CamelCaseSlice(obj.TypeName()) -} - // GoType returns a string representing the type name, and the wire type func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) { // TODO: Options. @@ -1682,10 +1670,10 @@ func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescripto } func (g *Generator) RecordTypeUse(t string) { - if obj, ok := g.typeNameToObject[t]; ok { + if _, ok := g.typeNameToObject[t]; ok { // Call ObjectNamed to get the true object to record the use. 
- obj = g.ObjectNamed(t) - g.usedPackages[obj.PackageName()] = true + obj := g.ObjectNamed(t) + g.usedPackages[obj.GoImportPath()] = true } } @@ -1746,8 +1734,19 @@ func (g *Generator) generateMessage(message *Descriptor) { oneofTypeName := make(map[*descriptor.FieldDescriptorProto]string) // without star oneofInsertPoints := make(map[int32]int) // oneof_index => offset of g.Buffer - g.PrintComments(message.path) - g.P("type ", ccTypeName, " struct {") + comments := g.PrintComments(message.path) + + // Guarantee deprecation comments appear after user-provided comments. + if message.GetOptions().GetDeprecated() { + if comments { + // Convention: Separate deprecation comments from original + // comments with an empty line. + g.P("//") + } + g.P(deprecationComment) + } + + g.P("type ", Annotate(message.file, message.path, ccTypeName), " struct {") g.In() // allocNames finds a conflict-free variation of the given strings, @@ -1794,7 +1793,8 @@ func (g *Generator) generateMessage(message *Descriptor) { // This is the first field of a oneof we haven't seen before. // Generate the union field. - com := g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)) + oneofFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex) + com := g.PrintComments(oneofFullPath) if com { g.P("//") } @@ -1807,7 +1807,7 @@ func (g *Generator) generateMessage(message *Descriptor) { oneofFieldName[*field.OneofIndex] = fname oneofDisc[*field.OneofIndex] = dname tag := `protobuf_oneof:"` + odp.GetName() + `"` - g.P(fname, " ", dname, " `", tag, "`") + g.P(Annotate(message.file, oneofFullPath, fname), " ", dname, " `", tag, "`") } if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { @@ -1871,16 +1871,26 @@ func (g *Generator) generateMessage(message *Descriptor) { continue } - g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)) - g.P(fieldName, "\t", typename, "\t`", tag, "`") + fieldDeprecated := "" + if field.GetOptions().GetDeprecated() { + fieldDeprecated = deprecationComment + } + + fieldFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i) + g.PrintComments(fieldFullPath) + g.P(Annotate(message.file, fieldFullPath, fieldName), "\t", typename, "\t`", tag, "`", fieldDeprecated) g.RecordTypeUse(field.GetTypeName()) } + g.P("XXX_NoUnkeyedLiteral\tstruct{} `json:\"-\"`") // prevent unkeyed struct literals if len(message.ExtensionRange) > 0 { - g.P(g.Pkg["proto"], ".XXX_InternalExtensions `json:\"-\"`") - } - if !message.proto3() { - g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + messageset := "" + if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { + messageset = "protobuf_messageset:\"1\" " + } + g.P(g.Pkg["proto"], ".XXX_InternalExtensions `", messageset, "json:\"-\"`") } + g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + g.P("XXX_sizecache\tint32 `json:\"-\"`") g.Out() g.P("}") @@ -1892,12 +1902,25 @@ func (g *Generator) generateMessage(message *Descriptor) { all := g.Buffer.Bytes() rem := all[ip:] g.Buffer = bytes.NewBuffer(all[:ip:ip]) // set cap so we don't scribble on rem + oldLen := g.Buffer.Len() for _, field := range message.Field { if field.OneofIndex == nil || *field.OneofIndex != oi { continue } g.P("//\t*", oneofTypeName[field]) } + // If we've inserted text, we also need to fix up affected annotations (as + // they contain offsets that may need to be changed). 
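// Illustrative numbers only: if the "//\t*..." lines written above inserted
// 40 bytes at offset ip, every recorded annotation offset (Begin or End) that
// is >= ip is shifted by 40, so a span [ip+120, ip+135) becomes
// [ip+160, ip+175) and still covers the same identifier; offsets before ip
// are untouched.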
+ offset := int32(g.Buffer.Len() - oldLen) + ip32 := int32(ip) + for _, anno := range g.annotations { + if *anno.Begin >= ip32 { + *anno.Begin += offset + } + if *anno.End >= ip32 { + *anno.End += offset + } + } g.Buffer.Write(rem) } @@ -1909,7 +1932,11 @@ func (g *Generator) generateMessage(message *Descriptor) { for m := message; m != nil; m = m.parent { indexes = append([]string{strconv.Itoa(m.index)}, indexes...) } - g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) {") + g.In() + g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}") + g.Out() + g.P("}") // TODO: Revisit the decision to use a XXX_WellKnownType method // if we change proto.MessageName to work with multiple equivalents. if message.file.GetPackage() == "google.protobuf" && wellKnownTypes[message.GetName()] { @@ -1924,16 +1951,6 @@ func (g *Generator) generateMessage(message *Descriptor) { if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { isMessageSet = true g.P() - g.P("func (m *", ccTypeName, ") Marshal() ([]byte, error) {") - g.In() - g.P("return ", g.Pkg["proto"], ".MarshalMessageSet(&m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") Unmarshal(buf []byte) error {") - g.In() - g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSet(buf, &m.XXX_InternalExtensions)") - g.Out() - g.P("}") g.P("func (m *", ccTypeName, ") MarshalJSON() ([]byte, error) {") g.In() g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)") @@ -1944,9 +1961,6 @@ func (g *Generator) generateMessage(message *Descriptor) { g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)") g.Out() g.P("}") - g.P("// ensure ", ccTypeName, " satisfies proto.Marshaler and proto.Unmarshaler") - g.P("var _ ", g.Pkg["proto"], ".Marshaler = (*", ccTypeName, ")(nil)") - g.P("var _ ", g.Pkg["proto"], ".Unmarshaler = (*", ccTypeName, ")(nil)") } g.P() @@ -1954,7 +1968,7 @@ func (g *Generator) generateMessage(message *Descriptor) { g.In() for _, r := range message.ExtensionRange { end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends - g.P("{", r.Start, ", ", end, "},") + g.P("{Start: ", r.Start, ", End: ", end, "},") } g.Out() g.P("}") @@ -1965,6 +1979,45 @@ func (g *Generator) generateMessage(message *Descriptor) { g.P("}") } + // TODO: It does not scale to keep adding another method for every + // operation on protos that we want to switch over to using the + // table-driven approach. Instead, we should only add a single method + // that allows getting access to the *InternalMessageInfo struct and then + // calling Unmarshal, Marshal, Merge, Size, and Discard directly on that. + + // Wrapper for table-driven marshaling and unmarshaling. 
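// For a hypothetical message Foo, the g.P calls below emit generated code
// roughly like this ("Foo" stands in for the CamelCased message name; "proto"
// is the renamed proto package import):
//
//	func (m *Foo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Foo.Unmarshal(m, b) }
//	func (m *Foo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
//		return xxx_messageInfo_Foo.Marshal(b, m, deterministic)
//	}
//	func (dst *Foo) XXX_Merge(src proto.Message) { xxx_messageInfo_Foo.Merge(dst, src) }
//	func (m *Foo) XXX_Size() int                 { return xxx_messageInfo_Foo.Size(m) }
//	func (m *Foo) XXX_DiscardUnknown()           { xxx_messageInfo_Foo.DiscardUnknown(m) }
//
//	var xxx_messageInfo_Foo proto.InternalMessageInfo
//
// so marshal, unmarshal, merge, size, and discard for each message dispatch
// through a single proto.InternalMessageInfo value instead of per-field
// generated code.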
+ g.P("func (m *", ccTypeName, ") XXX_Unmarshal(b []byte) error {") + g.In() + g.P("return xxx_messageInfo_", ccTypeName, ".Unmarshal(m, b)") + g.Out() + g.P("}") + + g.P("func (m *", ccTypeName, ") XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {") + g.In() + g.P("return xxx_messageInfo_", ccTypeName, ".Marshal(b, m, deterministic)") + g.Out() + g.P("}") + + g.P("func (dst *", ccTypeName, ") XXX_Merge(src ", g.Pkg["proto"], ".Message) {") + g.In() + g.P("xxx_messageInfo_", ccTypeName, ".Merge(dst, src)") + g.Out() + g.P("}") + + g.P("func (m *", ccTypeName, ") XXX_Size() int {") // avoid name clash with "Size" field in some message + g.In() + g.P("return xxx_messageInfo_", ccTypeName, ".Size(m)") + g.Out() + g.P("}") + + g.P("func (m *", ccTypeName, ") XXX_DiscardUnknown() {") + g.In() + g.P("xxx_messageInfo_", ccTypeName, ".DiscardUnknown(m)") + g.Out() + g.P("}") + + g.P("var xxx_messageInfo_", ccTypeName, " ", g.Pkg["proto"], ".InternalMessageInfo") + // Default constants defNames := make(map[*descriptor.FieldDescriptorProto]string) for _, field := range message.Field { @@ -1984,7 +2037,7 @@ func (g *Generator) generateMessage(message *Descriptor) { case typename == "string": def = strconv.Quote(def) case typename == "[]byte": - def = "[]byte(" + strconv.Quote(def) + ")" + def = "[]byte(" + strconv.Quote(unescape(def)) + ")" kind = "var " case def == "inf", def == "-inf", def == "nan": // These names are known to, and defined by, the protocol language. @@ -2029,17 +2082,24 @@ func (g *Generator) generateMessage(message *Descriptor) { // TODO: Revisit this and consider reverting back to anonymous interfaces. for oi := range message.OneofDecl { dname := oneofDisc[int32(oi)] - g.P("type ", dname, " interface { ", dname, "() }") + g.P("type ", dname, " interface {") + g.In() + g.P(dname, "()") + g.Out() + g.P("}") } g.P() - for _, field := range message.Field { + var oneofTypes []string + for i, field := range message.Field { if field.OneofIndex == nil { continue } _, wiretype := g.GoType(message, field) tag := "protobuf:" + g.goTag(message, field, wiretype) - g.P("type ", oneofTypeName[field], " struct{ ", fieldNames[field], " ", fieldTypes[field], " `", tag, "` }") + fieldFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i) + g.P("type ", Annotate(message.file, fieldFullPath, oneofTypeName[field]), " struct{ ", Annotate(message.file, fieldFullPath, fieldNames[field]), " ", fieldTypes[field], " `", tag, "` }") g.RecordTypeUse(field.GetTypeName()) + oneofTypes = append(oneofTypes, oneofTypeName[field]) } g.P() for _, field := range message.Field { @@ -2051,7 +2111,8 @@ func (g *Generator) generateMessage(message *Descriptor) { g.P() for oi := range message.OneofDecl { fname := oneofFieldName[int32(oi)] - g.P("func (m *", ccTypeName, ") Get", fname, "() ", oneofDisc[int32(oi)], " {") + oneofFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, oi) + g.P("func (m *", ccTypeName, ") ", Annotate(message.file, oneofFullPath, "Get"+fname), "() ", oneofDisc[int32(oi)], " {") g.P("if m != nil { return m.", fname, " }") g.P("return nil") g.P("}") @@ -2059,8 +2120,7 @@ func (g *Generator) generateMessage(message *Descriptor) { g.P() // Field getters - var getters []getterSymbol - for _, field := range message.Field { + for i, field := range message.Field { oneof := field.OneofIndex != nil fname := fieldNames[field] @@ -2074,38 +2134,13 @@ func (g *Generator) generateMessage(message *Descriptor) { typename = typename[1:] star = "*" } + fieldFullPath := 
fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i) - // Only export getter symbols for basic types, - // and for messages and enums in the same package. - // Groups are not exported. - // Foreign types can't be hoisted through a public import because - // the importer may not already be importing the defining .proto. - // As an example, imagine we have an import tree like this: - // A.proto -> B.proto -> C.proto - // If A publicly imports B, we need to generate the getters from B in A's output, - // but if one such getter returns something from C then we cannot do that - // because A is not importing C already. - var getter, genType bool - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_GROUP: - getter = false - case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_ENUM: - // Only export getter if its return type is in this package. - getter = g.ObjectNamed(field.GetTypeName()).PackageName() == message.PackageName() - genType = true - default: - getter = true - } - if getter { - getters = append(getters, getterSymbol{ - name: mname, - typ: typename, - typeName: field.GetTypeName(), - genType: genType, - }) + if field.GetOptions().GetDeprecated() { + g.P(deprecationComment) } - g.P("func (m *", ccTypeName, ") "+mname+"() "+typename+" {") + g.P("func (m *", ccTypeName, ") ", Annotate(message.file, fieldFullPath, mname), "() "+typename+" {") g.In() def, hasDef := defNames[field] typeDefaultIsNil := false // whether this field type's default value is a literal nil unless specified @@ -2203,8 +2238,7 @@ func (g *Generator) generateMessage(message *Descriptor) { sym: ccTypeName, hasExtensions: hasExtensions, isMessageSet: isMessageSet, - hasOneof: len(message.OneofDecl) > 0, - getters: getters, + oneofTypes: oneofTypes, } g.file.addExport(message, ms) } @@ -2424,58 +2458,49 @@ func (g *Generator) generateMessage(message *Descriptor) { } g.P("case *", oneofTypeName[field], ":") val := "x." 
+ fieldNames[field] - var wire, varint, fixed string + var varint, fixed string switch *field.Type { case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - wire = "WireFixed64" fixed = "8" case descriptor.FieldDescriptorProto_TYPE_FLOAT: - wire = "WireFixed32" fixed = "4" case descriptor.FieldDescriptorProto_TYPE_INT64, descriptor.FieldDescriptorProto_TYPE_UINT64, descriptor.FieldDescriptorProto_TYPE_INT32, descriptor.FieldDescriptorProto_TYPE_UINT32, descriptor.FieldDescriptorProto_TYPE_ENUM: - wire = "WireVarint" varint = val case descriptor.FieldDescriptorProto_TYPE_FIXED64, descriptor.FieldDescriptorProto_TYPE_SFIXED64: - wire = "WireFixed64" fixed = "8" case descriptor.FieldDescriptorProto_TYPE_FIXED32, descriptor.FieldDescriptorProto_TYPE_SFIXED32: - wire = "WireFixed32" fixed = "4" case descriptor.FieldDescriptorProto_TYPE_BOOL: - wire = "WireVarint" fixed = "1" case descriptor.FieldDescriptorProto_TYPE_STRING: - wire = "WireBytes" fixed = "len(" + val + ")" varint = fixed case descriptor.FieldDescriptorProto_TYPE_GROUP: - wire = "WireStartGroup" fixed = g.Pkg["proto"] + ".Size(" + val + ")" case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - wire = "WireBytes" g.P("s := ", g.Pkg["proto"], ".Size(", val, ")") fixed = "s" varint = fixed case descriptor.FieldDescriptorProto_TYPE_BYTES: - wire = "WireBytes" fixed = "len(" + val + ")" varint = fixed case descriptor.FieldDescriptorProto_TYPE_SINT32: - wire = "WireVarint" varint = "(uint32(" + val + ") << 1) ^ uint32((int32(" + val + ") >> 31))" case descriptor.FieldDescriptorProto_TYPE_SINT64: - wire = "WireVarint" varint = "uint64(" + val + " << 1) ^ uint64((int64(" + val + ") >> 63))" default: g.Fail("unhandled oneof field type ", field.Type.String()) } - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") + // Tag and wire varint is known statically, + // so don't generate code for that part of the size computation. + tagAndWireSize := proto.SizeVarint(uint64(*field.Number << 3)) // wire doesn't affect varint size + g.P("n += ", tagAndWireSize, " // tag and wire") if varint != "" { g.P("n += ", g.Pkg["proto"], ".SizeVarint(uint64(", varint, "))") } @@ -2483,7 +2508,7 @@ func (g *Generator) generateMessage(message *Descriptor) { g.P("n += ", fixed) } if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + g.P("n += ", tagAndWireSize, " // tag and wire") } } g.P("case nil:") @@ -2506,6 +2531,88 @@ func (g *Generator) generateMessage(message *Descriptor) { } g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], ccTypeName, fullName) + // Register types for native map types. + for _, k := range mapFieldKeys(mapFieldTypes) { + fullName := strings.TrimPrefix(*k.TypeName, ".") + g.addInitf("%s.RegisterMapType((%s)(nil), %q)", g.Pkg["proto"], mapFieldTypes[k], fullName) + } +} + +type byTypeName []*descriptor.FieldDescriptorProto + +func (a byTypeName) Len() int { return len(a) } +func (a byTypeName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byTypeName) Less(i, j int) bool { return *a[i].TypeName < *a[j].TypeName } + +// mapFieldKeys returns the keys of m in a consistent order. 
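// (Go randomizes map iteration order, so without this deterministic ordering
// the RegisterMapType init calls above would be emitted in a different order
// on every run, producing unstable generated files.)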
+func mapFieldKeys(m map[*descriptor.FieldDescriptorProto]string) []*descriptor.FieldDescriptorProto { + keys := make([]*descriptor.FieldDescriptorProto, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Sort(byTypeName(keys)) + return keys +} + +var escapeChars = [256]byte{ + 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v', '\\': '\\', '"': '"', '\'': '\'', '?': '?', +} + +// unescape reverses the "C" escaping that protoc does for default values of bytes fields. +// It is best effort in that it effectively ignores malformed input. Seemingly invalid escape +// sequences are conveyed, unmodified, into the decoded result. +func unescape(s string) string { + // NB: Sadly, we can't use strconv.Unquote because protoc will escape both + // single and double quotes, but strconv.Unquote only allows one or the + // other (based on actual surrounding quotes of its input argument). + + var out []byte + for len(s) > 0 { + // regular character, or too short to be valid escape + if s[0] != '\\' || len(s) < 2 { + out = append(out, s[0]) + s = s[1:] + } else if c := escapeChars[s[1]]; c != 0 { + // escape sequence + out = append(out, c) + s = s[2:] + } else if s[1] == 'x' || s[1] == 'X' { + // hex escape, e.g. "\x80 + if len(s) < 4 { + // too short to be valid + out = append(out, s[:2]...) + s = s[2:] + continue + } + v, err := strconv.ParseUint(s[2:4], 16, 8) + if err != nil { + out = append(out, s[:4]...) + } else { + out = append(out, byte(v)) + } + s = s[4:] + } else if '0' <= s[1] && s[1] <= '7' { + // octal escape, can vary from 1 to 3 octal digits; e.g., "\0" "\40" or "\164" + // so consume up to 2 more bytes or up to end-of-string + n := len(s[1:]) - len(strings.TrimLeft(s[1:], "01234567")) + if n > 3 { + n = 3 + } + v, err := strconv.ParseUint(s[1:1+n], 8, 8) + if err != nil { + out = append(out, s[:1+n]...) + } else { + out = append(out, byte(v)) + } + s = s[1+n:] + } else { + // bad escape, just propagate the slash as-is + out = append(out, s[0]) + s = s[1:] + } + } + + return string(out) } func (g *Generator) generateExtension(ext *ExtensionDescriptor) { @@ -2533,10 +2640,15 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) { typeName := ext.TypeName() // Special case for proto2 message sets: If this extension is extending - // proto2_bridge.MessageSet, and its final name component is "message_set_extension", + // proto2.bridge.MessageSet, and its final name component is "message_set_extension", // then drop that last component. + // + // TODO: This should be implemented in the text formatter rather than the generator. 
+ // In addition, the situation for when to apply this special case is implemented + // differently in other languages: + // https://github.com/google/protobuf/blob/aff10976/src/google/protobuf/text_format.cc#L1560 mset := false - if extendedType == "*proto2_bridge.MessageSet" && typeName[len(typeName)-1] == "message_set_extension" { + if extDesc.GetOptions().GetMessageSetWireFormat() && typeName[len(typeName)-1] == "message_set_extension" { typeName = typeName[:len(typeName)-1] mset = true } @@ -2803,3 +2915,14 @@ const ( // tag numbers in EnumDescriptorProto enumValuePath = 2 // value ) + +var supportTypeAliases bool + +func init() { + for _, tag := range build.Default.ReleaseTags { + if tag == "go1.9" { + supportTypeAliases = true + return + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go new file mode 100644 index 00000000..a9b61036 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go @@ -0,0 +1,117 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package remap handles tracking the locations of Go tokens in a source text +across a rewrite by the Go formatter. +*/ +package remap + +import ( + "fmt" + "go/scanner" + "go/token" +) + +// A Location represents a span of byte offsets in the source text. +type Location struct { + Pos, End int // End is exclusive +} + +// A Map represents a mapping between token locations in an input source text +// and locations in the correspnding output text. +type Map map[Location]Location + +// Find reports whether the specified span is recorded by m, and if so returns +// the new location it was mapped to. If the input span was not found, the +// returned location is the same as the input. 
+func (m Map) Find(pos, end int) (Location, bool) { + key := Location{ + Pos: pos, + End: end, + } + if loc, ok := m[key]; ok { + return loc, true + } + return key, false +} + +func (m Map) add(opos, oend, npos, nend int) { + m[Location{Pos: opos, End: oend}] = Location{Pos: npos, End: nend} +} + +// Compute constructs a location mapping from input to output. An error is +// reported if any of the tokens of output cannot be mapped. +func Compute(input, output []byte) (Map, error) { + itok := tokenize(input) + otok := tokenize(output) + if len(itok) != len(otok) { + return nil, fmt.Errorf("wrong number of tokens, %d ≠ %d", len(itok), len(otok)) + } + m := make(Map) + for i, ti := range itok { + to := otok[i] + if ti.Token != to.Token { + return nil, fmt.Errorf("token %d type mismatch: %s ≠ %s", i+1, ti, to) + } + m.add(ti.pos, ti.end, to.pos, to.end) + } + return m, nil +} + +// tokinfo records the span and type of a source token. +type tokinfo struct { + pos, end int + token.Token +} + +func tokenize(src []byte) []tokinfo { + fs := token.NewFileSet() + var s scanner.Scanner + s.Init(fs.AddFile("src", fs.Base(), len(src)), src, nil, scanner.ScanComments) + var info []tokinfo + for { + pos, next, lit := s.Scan() + switch next { + case token.SEMICOLON: + continue + } + info = append(info, tokinfo{ + pos: int(pos - 1), + end: int(pos + token.Pos(len(lit)) - 1), + Token: next, + }) + if next == token.EOF { + break + } + } + return info +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go similarity index 57% rename from vendor/github.com/golang/protobuf/proto/testdata/golden_test.go rename to vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go index 7172d0e9..ccc7fca0 100644 --- a/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2012 The Go Authors. All rights reserved. +// Copyright 2017 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,58 +29,54 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Verify that the compiler output for test.proto is unchanged. - -package testdata +package remap import ( - "crypto/sha1" - "fmt" - "io/ioutil" - "os" - "os/exec" - "path/filepath" + "go/format" "testing" ) -// sum returns in string form (for easy comparison) the SHA-1 hash of the named file. -func sum(t *testing.T, name string) string { - data, err := ioutil.ReadFile(name) - if err != nil { - t.Fatal(err) +func TestErrors(t *testing.T) { + tests := []struct { + in, out string + }{ + {"", "x"}, + {"x", ""}, + {"var x int = 5\n", "var x = 5\n"}, + {"these are \"one\" thing", "those are 'another' thing"}, } - t.Logf("sum(%q): length is %d", name, len(data)) - hash := sha1.New() - _, err = hash.Write(data) - if err != nil { - t.Fatal(err) - } - return fmt.Sprintf("% x", hash.Sum(nil)) -} - -func run(t *testing.T, name string, args ...string) { - cmd := exec.Command(name, args...) 
- cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - err := cmd.Run() - if err != nil { - t.Fatal(err) + for _, test := range tests { + m, err := Compute([]byte(test.in), []byte(test.out)) + if err != nil { + t.Logf("Got expected error: %v", err) + continue + } + t.Errorf("Compute(%q, %q): got %+v, wanted error", test.in, test.out, m) } } -func TestGolden(t *testing.T) { - // Compute the original checksum. - goldenSum := sum(t, "test.pb.go") - // Run the proto compiler. - run(t, "protoc", "--go_out="+os.TempDir(), "test.proto") - newFile := filepath.Join(os.TempDir(), "test.pb.go") - defer os.Remove(newFile) - // Compute the new checksum. - newSum := sum(t, newFile) - // Verify - if newSum != goldenSum { - run(t, "diff", "-u", "test.pb.go", newFile) - t.Fatal("Code generated by protoc-gen-go has changed; update test.pb.go") +func TestMatching(t *testing.T) { + // The input is a source text that will be rearranged by the formatter. + const input = `package foo +var s int +func main(){} +` + + output, err := format.Source([]byte(input)) + if err != nil { + t.Fatalf("Formatting failed: %v", err) + } + m, err := Compute([]byte(input), output) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + // Verify that the mapped locations have the same text. + for key, val := range m { + want := input[key.Pos:key.End] + got := string(output[val.Pos:val.End]) + if got != want { + t.Errorf("Token at %d:%d: got %q, want %q", key.Pos, key.End, got, want) + } } } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go index a5ebc853..571147cf 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go @@ -59,9 +59,10 @@ func TestCamelCase(t *testing.T) { func TestGoPackageOption(t *testing.T) { tests := []struct { - in string - impPath, pkg string - ok bool + in string + impPath GoImportPath + pkg GoPackageName + ok bool }{ {"", "", "", false}, {"foo", "", "foo", true}, @@ -83,3 +84,32 @@ func TestGoPackageOption(t *testing.T) { } } } + +func TestUnescape(t *testing.T) { + tests := []struct { + in string + out string + }{ + // successful cases, including all kinds of escapes + {"", ""}, + {"foo bar baz frob nitz", "foo bar baz frob nitz"}, + {`\000\001\002\003\004\005\006\007`, string([]byte{0, 1, 2, 3, 4, 5, 6, 7})}, + {`\a\b\f\n\r\t\v\\\?\'\"`, string([]byte{'\a', '\b', '\f', '\n', '\r', '\t', '\v', '\\', '?', '\'', '"'})}, + {`\x10\x20\x30\x40\x50\x60\x70\x80`, string([]byte{16, 32, 48, 64, 80, 96, 112, 128})}, + // variable length octal escapes + {`\0\018\222\377\3\04\005\6\07`, string([]byte{0, 1, '8', 0222, 255, 3, 4, 5, 6, 7})}, + // malformed escape sequences left as is + {"foo \\g bar", "foo \\g bar"}, + {"foo \\xg0 bar", "foo \\xg0 bar"}, + {"\\", "\\"}, + {"\\x", "\\x"}, + {"\\xf", "\\xf"}, + {"\\777", "\\777"}, // overflows byte + } + for _, tc := range tests { + s := unescape(tc.in) + if s != tc.out { + t.Errorf("doUnescape(%q) = %q; should have been %q", tc.in, s, tc.out) + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go new file mode 100644 index 00000000..2630de68 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go @@ -0,0 +1,422 @@ +package main + +import ( + "bytes" + "flag" + "fmt" + "go/build" + "go/parser" + "go/token" + 
"io/ioutil" + "os" + "os/exec" + "path/filepath" + "regexp" + "runtime" + "strings" + "testing" +) + +// Set --regenerate to regenerate the golden files. +var regenerate = flag.Bool("regenerate", false, "regenerate golden files") + +// When the environment variable RUN_AS_PROTOC_GEN_GO is set, we skip running +// tests and instead act as protoc-gen-go. This allows the test binary to +// pass itself to protoc. +func init() { + if os.Getenv("RUN_AS_PROTOC_GEN_GO") != "" { + main() + os.Exit(0) + } +} + +func TestGolden(t *testing.T) { + workdir, err := ioutil.TempDir("", "proto-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(workdir) + + // Find all the proto files we need to compile. We assume that each directory + // contains the files for a single package. + supportTypeAliases := hasReleaseTag("go1.9") + packages := map[string][]string{} + err = filepath.Walk("testdata", func(path string, info os.FileInfo, err error) error { + if filepath.Base(path) == "import_public" && !supportTypeAliases { + // Public imports require type alias support. + return filepath.SkipDir + } + if !strings.HasSuffix(path, ".proto") { + return nil + } + dir := filepath.Dir(path) + packages[dir] = append(packages[dir], path) + return nil + }) + if err != nil { + t.Fatal(err) + } + + // Compile each package, using this binary as protoc-gen-go. + for _, sources := range packages { + args := []string{"-Itestdata", "--go_out=plugins=grpc,paths=source_relative:" + workdir} + args = append(args, sources...) + protoc(t, args) + } + + // Compare each generated file to the golden version. + filepath.Walk(workdir, func(genPath string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + + // For each generated file, figure out the path to the corresponding + // golden file in the testdata directory. + relPath, err := filepath.Rel(workdir, genPath) + if err != nil { + t.Errorf("filepath.Rel(%q, %q): %v", workdir, genPath, err) + return nil + } + if filepath.SplitList(relPath)[0] == ".." { + t.Errorf("generated file %q is not relative to %q", genPath, workdir) + } + goldenPath := filepath.Join("testdata", relPath) + + got, err := ioutil.ReadFile(genPath) + if err != nil { + t.Error(err) + return nil + } + if *regenerate { + // If --regenerate set, just rewrite the golden files. + err := ioutil.WriteFile(goldenPath, got, 0666) + if err != nil { + t.Error(err) + } + return nil + } + + want, err := ioutil.ReadFile(goldenPath) + if err != nil { + t.Error(err) + return nil + } + + want = fdescRE.ReplaceAll(want, nil) + got = fdescRE.ReplaceAll(got, nil) + if bytes.Equal(got, want) { + return nil + } + + cmd := exec.Command("diff", "-u", goldenPath, genPath) + out, _ := cmd.CombinedOutput() + t.Errorf("golden file differs: %v\n%v", relPath, string(out)) + return nil + }) +} + +var fdescRE = regexp.MustCompile(`(?ms)^var fileDescriptor.*}`) + +// Source files used by TestParameters. 
+const ( + aProto = ` +syntax = "proto3"; +package test.alpha; +option go_package = "package/alpha"; +import "beta/b.proto"; +message M { test.beta.M field = 1; }` + + bProto = ` +syntax = "proto3"; +package test.beta; +// no go_package option +message M {}` +) + +func TestParameters(t *testing.T) { + for _, test := range []struct { + parameters string + wantFiles map[string]bool + wantImportsA map[string]bool + wantPackageA string + wantPackageB string + }{{ + parameters: "", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + "github.com/golang/protobuf/proto": true, + "beta": true, + }, + }, { + parameters: "import_prefix=prefix", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + // This really doesn't seem like useful behavior. + "prefixgithub.com/golang/protobuf/proto": true, + "prefixbeta": true, + }, + }, { + // import_path only affects the 'package' line. + parameters: "import_path=import/path/of/pkg", + wantPackageA: "alpha", + wantPackageB: "pkg", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + }, { + parameters: "Mbeta/b.proto=package/gamma", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + "github.com/golang/protobuf/proto": true, + // Rewritten by the M parameter. + "package/gamma": true, + }, + }, { + parameters: "import_prefix=prefix,Mbeta/b.proto=package/gamma", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + // import_prefix applies after M. + "prefixpackage/gamma": true, + }, + }, { + parameters: "paths=source_relative", + wantFiles: map[string]bool{ + "alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + }, { + parameters: "paths=source_relative,import_prefix=prefix", + wantFiles: map[string]bool{ + // import_prefix doesn't affect filenames. + "alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + }} { + name := test.parameters + if name == "" { + name = "defaults" + } + // TODO: Switch to t.Run when we no longer support Go 1.6. 
+ t.Logf("TEST: %v", name) + workdir, err := ioutil.TempDir("", "proto-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(workdir) + + for _, dir := range []string{"alpha", "beta", "out"} { + if err := os.MkdirAll(filepath.Join(workdir, dir), 0777); err != nil { + t.Fatal(err) + } + } + + if err := ioutil.WriteFile(filepath.Join(workdir, "alpha", "a.proto"), []byte(aProto), 0666); err != nil { + t.Fatal(err) + } + + if err := ioutil.WriteFile(filepath.Join(workdir, "beta", "b.proto"), []byte(bProto), 0666); err != nil { + t.Fatal(err) + } + + protoc(t, []string{ + "-I" + workdir, + "--go_out=" + test.parameters + ":" + filepath.Join(workdir, "out"), + filepath.Join(workdir, "alpha", "a.proto"), + }) + protoc(t, []string{ + "-I" + workdir, + "--go_out=" + test.parameters + ":" + filepath.Join(workdir, "out"), + filepath.Join(workdir, "beta", "b.proto"), + }) + + contents := make(map[string]string) + gotFiles := make(map[string]bool) + outdir := filepath.Join(workdir, "out") + filepath.Walk(outdir, func(p string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + base := filepath.Base(p) + if base == "a.pb.go" || base == "b.pb.go" { + b, err := ioutil.ReadFile(p) + if err != nil { + t.Fatal(err) + } + contents[base] = string(b) + } + relPath, _ := filepath.Rel(outdir, p) + gotFiles[relPath] = true + return nil + }) + for got := range gotFiles { + if runtime.GOOS == "windows" { + got = filepath.ToSlash(got) + } + if !test.wantFiles[got] { + t.Errorf("unexpected output file: %v", got) + } + } + for want := range test.wantFiles { + if runtime.GOOS == "windows" { + want = filepath.FromSlash(want) + } + if !gotFiles[want] { + t.Errorf("missing output file: %v", want) + } + } + gotPackageA, gotImports, err := parseFile(contents["a.pb.go"]) + if err != nil { + t.Fatal(err) + } + gotPackageB, _, err := parseFile(contents["b.pb.go"]) + if err != nil { + t.Fatal(err) + } + if got, want := gotPackageA, test.wantPackageA; want != got { + t.Errorf("output file a.pb.go is package %q, want %q", got, want) + } + if got, want := gotPackageB, test.wantPackageB; want != got { + t.Errorf("output file b.pb.go is package %q, want %q", got, want) + } + missingImport := false + WantImport: + for want := range test.wantImportsA { + for _, imp := range gotImports { + if `"`+want+`"` == imp { + continue WantImport + } + } + t.Errorf("output file a.pb.go does not contain expected import %q", want) + missingImport = true + } + if missingImport { + t.Error("got imports:") + for _, imp := range gotImports { + t.Errorf(" %v", imp) + } + } + } +} + +func TestPackageComment(t *testing.T) { + workdir, err := ioutil.TempDir("", "proto-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(workdir) + + var packageRE = regexp.MustCompile(`(?m)^package .*`) + + for i, test := range []struct { + goPackageOption string + wantPackage string + }{{ + goPackageOption: ``, + wantPackage: `package proto_package`, + }, { + goPackageOption: `option go_package = "go_package";`, + wantPackage: `package go_package`, + }, { + goPackageOption: `option go_package = "import/path/of/go_package";`, + wantPackage: `package go_package // import "import/path/of/go_package"`, + }, { + goPackageOption: `option go_package = "import/path/of/something;go_package";`, + wantPackage: `package go_package // import "import/path/of/something"`, + }, { + goPackageOption: `option go_package = "import_path;go_package";`, + wantPackage: `package go_package // import "import_path"`, + }} { + srcName := 
filepath.Join(workdir, fmt.Sprintf("%d.proto", i)) + tgtName := filepath.Join(workdir, fmt.Sprintf("%d.pb.go", i)) + + buf := &bytes.Buffer{} + fmt.Fprintln(buf, `syntax = "proto3";`) + fmt.Fprintln(buf, `package proto_package;`) + fmt.Fprintln(buf, test.goPackageOption) + if err := ioutil.WriteFile(srcName, buf.Bytes(), 0666); err != nil { + t.Fatal(err) + } + + protoc(t, []string{"-I" + workdir, "--go_out=paths=source_relative:" + workdir, srcName}) + + out, err := ioutil.ReadFile(tgtName) + if err != nil { + t.Fatal(err) + } + + pkg := packageRE.Find(out) + if pkg == nil { + t.Errorf("generated .pb.go contains no package line\n\nsource:\n%v\n\noutput:\n%v", buf.String(), string(out)) + continue + } + + if got, want := string(pkg), test.wantPackage; got != want { + t.Errorf("unexpected package statement with go_package = %q\n got: %v\nwant: %v", test.goPackageOption, got, want) + } + } +} + +// parseFile returns a file's package name and a list of all packages it imports. +func parseFile(source string) (packageName string, imports []string, err error) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "", source, parser.ImportsOnly) + if err != nil { + return "", nil, err + } + for _, imp := range f.Imports { + imports = append(imports, imp.Path.Value) + } + return f.Name.Name, imports, nil +} + +func protoc(t *testing.T, args []string) { + cmd := exec.Command("protoc", "--plugin=protoc-gen-go="+os.Args[0]) + cmd.Args = append(cmd.Args, args...) + // We set the RUN_AS_PROTOC_GEN_GO environment variable to indicate that + // the subprocess should act as a proto compiler rather than a test. + cmd.Env = append(os.Environ(), "RUN_AS_PROTOC_GEN_GO=1") + out, err := cmd.CombinedOutput() + if len(out) > 0 || err != nil { + t.Log("RUNNING: ", strings.Join(cmd.Args, " ")) + } + if len(out) > 0 { + t.Log(string(out)) + } + if err != nil { + t.Fatalf("protoc: %v", err) + } +} + +func hasReleaseTag(want string) bool { + for _, tag := range build.Default.ReleaseTags { + if tag == want { + return true + } + } + return false +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go index 2660e47a..1723680a 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go @@ -130,19 +130,23 @@ func (g *grpc) GenerateImports(file *generator.FileDescriptor) { return } g.P("import (") - g.P(contextPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, contextPkgPath))) - g.P(grpcPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, grpcPkgPath))) + g.P(contextPkg, " ", generator.GoImportPath(path.Join(string(g.gen.ImportPrefix), contextPkgPath))) + g.P(grpcPkg, " ", generator.GoImportPath(path.Join(string(g.gen.ImportPrefix), grpcPkgPath))) g.P(")") g.P() } // reservedClientName records whether a client name is reserved on the client side. var reservedClientName = map[string]bool{ -// TODO: do we need any in gRPC? + // TODO: do we need any in gRPC? } func unexport(s string) string { return strings.ToLower(s[:1]) + s[1:] } +// deprecationComment is the standard comment added to deprecated +// messages, fields, enums, and enum values. +var deprecationComment = "// Deprecated: Do not use." + // generateService generates all the code for the named service. func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.ServiceDescriptorProto, index int) { path := fmt.Sprintf("6,%d", index) // 6 means service. 
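// The generateService changes below add the standard deprecation marker to
// the generated client and server surface of a service declared with
// "option deprecated = true;". For a hypothetical service Search, the output
// gains, roughly:
//
//	// Deprecated: Do not use.
//	type SearchClient interface { ... }
//
//	// Deprecated: Do not use.
//	func NewSearchClient(cc *grpc.ClientConn) SearchClient { ... }
//
//	// Deprecated: Do not use.
//	type SearchServer interface { ... }
//
//	// Deprecated: Do not use.
//	func RegisterSearchServer(s *grpc.Server, srv SearchServer) { ... }
//
// Methods individually marked deprecated get the same comment on their
// generated client methods.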
@@ -153,12 +157,16 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi fullServName = pkg + "." + fullServName } servName := generator.CamelCase(origServName) + deprecated := service.GetOptions().GetDeprecated() g.P() g.P("// Client API for ", servName, " service") g.P() // Client interface. + if deprecated { + g.P(deprecationComment) + } g.P("type ", servName, "Client interface {") for i, method := range service.Method { g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. @@ -174,6 +182,9 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P() // NewClient factory. + if deprecated { + g.P(deprecationComment) + } g.P("func New", servName, "Client (cc *", grpcPkg, ".ClientConn) ", servName, "Client {") g.P("return &", unexport(servName), "Client{cc}") g.P("}") @@ -200,6 +211,9 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P() // Server interface. + if deprecated { + g.P(deprecationComment) + } serverType := servName + "Server" g.P("type ", serverType, " interface {") for i, method := range service.Method { @@ -210,6 +224,9 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P() // Server registration. + if deprecated { + g.P(deprecationComment) + } g.P("func Register", servName, "Server(s *", grpcPkg, ".Server, srv ", serverType, ") {") g.P("s.RegisterService(&", serviceDescVar, `, srv)`) g.P("}") @@ -283,6 +300,9 @@ func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar strin inType := g.typeName(method.GetInputType()) outType := g.typeName(method.GetOutputType()) + if method.GetOptions().GetDeprecated() { + g.P(deprecationComment) + } g.P("func (c *", unexport(servName), "Client) ", g.generateClientSignature(servName, method), "{") if !method.GetServerStreaming() && !method.GetClientStreaming() { g.P("out := new(", outType, ")") diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile deleted file mode 100644 index 4095623e..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Not stored here, but plugin.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/compiler/plugin.proto -# Also we need to fix an import. -regenerate: - @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=Mgoogle/protobuf/descriptor.proto=github.com/golang/protobuf/protoc-gen-go/descriptor:../../../../.. \ - -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/compiler/plugin.proto - -restore: - cp plugin.pb.golden plugin.pb.go - -preserve: - cp plugin.pb.go plugin.pb.golden diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go index c608a248..61bfc10e 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go @@ -37,14 +37,33 @@ type Version struct { Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should // be empty for mainline stable releases. - Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` - XXX_unrecognized []byte `json:"-"` + Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Version) Reset() { *m = Version{} } func (m *Version) String() string { return proto.CompactTextString(m) } func (*Version) ProtoMessage() {} func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (m *Version) Unmarshal(b []byte) error { + return xxx_messageInfo_Version.Unmarshal(m, b) +} +func (m *Version) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Version.Marshal(b, m, deterministic) +} +func (dst *Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_Version.Merge(dst, src) +} +func (m *Version) XXX_Size() int { + return xxx_messageInfo_Version.Size(m) +} +func (m *Version) XXX_DiscardUnknown() { + xxx_messageInfo_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_Version proto.InternalMessageInfo func (m *Version) GetMajor() int32 { if m != nil && m.Major != nil { @@ -98,14 +117,33 @@ type CodeGeneratorRequest struct { // fully qualified. ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"` // The version number of protocol compiler. 
- CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` - XXX_unrecognized []byte `json:"-"` + CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} } func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorRequest) ProtoMessage() {} func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *CodeGeneratorRequest) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorRequest.Unmarshal(m, b) +} +func (m *CodeGeneratorRequest) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorRequest.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorRequest.Merge(dst, src) +} +func (m *CodeGeneratorRequest) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorRequest.Size(m) +} +func (m *CodeGeneratorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorRequest proto.InternalMessageInfo func (m *CodeGeneratorRequest) GetFileToGenerate() []string { if m != nil { @@ -145,15 +183,34 @@ type CodeGeneratorResponse struct { // problem in protoc itself -- such as the input CodeGeneratorRequest being // unparseable -- should be reported by writing a message to stderr and // exiting with a non-zero status code. - Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` - File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` + Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} } func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorResponse) ProtoMessage() {} func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *CodeGeneratorResponse) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorResponse.Unmarshal(m, b) +} +func (m *CodeGeneratorResponse) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorResponse.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorResponse.Merge(dst, src) +} +func (m *CodeGeneratorResponse) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorResponse.Size(m) +} +func (m *CodeGeneratorResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorResponse proto.InternalMessageInfo func (m *CodeGeneratorResponse) GetError() string { if m != nil && m.Error != nil { @@ -222,14 +279,33 @@ type CodeGeneratorResponse_File struct { // If |insertion_point| is present, |name| must also be present. 
InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"` // The file contents. - Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` - XXX_unrecognized []byte `json:"-"` + Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} } func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorResponse_File) ProtoMessage() {} func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } +func (m *CodeGeneratorResponse_File) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorResponse_File.Unmarshal(m, b) +} +func (m *CodeGeneratorResponse_File) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorResponse_File.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorResponse_File.Merge(dst, src) +} +func (m *CodeGeneratorResponse_File) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorResponse_File.Size(m) +} +func (m *CodeGeneratorResponse_File) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorResponse_File.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorResponse_File proto.InternalMessageInfo func (m *CodeGeneratorResponse_File) GetName() string { if m != nil && m.Name != nil { diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto new file mode 100644 index 00000000..5b557452 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto @@ -0,0 +1,167 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to +// change. +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +syntax = "proto2"; +package google.protobuf.compiler; +option java_package = "com.google.protobuf.compiler"; +option java_outer_classname = "PluginProtos"; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go"; + +import "google/protobuf/descriptor.proto"; + +// The version number of protocol compiler. +message Version { + optional int32 major = 1; + optional int32 minor = 2; + optional int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + optional string suffix = 4; +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + repeated FileDescriptorProto proto_file = 15; + + // The version number of protocol compiler. + optional Version compiler_version = 3; + +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. 
+ // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. 
+ optional string content = 15; + } + repeated File file = 15; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile deleted file mode 100644 index a0bf9fef..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile +++ /dev/null @@ -1,73 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -all: - @echo run make test - -include ../../Make.protobuf - -test: golden testbuild - -#test: golden testbuild extension_test -# ./extension_test -# @echo PASS - -my_test/test.pb.go: my_test/test.proto - protoc --go_out=Mmulti/multi1.proto=github.com/golang/protobuf/protoc-gen-go/testdata/multi:. $< - -golden: - make -B my_test/test.pb.go - sed -i -e '/return.*fileDescriptor/d' my_test/test.pb.go - sed -i -e '/^var fileDescriptor/,/^}/d' my_test/test.pb.go - sed -i -e '/proto.RegisterFile.*fileDescriptor/d' my_test/test.pb.go - gofmt -w my_test/test.pb.go - diff -w my_test/test.pb.go my_test/test.pb.go.golden - -nuke: clean - -testbuild: regenerate - go test - -regenerate: - # Invoke protoc once to generate three independent .pb.go files in the same package. - protoc --go_out=. multi/multi1.proto multi/multi2.proto multi/multi3.proto - -#extension_test: extension_test.$O -# $(LD) -L. 
-o $@ $< - -#multi.a: multi3.pb.$O multi2.pb.$O multi1.pb.$O -# rm -f multi.a -# $(QUOTED_GOBIN)/gopack grc $@ $< - -#test.pb.go: imp.pb.go -#multi1.pb.go: multi2.pb.go multi3.pb.go -#main.$O: imp.pb.$O test.pb.$O multi.a -#extension_test.$O: extension_base.pb.$O extension_extra.pb.$O extension_user.pb.$O diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go new file mode 100644 index 00000000..6ebae9da --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go @@ -0,0 +1,232 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// deprecated/deprecated.proto is a deprecated file. + +package deprecated // import "github.com/golang/protobuf/protoc-gen-go/testdata/deprecated" + +/* +package deprecated contains only deprecated messages and services. +*/ + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// DeprecatedEnum contains deprecated values. +type DeprecatedEnum int32 // Deprecated: Do not use. +const ( + // DEPRECATED is the iota value of this enum. + DeprecatedEnum_DEPRECATED DeprecatedEnum = 0 // Deprecated: Do not use. +) + +var DeprecatedEnum_name = map[int32]string{ + 0: "DEPRECATED", +} +var DeprecatedEnum_value = map[string]int32{ + "DEPRECATED": 0, +} + +func (x DeprecatedEnum) String() string { + return proto.EnumName(DeprecatedEnum_name, int32(x)) +} +func (DeprecatedEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_deprecated_9e1889ba21817fad, []int{0} +} + +// DeprecatedRequest is a request to DeprecatedCall. +// +// Deprecated: Do not use. +type DeprecatedRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeprecatedRequest) Reset() { *m = DeprecatedRequest{} } +func (m *DeprecatedRequest) String() string { return proto.CompactTextString(m) } +func (*DeprecatedRequest) ProtoMessage() {} +func (*DeprecatedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_deprecated_9e1889ba21817fad, []int{0} +} +func (m *DeprecatedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeprecatedRequest.Unmarshal(m, b) +} +func (m *DeprecatedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeprecatedRequest.Marshal(b, m, deterministic) +} +func (dst *DeprecatedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeprecatedRequest.Merge(dst, src) +} +func (m *DeprecatedRequest) XXX_Size() int { + return xxx_messageInfo_DeprecatedRequest.Size(m) +} +func (m *DeprecatedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeprecatedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeprecatedRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +type DeprecatedResponse struct { + // DeprecatedField contains a DeprecatedEnum. 
+ DeprecatedField DeprecatedEnum `protobuf:"varint,1,opt,name=deprecated_field,json=deprecatedField,enum=deprecated.DeprecatedEnum" json:"deprecated_field,omitempty"` // Deprecated: Do not use. + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeprecatedResponse) Reset() { *m = DeprecatedResponse{} } +func (m *DeprecatedResponse) String() string { return proto.CompactTextString(m) } +func (*DeprecatedResponse) ProtoMessage() {} +func (*DeprecatedResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_deprecated_9e1889ba21817fad, []int{1} +} +func (m *DeprecatedResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeprecatedResponse.Unmarshal(m, b) +} +func (m *DeprecatedResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeprecatedResponse.Marshal(b, m, deterministic) +} +func (dst *DeprecatedResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeprecatedResponse.Merge(dst, src) +} +func (m *DeprecatedResponse) XXX_Size() int { + return xxx_messageInfo_DeprecatedResponse.Size(m) +} +func (m *DeprecatedResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeprecatedResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeprecatedResponse proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeprecatedResponse) GetDeprecatedField() DeprecatedEnum { + if m != nil { + return m.DeprecatedField + } + return DeprecatedEnum_DEPRECATED +} + +func init() { + proto.RegisterType((*DeprecatedRequest)(nil), "deprecated.DeprecatedRequest") + proto.RegisterType((*DeprecatedResponse)(nil), "deprecated.DeprecatedResponse") + proto.RegisterEnum("deprecated.DeprecatedEnum", DeprecatedEnum_name, DeprecatedEnum_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for DeprecatedService service + +// Deprecated: Do not use. +type DeprecatedServiceClient interface { + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + DeprecatedCall(ctx context.Context, in *DeprecatedRequest, opts ...grpc.CallOption) (*DeprecatedResponse, error) +} + +type deprecatedServiceClient struct { + cc *grpc.ClientConn +} + +// Deprecated: Do not use. +func NewDeprecatedServiceClient(cc *grpc.ClientConn) DeprecatedServiceClient { + return &deprecatedServiceClient{cc} +} + +// Deprecated: Do not use. +func (c *deprecatedServiceClient) DeprecatedCall(ctx context.Context, in *DeprecatedRequest, opts ...grpc.CallOption) (*DeprecatedResponse, error) { + out := new(DeprecatedResponse) + err := grpc.Invoke(ctx, "/deprecated.DeprecatedService/DeprecatedCall", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for DeprecatedService service + +// Deprecated: Do not use. +type DeprecatedServiceServer interface { + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + DeprecatedCall(context.Context, *DeprecatedRequest) (*DeprecatedResponse, error) +} + +// Deprecated: Do not use. 
+func RegisterDeprecatedServiceServer(s *grpc.Server, srv DeprecatedServiceServer) { + s.RegisterService(&_DeprecatedService_serviceDesc, srv) +} + +func _DeprecatedService_DeprecatedCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeprecatedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeprecatedServiceServer).DeprecatedCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/deprecated.DeprecatedService/DeprecatedCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeprecatedServiceServer).DeprecatedCall(ctx, req.(*DeprecatedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DeprecatedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "deprecated.DeprecatedService", + HandlerType: (*DeprecatedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeprecatedCall", + Handler: _DeprecatedService_DeprecatedCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "deprecated/deprecated.proto", +} + +func init() { + proto.RegisterFile("deprecated/deprecated.proto", fileDescriptor_deprecated_9e1889ba21817fad) +} + +var fileDescriptor_deprecated_9e1889ba21817fad = []byte{ + // 248 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0x49, 0x2d, 0x28, + 0x4a, 0x4d, 0x4e, 0x2c, 0x49, 0x4d, 0xd1, 0x47, 0x30, 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, + 0xb8, 0x10, 0x22, 0x4a, 0xe2, 0x5c, 0x82, 0x2e, 0x70, 0x5e, 0x50, 0x6a, 0x61, 0x69, 0x6a, 0x71, + 0x89, 0x15, 0x93, 0x04, 0xa3, 0x52, 0x32, 0x97, 0x10, 0xb2, 0x44, 0x71, 0x41, 0x7e, 0x5e, 0x71, + 0xaa, 0x90, 0x27, 0x97, 0x00, 0x42, 0x73, 0x7c, 0x5a, 0x66, 0x6a, 0x4e, 0x8a, 0x04, 0xa3, 0x02, + 0xa3, 0x06, 0x9f, 0x91, 0x94, 0x1e, 0x92, 0x3d, 0x08, 0x9d, 0xae, 0x79, 0xa5, 0xb9, 0x4e, 0x4c, + 0x12, 0x8c, 0x41, 0xfc, 0x08, 0x69, 0x37, 0x90, 0x36, 0x90, 0x25, 0x5a, 0x1a, 0x5c, 0x7c, 0xa8, + 0x4a, 0x85, 0x84, 0xb8, 0xb8, 0x5c, 0x5c, 0x03, 0x82, 0x5c, 0x9d, 0x1d, 0x43, 0x5c, 0x5d, 0x04, + 0x18, 0xa4, 0x98, 0x38, 0x18, 0xa5, 0x98, 0x24, 0x18, 0x8d, 0xf2, 0x90, 0xdd, 0x19, 0x9c, 0x5a, + 0x54, 0x96, 0x99, 0x9c, 0x2a, 0x14, 0x82, 0xac, 0xdd, 0x39, 0x31, 0x27, 0x47, 0x48, 0x16, 0xbb, + 0x2b, 0xa0, 0x1e, 0x93, 0x92, 0xc3, 0x25, 0x0d, 0xf1, 0x9e, 0x12, 0x73, 0x07, 0x13, 0xa3, 0x14, + 0x88, 0x70, 0x72, 0x8c, 0xb2, 0x49, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, + 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x07, 0x07, 0x5f, 0x52, 0x69, 0x1a, 0x84, 0x91, 0xac, + 0x9b, 0x9e, 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x5f, 0x92, 0x5a, 0x5c, 0x92, 0x92, 0x58, 0x92, 0x88, + 0x14, 0xd2, 0x3b, 0x18, 0x19, 0x93, 0xd8, 0xc0, 0xaa, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x0e, 0xf5, 0x6c, 0x87, 0x8c, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto new file mode 100644 index 00000000..b314166d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto @@ -0,0 +1,69 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +// package deprecated contains only deprecated messages and services. +package deprecated; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/deprecated"; + +option deprecated = true; // file-level deprecation + +// DeprecatedRequest is a request to DeprecatedCall. +message DeprecatedRequest { + option deprecated = true; +} + +message DeprecatedResponse { + // comment for DeprecatedResponse is omitted to guarantee deprecation + // message doesn't append unnecessary comments. + option deprecated = true; + // DeprecatedField contains a DeprecatedEnum. + DeprecatedEnum deprecated_field = 1 [deprecated=true]; +} + +// DeprecatedEnum contains deprecated values. +enum DeprecatedEnum { + option deprecated = true; + // DEPRECATED is the iota value of this enum. + DEPRECATED = 0 [deprecated=true]; +} + +// DeprecatedService is for making DeprecatedCalls +service DeprecatedService { + option deprecated = true; + + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + rpc DeprecatedCall(DeprecatedRequest) returns (DeprecatedResponse) { + option deprecated = true; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go new file mode 100644 index 00000000..a08e8eda --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_base/extension_base.proto + +package extension_base // import "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type BaseMessage struct { + Height *int32 `protobuf:"varint,1,opt,name=height" json:"height,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BaseMessage) Reset() { *m = BaseMessage{} } +func (m *BaseMessage) String() string { return proto.CompactTextString(m) } +func (*BaseMessage) ProtoMessage() {} +func (*BaseMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_base_41d3c712c9fc37fc, []int{0} +} + +var extRange_BaseMessage = []proto.ExtensionRange{ + {Start: 4, End: 9}, + {Start: 16, End: 536870911}, +} + +func (*BaseMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_BaseMessage +} +func (m *BaseMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BaseMessage.Unmarshal(m, b) +} +func (m *BaseMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BaseMessage.Marshal(b, m, deterministic) +} +func (dst *BaseMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_BaseMessage.Merge(dst, src) +} +func (m *BaseMessage) XXX_Size() int { + return xxx_messageInfo_BaseMessage.Size(m) +} +func (m *BaseMessage) XXX_DiscardUnknown() { + xxx_messageInfo_BaseMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_BaseMessage proto.InternalMessageInfo + +func (m *BaseMessage) GetHeight() int32 { + if m != nil && m.Height != nil { + return *m.Height + } + return 0 +} + +// Another message that may be extended, using message_set_wire_format. 
+type OldStyleMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldStyleMessage) Reset() { *m = OldStyleMessage{} } +func (m *OldStyleMessage) String() string { return proto.CompactTextString(m) } +func (*OldStyleMessage) ProtoMessage() {} +func (*OldStyleMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_base_41d3c712c9fc37fc, []int{1} +} + +func (m *OldStyleMessage) MarshalJSON() ([]byte, error) { + return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) +} +func (m *OldStyleMessage) UnmarshalJSON(buf []byte) error { + return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +} + +var extRange_OldStyleMessage = []proto.ExtensionRange{ + {Start: 100, End: 2147483646}, +} + +func (*OldStyleMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OldStyleMessage +} +func (m *OldStyleMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldStyleMessage.Unmarshal(m, b) +} +func (m *OldStyleMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldStyleMessage.Marshal(b, m, deterministic) +} +func (dst *OldStyleMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldStyleMessage.Merge(dst, src) +} +func (m *OldStyleMessage) XXX_Size() int { + return xxx_messageInfo_OldStyleMessage.Size(m) +} +func (m *OldStyleMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OldStyleMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OldStyleMessage proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BaseMessage)(nil), "extension_base.BaseMessage") + proto.RegisterType((*OldStyleMessage)(nil), "extension_base.OldStyleMessage") +} + +func init() { + proto.RegisterFile("extension_base/extension_base.proto", fileDescriptor_extension_base_41d3c712c9fc37fc) +} + +var fileDescriptor_extension_base_41d3c712c9fc37fc = []byte{ + // 179 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4e, 0xad, 0x28, 0x49, + 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x8b, 0x4f, 0x4a, 0x2c, 0x4e, 0xd5, 0x47, 0xe5, 0xea, 0x15, 0x14, + 0xe5, 0x97, 0xe4, 0x0b, 0xf1, 0xa1, 0x8a, 0x2a, 0x99, 0x72, 0x71, 0x3b, 0x25, 0x16, 0xa7, 0xfa, + 0xa6, 0x16, 0x17, 0x27, 0xa6, 0xa7, 0x0a, 0x89, 0x71, 0xb1, 0x65, 0xa4, 0x66, 0xa6, 0x67, 0x94, + 0x48, 0x30, 0x2a, 0x30, 0x6a, 0xb0, 0x06, 0x41, 0x79, 0x5a, 0x2c, 0x1c, 0x2c, 0x02, 0x5c, 0x5a, + 0x1c, 0x1c, 0x02, 0x02, 0x0d, 0x0d, 0x0d, 0x0d, 0x4c, 0x4a, 0xf2, 0x5c, 0xfc, 0xfe, 0x39, 0x29, + 0xc1, 0x25, 0x95, 0x39, 0x30, 0xad, 0x5a, 0x1c, 0x1c, 0x29, 0x02, 0xff, 0xff, 0xff, 0xff, 0xcf, + 0x6e, 0xc5, 0xc4, 0xc1, 0xe8, 0xe4, 0x14, 0xe5, 0x90, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, + 0x9c, 0x9f, 0xab, 0x9f, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x0f, 0x76, 0x42, 0x52, 0x69, 0x1a, + 0x84, 0x91, 0xac, 0x9b, 0x9e, 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x5f, 0x92, 0x5a, 0x5c, 0x92, 0x92, + 0x58, 0x92, 0x88, 0xe6, 0x62, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x7f, 0xb7, 0x2a, 0xd1, + 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto similarity index 95% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto rename to 
vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto index 94acfc1b..0ba74def 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package extension_base; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base"; + message BaseMessage { optional int32 height = 1; extensions 4 to 9; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go new file mode 100644 index 00000000..b3732169 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go @@ -0,0 +1,78 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_extra/extension_extra.proto + +package extension_extra // import "github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ExtraMessage struct { + Width *int32 `protobuf:"varint,1,opt,name=width" json:"width,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtraMessage) Reset() { *m = ExtraMessage{} } +func (m *ExtraMessage) String() string { return proto.CompactTextString(m) } +func (*ExtraMessage) ProtoMessage() {} +func (*ExtraMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_extra_83adf2410f49f816, []int{0} +} +func (m *ExtraMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtraMessage.Unmarshal(m, b) +} +func (m *ExtraMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtraMessage.Marshal(b, m, deterministic) +} +func (dst *ExtraMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtraMessage.Merge(dst, src) +} +func (m *ExtraMessage) XXX_Size() int { + return xxx_messageInfo_ExtraMessage.Size(m) +} +func (m *ExtraMessage) XXX_DiscardUnknown() { + xxx_messageInfo_ExtraMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtraMessage proto.InternalMessageInfo + +func (m *ExtraMessage) GetWidth() int32 { + if m != nil && m.Width != nil { + return *m.Width + } + return 0 +} + +func init() { + proto.RegisterType((*ExtraMessage)(nil), "extension_extra.ExtraMessage") +} + +func init() { + proto.RegisterFile("extension_extra/extension_extra.proto", fileDescriptor_extension_extra_83adf2410f49f816) +} + +var fileDescriptor_extension_extra_83adf2410f49f816 = []byte{ + // 133 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4d, 0xad, 0x28, 0x49, + 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x8b, 0x4f, 0xad, 0x28, 0x29, 0x4a, 0xd4, 0x47, 0xe3, 0xeb, 0x15, + 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xf1, 0xa3, 0x09, 0x2b, 0xa9, 0x70, 0xf1, 0xb8, 0x82, 0x18, 
0xbe, + 0xa9, 0xc5, 0xc5, 0x89, 0xe9, 0xa9, 0x42, 0x22, 0x5c, 0xac, 0xe5, 0x99, 0x29, 0x25, 0x19, 0x12, + 0x8c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x93, 0x73, 0x94, 0x63, 0x7a, 0x66, 0x49, 0x46, + 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x3e, 0xd8, 0xc4, + 0xa4, 0xd2, 0x34, 0x08, 0x23, 0x59, 0x37, 0x3d, 0x35, 0x4f, 0x37, 0x3d, 0x5f, 0xbf, 0x24, 0xb5, + 0xb8, 0x24, 0x25, 0xb1, 0x04, 0xc3, 0x05, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf1, 0xec, 0xe3, + 0xb7, 0xa3, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto similarity index 95% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto index fca7f600..1dd03e70 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package extension_extra; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra"; + message ExtraMessage { optional int32 width = 1; } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go index 86e9c118..05247299 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go @@ -33,16 +33,14 @@ package testdata -/* - import ( "bytes" "regexp" "testing" "github.com/golang/protobuf/proto" - base "extension_base.pb" - user "extension_user.pb" + base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base" + user "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user" ) func TestSingleFieldExtension(t *testing.T) { @@ -206,5 +204,3 @@ func main() { []testing.InternalBenchmark{}, []testing.InternalExample{}) } - -*/ diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go new file mode 100644 index 00000000..c7187921 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go @@ -0,0 +1,401 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_user/extension_user.proto + +package extension_user // import "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import extension_base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base" +import extension_extra "github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type UserMessage struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Rank *string `protobuf:"bytes,2,opt,name=rank" json:"rank,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserMessage) Reset() { *m = UserMessage{} } +func (m *UserMessage) String() string { return proto.CompactTextString(m) } +func (*UserMessage) ProtoMessage() {} +func (*UserMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{0} +} +func (m *UserMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserMessage.Unmarshal(m, b) +} +func (m *UserMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserMessage.Marshal(b, m, deterministic) +} +func (dst *UserMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserMessage.Merge(dst, src) +} +func (m *UserMessage) XXX_Size() int { + return xxx_messageInfo_UserMessage.Size(m) +} +func (m *UserMessage) XXX_DiscardUnknown() { + xxx_messageInfo_UserMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_UserMessage proto.InternalMessageInfo + +func (m *UserMessage) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *UserMessage) GetRank() string { + if m != nil && m.Rank != nil { + return *m.Rank + } + return "" +} + +// Extend inside the scope of another type +type LoudMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoudMessage) Reset() { *m = LoudMessage{} } +func (m *LoudMessage) String() string { return proto.CompactTextString(m) } +func (*LoudMessage) ProtoMessage() {} +func (*LoudMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{1} +} + +var extRange_LoudMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*LoudMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_LoudMessage +} +func (m *LoudMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoudMessage.Unmarshal(m, b) +} +func (m *LoudMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoudMessage.Marshal(b, m, deterministic) +} +func (dst *LoudMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoudMessage.Merge(dst, src) +} +func (m *LoudMessage) XXX_Size() int { + return xxx_messageInfo_LoudMessage.Size(m) +} +func (m *LoudMessage) XXX_DiscardUnknown() { + xxx_messageInfo_LoudMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_LoudMessage proto.InternalMessageInfo + +var E_LoudMessage_Volume = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 8, + Name: "extension_user.LoudMessage.volume", + Tag: "varint,8,opt,name=volume", + Filename: "extension_user/extension_user.proto", +} + +// Extend inside the scope of another type, using a message. 
+type LoginMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoginMessage) Reset() { *m = LoginMessage{} } +func (m *LoginMessage) String() string { return proto.CompactTextString(m) } +func (*LoginMessage) ProtoMessage() {} +func (*LoginMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{2} +} +func (m *LoginMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoginMessage.Unmarshal(m, b) +} +func (m *LoginMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoginMessage.Marshal(b, m, deterministic) +} +func (dst *LoginMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoginMessage.Merge(dst, src) +} +func (m *LoginMessage) XXX_Size() int { + return xxx_messageInfo_LoginMessage.Size(m) +} +func (m *LoginMessage) XXX_DiscardUnknown() { + xxx_messageInfo_LoginMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_LoginMessage proto.InternalMessageInfo + +var E_LoginMessage_UserMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*UserMessage)(nil), + Field: 16, + Name: "extension_user.LoginMessage.user_message", + Tag: "bytes,16,opt,name=user_message,json=userMessage", + Filename: "extension_user/extension_user.proto", +} + +type Detail struct { + Color *string `protobuf:"bytes,1,opt,name=color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Detail) Reset() { *m = Detail{} } +func (m *Detail) String() string { return proto.CompactTextString(m) } +func (*Detail) ProtoMessage() {} +func (*Detail) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{3} +} +func (m *Detail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Detail.Unmarshal(m, b) +} +func (m *Detail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Detail.Marshal(b, m, deterministic) +} +func (dst *Detail) XXX_Merge(src proto.Message) { + xxx_messageInfo_Detail.Merge(dst, src) +} +func (m *Detail) XXX_Size() int { + return xxx_messageInfo_Detail.Size(m) +} +func (m *Detail) XXX_DiscardUnknown() { + xxx_messageInfo_Detail.DiscardUnknown(m) +} + +var xxx_messageInfo_Detail proto.InternalMessageInfo + +func (m *Detail) GetColor() string { + if m != nil && m.Color != nil { + return *m.Color + } + return "" +} + +// An extension of an extension +type Announcement struct { + Words *string `protobuf:"bytes,1,opt,name=words" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Announcement) Reset() { *m = Announcement{} } +func (m *Announcement) String() string { return proto.CompactTextString(m) } +func (*Announcement) ProtoMessage() {} +func (*Announcement) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{4} +} +func (m *Announcement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Announcement.Unmarshal(m, b) +} +func (m *Announcement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Announcement.Marshal(b, m, deterministic) +} +func (dst *Announcement) XXX_Merge(src proto.Message) { + xxx_messageInfo_Announcement.Merge(dst, src) +} +func (m *Announcement) XXX_Size() int { + return 
xxx_messageInfo_Announcement.Size(m) +} +func (m *Announcement) XXX_DiscardUnknown() { + xxx_messageInfo_Announcement.DiscardUnknown(m) +} + +var xxx_messageInfo_Announcement proto.InternalMessageInfo + +func (m *Announcement) GetWords() string { + if m != nil && m.Words != nil { + return *m.Words + } + return "" +} + +var E_Announcement_LoudExt = &proto.ExtensionDesc{ + ExtendedType: (*LoudMessage)(nil), + ExtensionType: (*Announcement)(nil), + Field: 100, + Name: "extension_user.Announcement.loud_ext", + Tag: "bytes,100,opt,name=loud_ext,json=loudExt", + Filename: "extension_user/extension_user.proto", +} + +// Something that can be put in a message set. +type OldStyleParcel struct { + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + Height *int32 `protobuf:"varint,2,opt,name=height" json:"height,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldStyleParcel) Reset() { *m = OldStyleParcel{} } +func (m *OldStyleParcel) String() string { return proto.CompactTextString(m) } +func (*OldStyleParcel) ProtoMessage() {} +func (*OldStyleParcel) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_user_af41b5e0bdfb7846, []int{5} +} +func (m *OldStyleParcel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldStyleParcel.Unmarshal(m, b) +} +func (m *OldStyleParcel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldStyleParcel.Marshal(b, m, deterministic) +} +func (dst *OldStyleParcel) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldStyleParcel.Merge(dst, src) +} +func (m *OldStyleParcel) XXX_Size() int { + return xxx_messageInfo_OldStyleParcel.Size(m) +} +func (m *OldStyleParcel) XXX_DiscardUnknown() { + xxx_messageInfo_OldStyleParcel.DiscardUnknown(m) +} + +var xxx_messageInfo_OldStyleParcel proto.InternalMessageInfo + +func (m *OldStyleParcel) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *OldStyleParcel) GetHeight() int32 { + if m != nil && m.Height != nil { + return *m.Height + } + return 0 +} + +var E_OldStyleParcel_MessageSetExtension = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.OldStyleMessage)(nil), + ExtensionType: (*OldStyleParcel)(nil), + Field: 2001, + Name: "extension_user.OldStyleParcel", + Tag: "bytes,2001,opt,name=message_set_extension,json=messageSetExtension", + Filename: "extension_user/extension_user.proto", +} + +var E_UserMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*UserMessage)(nil), + Field: 5, + Name: "extension_user.user_message", + Tag: "bytes,5,opt,name=user_message,json=userMessage", + Filename: "extension_user/extension_user.proto", +} + +var E_ExtraMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*extension_extra.ExtraMessage)(nil), + Field: 9, + Name: "extension_user.extra_message", + Tag: "bytes,9,opt,name=extra_message,json=extraMessage", + Filename: "extension_user/extension_user.proto", +} + +var E_Width = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 6, + Name: "extension_user.width", + Tag: "varint,6,opt,name=width", + Filename: "extension_user/extension_user.proto", +} + +var E_Area = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 7, + Name: "extension_user.area", + Tag: 
"varint,7,opt,name=area", + Filename: "extension_user/extension_user.proto", +} + +var E_Detail = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: ([]*Detail)(nil), + Field: 17, + Name: "extension_user.detail", + Tag: "bytes,17,rep,name=detail", + Filename: "extension_user/extension_user.proto", +} + +func init() { + proto.RegisterType((*UserMessage)(nil), "extension_user.UserMessage") + proto.RegisterType((*LoudMessage)(nil), "extension_user.LoudMessage") + proto.RegisterType((*LoginMessage)(nil), "extension_user.LoginMessage") + proto.RegisterType((*Detail)(nil), "extension_user.Detail") + proto.RegisterType((*Announcement)(nil), "extension_user.Announcement") + proto.RegisterMessageSetType((*OldStyleParcel)(nil), 2001, "extension_user.OldStyleParcel") + proto.RegisterType((*OldStyleParcel)(nil), "extension_user.OldStyleParcel") + proto.RegisterExtension(E_LoudMessage_Volume) + proto.RegisterExtension(E_LoginMessage_UserMessage) + proto.RegisterExtension(E_Announcement_LoudExt) + proto.RegisterExtension(E_OldStyleParcel_MessageSetExtension) + proto.RegisterExtension(E_UserMessage) + proto.RegisterExtension(E_ExtraMessage) + proto.RegisterExtension(E_Width) + proto.RegisterExtension(E_Area) + proto.RegisterExtension(E_Detail) +} + +func init() { + proto.RegisterFile("extension_user/extension_user.proto", fileDescriptor_extension_user_af41b5e0bdfb7846) +} + +var fileDescriptor_extension_user_af41b5e0bdfb7846 = []byte{ + // 492 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0x94, 0x40, + 0x10, 0x0e, 0x6d, 0x8f, 0x5e, 0x87, 0x6b, 0xad, 0xa8, 0xcd, 0xa5, 0x6a, 0x25, 0x18, 0x13, 0x62, + 0xd2, 0x23, 0x62, 0x7c, 0xe1, 0x49, 0x2f, 0xde, 0x93, 0x67, 0x34, 0x54, 0x5f, 0xf4, 0x81, 0xec, + 0xc1, 0xc8, 0x91, 0xc2, 0xae, 0xd9, 0x5d, 0xec, 0xe9, 0xd3, 0xfd, 0x26, 0xff, 0x89, 0xff, 0xc8, + 0xb0, 0x2c, 0x2d, 0x87, 0xc9, 0xc5, 0xbe, 0x90, 0xfd, 0x86, 0x6f, 0xbe, 0x99, 0xfd, 0x66, 0x00, + 0x9e, 0xe2, 0x4a, 0x22, 0x15, 0x39, 0xa3, 0x71, 0x25, 0x90, 0xfb, 0x9b, 0x70, 0xf2, 0x9d, 0x33, + 0xc9, 0xec, 0xa3, 0xcd, 0xe8, 0x69, 0x27, 0x69, 0x41, 0x04, 0xfa, 0x9b, 0xb0, 0x49, 0x3a, 0x7d, + 0x76, 0x13, 0xc5, 0x95, 0xe4, 0xc4, 0xef, 0xe1, 0x86, 0xe6, 0xbe, 0x02, 0xeb, 0xb3, 0x40, 0xfe, + 0x1e, 0x85, 0x20, 0x19, 0xda, 0x36, 0xec, 0x51, 0x52, 0xe2, 0xd8, 0x70, 0x0c, 0xef, 0x20, 0x52, + 0xe7, 0x3a, 0xc6, 0x09, 0xbd, 0x1c, 0xef, 0x34, 0xb1, 0xfa, 0xec, 0xce, 0xc1, 0x9a, 0xb3, 0x2a, + 0xd5, 0x69, 0xcf, 0x87, 0xc3, 0xf4, 0x78, 0xbd, 0x5e, 0xaf, 0x77, 0x82, 0x97, 0x60, 0xfe, 0x60, + 0x45, 0x55, 0xa2, 0xfd, 0x70, 0xd2, 0xeb, 0x6b, 0x4a, 0x04, 0xea, 0x84, 0xf1, 0xd0, 0x31, 0xbc, + 0xc3, 0x48, 0x53, 0xdd, 0x4b, 0x18, 0xcd, 0x59, 0x96, 0x53, 0xfd, 0x36, 0xf8, 0x0a, 0xa3, 0xfa, + 0xa2, 0x71, 0xa9, 0xbb, 0xda, 0x2a, 0x75, 0xec, 0x18, 0x9e, 0x15, 0x74, 0x29, 0xca, 0xba, 0xce, + 0xad, 0x22, 0xab, 0xba, 0x01, 0xee, 0x19, 0x98, 0x6f, 0x51, 0x92, 0xbc, 0xb0, 0xef, 0xc3, 0x20, + 0x61, 0x05, 0xe3, 0xfa, 0xb6, 0x0d, 0x70, 0x7f, 0xc1, 0xe8, 0x0d, 0xa5, 0xac, 0xa2, 0x09, 0x96, + 0x48, 0x65, 0xcd, 0xba, 0x62, 0x3c, 0x15, 0x2d, 0x4b, 0x81, 0xe0, 0x13, 0x0c, 0x0b, 0x56, 0xa5, + 0xb5, 0x97, 0xf6, 0x3f, 0xb5, 0x3b, 0xd6, 0x8c, 0x53, 0xd5, 0xde, 0xa3, 0x3e, 0xa5, 0x5b, 0x22, + 0xda, 0xaf, 0xa5, 0x66, 0x2b, 0xe9, 0xfe, 0x36, 0xe0, 0xe8, 0x43, 0x91, 0x5e, 0xc8, 0x9f, 0x05, + 0x7e, 0x24, 0x3c, 0xc1, 0xa2, 0x33, 0x91, 0x9d, 0xeb, 0x89, 0x9c, 0x80, 0xb9, 0xc4, 0x3c, 0x5b, + 0x4a, 0x35, 0x93, 0x41, 0xa4, 0x51, 0x20, 0xe1, 0x81, 0xb6, 
0x2c, 0x16, 0x28, 0xe3, 0xeb, 0x92, + 0xf6, 0x93, 0xbe, 0x81, 0x6d, 0x91, 0xb6, 0xcb, 0x3f, 0x77, 0x54, 0x9b, 0x67, 0xfd, 0x36, 0x37, + 0x9b, 0x89, 0xee, 0x69, 0xf9, 0x0b, 0x94, 0xb3, 0x96, 0x18, 0xde, 0x6a, 0x5a, 0x83, 0xdb, 0x4d, + 0x2b, 0x8c, 0xe1, 0x50, 0xad, 0xeb, 0xff, 0xa9, 0x1f, 0x28, 0xf5, 0xc7, 0x93, 0xfe, 0xae, 0xcf, + 0xea, 0x67, 0xab, 0x3f, 0xc2, 0x0e, 0x0a, 0x5f, 0xc0, 0xe0, 0x2a, 0x4f, 0xe5, 0x72, 0xbb, 0xb0, + 0xa9, 0x7c, 0x6e, 0x98, 0xa1, 0x0f, 0x7b, 0x84, 0x23, 0xd9, 0x9e, 0xb1, 0xef, 0x18, 0xde, 0x6e, + 0xa4, 0x88, 0xe1, 0x3b, 0x30, 0xd3, 0x66, 0xe5, 0xb6, 0xa6, 0xdc, 0x75, 0x76, 0x3d, 0x2b, 0x38, + 0xe9, 0x7b, 0xd3, 0x6c, 0x6b, 0xa4, 0x25, 0xa6, 0xd3, 0x2f, 0xaf, 0xb3, 0x5c, 0x2e, 0xab, 0xc5, + 0x24, 0x61, 0xa5, 0x9f, 0xb1, 0x82, 0xd0, 0xcc, 0x57, 0x1f, 0xf3, 0xa2, 0xfa, 0xd6, 0x1c, 0x92, + 0xf3, 0x0c, 0xe9, 0x79, 0xc6, 0x7c, 0x89, 0x42, 0xa6, 0x44, 0x92, 0xde, 0x7f, 0xe5, 0x6f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xdf, 0x18, 0x64, 0x15, 0x77, 0x04, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto similarity index 94% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto index ff65873d..033c186c 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto @@ -31,11 +31,13 @@ syntax = "proto2"; -import "extension_base.proto"; -import "extension_extra.proto"; +import "extension_base/extension_base.proto"; +import "extension_extra/extension_extra.proto"; package extension_user; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user"; + message UserMessage { optional string name = 1; optional string rank = 2; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go new file mode 100644 index 00000000..0bb4cbfd --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go @@ -0,0 +1,444 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/grpc.proto + +package testing // import "github.com/golang/protobuf/protoc-gen-go/testdata/grpc" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
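For reference (not part of this change): the regenerated extension_user.pb.go above mostly declares and registers proto2 extension descriptors (E_Width, E_UserMessage, E_Detail, and so on) against extension_base.BaseMessage. A minimal sketch of how the golang/protobuf v1 API consumes such descriptors follows; the extension_base import path is an assumption based on the same testdata layout.

```go
// Minimal sketch (not part of this diff): using the extension descriptors
// registered by extension_user.pb.go with the golang/protobuf v1 proto API.
// The extension_base import path is assumed from the testdata layout.
package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"
	base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base"
	user "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user"
)

func main() {
	msg := new(base.BaseMessage)

	// E_Width extends BaseMessage with an int32 at field number 6.
	if err := proto.SetExtension(msg, user.E_Width, proto.Int32(42)); err != nil {
		log.Fatal(err)
	}

	if proto.HasExtension(msg, user.E_Width) {
		v, err := proto.GetExtension(msg, user.E_Width) // returns interface{}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(*v.(*int32)) // 42
	}
}
```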
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SimpleRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_65bf3902e49ee873, []int{0} +} +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (dst *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(dst, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +type SimpleResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_65bf3902e49ee873, []int{1} +} +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(dst, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +type StreamMsg struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamMsg) Reset() { *m = StreamMsg{} } +func (m *StreamMsg) String() string { return proto.CompactTextString(m) } +func (*StreamMsg) ProtoMessage() {} +func (*StreamMsg) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_65bf3902e49ee873, []int{2} +} +func (m *StreamMsg) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamMsg.Unmarshal(m, b) +} +func (m *StreamMsg) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamMsg.Marshal(b, m, deterministic) +} +func (dst *StreamMsg) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamMsg.Merge(dst, src) +} +func (m *StreamMsg) XXX_Size() int { + return xxx_messageInfo_StreamMsg.Size(m) +} +func (m *StreamMsg) XXX_DiscardUnknown() { + xxx_messageInfo_StreamMsg.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamMsg proto.InternalMessageInfo + +type StreamMsg2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamMsg2) Reset() { *m = StreamMsg2{} } +func (m *StreamMsg2) String() string { return proto.CompactTextString(m) } +func (*StreamMsg2) 
ProtoMessage() {} +func (*StreamMsg2) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_65bf3902e49ee873, []int{3} +} +func (m *StreamMsg2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamMsg2.Unmarshal(m, b) +} +func (m *StreamMsg2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamMsg2.Marshal(b, m, deterministic) +} +func (dst *StreamMsg2) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamMsg2.Merge(dst, src) +} +func (m *StreamMsg2) XXX_Size() int { + return xxx_messageInfo_StreamMsg2.Size(m) +} +func (m *StreamMsg2) XXX_DiscardUnknown() { + xxx_messageInfo_StreamMsg2.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamMsg2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") + proto.RegisterType((*StreamMsg)(nil), "grpc.testing.StreamMsg") + proto.RegisterType((*StreamMsg2)(nil), "grpc.testing.StreamMsg2") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for Test service + +type TestClient interface { + UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) + // This RPC streams from the server only. + Downstream(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (Test_DownstreamClient, error) + // This RPC streams from the client. + Upstream(ctx context.Context, opts ...grpc.CallOption) (Test_UpstreamClient, error) + // This one streams in both directions. + Bidi(ctx context.Context, opts ...grpc.CallOption) (Test_BidiClient, error) +} + +type testClient struct { + cc *grpc.ClientConn +} + +func NewTestClient(cc *grpc.ClientConn) TestClient { + return &testClient{cc} +} + +func (c *testClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) { + out := new(SimpleResponse) + err := grpc.Invoke(ctx, "/grpc.testing.Test/UnaryCall", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testClient) Downstream(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (Test_DownstreamClient, error) { + stream, err := grpc.NewClientStream(ctx, &_Test_serviceDesc.Streams[0], c.cc, "/grpc.testing.Test/Downstream", opts...) + if err != nil { + return nil, err + } + x := &testDownstreamClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Test_DownstreamClient interface { + Recv() (*StreamMsg, error) + grpc.ClientStream +} + +type testDownstreamClient struct { + grpc.ClientStream +} + +func (x *testDownstreamClient) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testClient) Upstream(ctx context.Context, opts ...grpc.CallOption) (Test_UpstreamClient, error) { + stream, err := grpc.NewClientStream(ctx, &_Test_serviceDesc.Streams[1], c.cc, "/grpc.testing.Test/Upstream", opts...) 
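For reference (not part of this change): the new grpc/grpc.pb.go defines a Test service whose generated client exposes a unary call plus server-, client-, and bidirectional-streaming methods. A minimal usage sketch against that client follows, assuming a server registered via RegisterTestServer is listening at the placeholder address.

```go
// Minimal sketch (not part of this diff): driving the generated Test client.
// "localhost:50051" is a placeholder address; the package alias testpb is
// needed because the generated package is named "testing".
package main

import (
	"io"
	"log"

	"golang.org/x/net/context"
	"google.golang.org/grpc"

	testpb "github.com/golang/protobuf/protoc-gen-go/testdata/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	c := testpb.NewTestClient(conn)
	ctx := context.Background()

	// Unary round trip.
	if _, err := c.UnaryCall(ctx, &testpb.SimpleRequest{}); err != nil {
		log.Fatal(err)
	}

	// Server-streaming: read StreamMsg values until EOF.
	stream, err := c.Downstream(ctx, &testpb.SimpleRequest{})
	if err != nil {
		log.Fatal(err)
	}
	for {
		if _, err := stream.Recv(); err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
	}
}
```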
+ if err != nil { + return nil, err + } + x := &testUpstreamClient{stream} + return x, nil +} + +type Test_UpstreamClient interface { + Send(*StreamMsg) error + CloseAndRecv() (*SimpleResponse, error) + grpc.ClientStream +} + +type testUpstreamClient struct { + grpc.ClientStream +} + +func (x *testUpstreamClient) Send(m *StreamMsg) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testUpstreamClient) CloseAndRecv() (*SimpleResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(SimpleResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testClient) Bidi(ctx context.Context, opts ...grpc.CallOption) (Test_BidiClient, error) { + stream, err := grpc.NewClientStream(ctx, &_Test_serviceDesc.Streams[2], c.cc, "/grpc.testing.Test/Bidi", opts...) + if err != nil { + return nil, err + } + x := &testBidiClient{stream} + return x, nil +} + +type Test_BidiClient interface { + Send(*StreamMsg) error + Recv() (*StreamMsg2, error) + grpc.ClientStream +} + +type testBidiClient struct { + grpc.ClientStream +} + +func (x *testBidiClient) Send(m *StreamMsg) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testBidiClient) Recv() (*StreamMsg2, error) { + m := new(StreamMsg2) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// Server API for Test service + +type TestServer interface { + UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) + // This RPC streams from the server only. + Downstream(*SimpleRequest, Test_DownstreamServer) error + // This RPC streams from the client. + Upstream(Test_UpstreamServer) error + // This one streams in both directions. + Bidi(Test_BidiServer) error +} + +func RegisterTestServer(s *grpc.Server, srv TestServer) { + s.RegisterService(&_Test_serviceDesc, srv) +} + +func _Test_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServer).UnaryCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.Test/UnaryCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServer).UnaryCall(ctx, req.(*SimpleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Test_Downstream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SimpleRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TestServer).Downstream(m, &testDownstreamServer{stream}) +} + +type Test_DownstreamServer interface { + Send(*StreamMsg) error + grpc.ServerStream +} + +type testDownstreamServer struct { + grpc.ServerStream +} + +func (x *testDownstreamServer) Send(m *StreamMsg) error { + return x.ServerStream.SendMsg(m) +} + +func _Test_Upstream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServer).Upstream(&testUpstreamServer{stream}) +} + +type Test_UpstreamServer interface { + SendAndClose(*SimpleResponse) error + Recv() (*StreamMsg, error) + grpc.ServerStream +} + +type testUpstreamServer struct { + grpc.ServerStream +} + +func (x *testUpstreamServer) SendAndClose(m *SimpleResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testUpstreamServer) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := 
x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Test_Bidi_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServer).Bidi(&testBidiServer{stream}) +} + +type Test_BidiServer interface { + Send(*StreamMsg2) error + Recv() (*StreamMsg, error) + grpc.ServerStream +} + +type testBidiServer struct { + grpc.ServerStream +} + +func (x *testBidiServer) Send(m *StreamMsg2) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testBidiServer) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Test_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.Test", + HandlerType: (*TestServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UnaryCall", + Handler: _Test_UnaryCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Downstream", + Handler: _Test_Downstream_Handler, + ServerStreams: true, + }, + { + StreamName: "Upstream", + Handler: _Test_Upstream_Handler, + ClientStreams: true, + }, + { + StreamName: "Bidi", + Handler: _Test_Bidi_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc/grpc.proto", +} + +func init() { proto.RegisterFile("grpc/grpc.proto", fileDescriptor_grpc_65bf3902e49ee873) } + +var fileDescriptor_grpc_65bf3902e49ee873 = []byte{ + // 244 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4f, 0x2f, 0x2a, 0x48, + 0xd6, 0x07, 0x11, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x3c, 0x60, 0x76, 0x49, 0x6a, 0x71, + 0x49, 0x66, 0x5e, 0xba, 0x12, 0x3f, 0x17, 0x6f, 0x70, 0x66, 0x6e, 0x41, 0x4e, 0x6a, 0x50, 0x6a, + 0x61, 0x69, 0x6a, 0x71, 0x89, 0x92, 0x00, 0x17, 0x1f, 0x4c, 0xa0, 0xb8, 0x20, 0x3f, 0xaf, 0x38, + 0x55, 0x89, 0x9b, 0x8b, 0x33, 0xb8, 0xa4, 0x28, 0x35, 0x31, 0xd7, 0xb7, 0x38, 0x5d, 0x89, 0x87, + 0x8b, 0x0b, 0xce, 0x31, 0x32, 0x9a, 0xc1, 0xc4, 0xc5, 0x12, 0x92, 0x5a, 0x5c, 0x22, 0xe4, 0xc6, + 0xc5, 0x19, 0x9a, 0x97, 0x58, 0x54, 0xe9, 0x9c, 0x98, 0x93, 0x23, 0x24, 0xad, 0x87, 0x6c, 0x85, + 0x1e, 0x8a, 0xf9, 0x52, 0x32, 0xd8, 0x25, 0x21, 0x76, 0x09, 0xb9, 0x70, 0x71, 0xb9, 0xe4, 0x97, + 0xe7, 0x15, 0x83, 0xad, 0xc0, 0x6f, 0x90, 0x38, 0x9a, 0x24, 0xcc, 0x55, 0x06, 0x8c, 0x42, 0xce, + 0x5c, 0x1c, 0xa1, 0x05, 0x50, 0x33, 0x70, 0x29, 0xc3, 0xef, 0x10, 0x0d, 0x46, 0x21, 0x5b, 0x2e, + 0x16, 0xa7, 0xcc, 0x94, 0x4c, 0xdc, 0x06, 0x48, 0xe0, 0x90, 0x30, 0xd2, 0x60, 0x34, 0x60, 0x74, + 0x72, 0x88, 0xb2, 0x4b, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, + 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x07, 0xc7, 0x40, 0x52, 0x69, 0x1a, 0x84, 0x91, 0xac, 0x9b, 0x9e, + 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x0f, 0x32, 0x22, 0x25, 0xb1, 0x24, 0x11, 0x1c, 0x4d, 0xd6, 0x50, + 0x03, 0x93, 0xd8, 0xc0, 0x8a, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0xb9, 0x95, 0x42, + 0xc2, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto similarity index 96% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto index b8bc41ac..0e5c64a9 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto @@ -33,6 +33,8 @@ syntax = "proto3"; package grpc.testing; +option go_package = 
"github.com/golang/protobuf/protoc-gen-go/testdata/grpc;testing"; + message SimpleRequest { } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden deleted file mode 100644 index 784a4f86..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden +++ /dev/null @@ -1,113 +0,0 @@ -// Code generated by protoc-gen-go. -// source: imp.proto -// DO NOT EDIT! - -package imp - -import proto "github.com/golang/protobuf/proto" -import "math" -import "os" -import imp1 "imp2.pb" - -// Reference proto & math imports to suppress error if they are not otherwise used. -var _ = proto.GetString -var _ = math.Inf - -// Types from public import imp2.proto -type PubliclyImportedMessage imp1.PubliclyImportedMessage - -func (this *PubliclyImportedMessage) Reset() { (*imp1.PubliclyImportedMessage)(this).Reset() } -func (this *PubliclyImportedMessage) String() string { - return (*imp1.PubliclyImportedMessage)(this).String() -} - -// PubliclyImportedMessage from public import imp.proto - -type ImportedMessage_Owner int32 - -const ( - ImportedMessage_DAVE ImportedMessage_Owner = 1 - ImportedMessage_MIKE ImportedMessage_Owner = 2 -) - -var ImportedMessage_Owner_name = map[int32]string{ - 1: "DAVE", - 2: "MIKE", -} -var ImportedMessage_Owner_value = map[string]int32{ - "DAVE": 1, - "MIKE": 2, -} - -// NewImportedMessage_Owner is deprecated. Use x.Enum() instead. -func NewImportedMessage_Owner(x ImportedMessage_Owner) *ImportedMessage_Owner { - e := ImportedMessage_Owner(x) - return &e -} -func (x ImportedMessage_Owner) Enum() *ImportedMessage_Owner { - p := new(ImportedMessage_Owner) - *p = x - return p -} -func (x ImportedMessage_Owner) String() string { - return proto.EnumName(ImportedMessage_Owner_name, int32(x)) -} - -type ImportedMessage struct { - Field *int64 `protobuf:"varint,1,req,name=field" json:"field,omitempty"` - XXX_extensions map[int32][]byte `json:",omitempty"` - XXX_unrecognized []byte `json:",omitempty"` -} - -func (this *ImportedMessage) Reset() { *this = ImportedMessage{} } -func (this *ImportedMessage) String() string { return proto.CompactTextString(this) } - -var extRange_ImportedMessage = []proto.ExtensionRange{ - proto.ExtensionRange{90, 100}, -} - -func (*ImportedMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_ImportedMessage -} -func (this *ImportedMessage) ExtensionMap() map[int32][]byte { - if this.XXX_extensions == nil { - this.XXX_extensions = make(map[int32][]byte) - } - return this.XXX_extensions -} - -type ImportedExtendable struct { - XXX_extensions map[int32][]byte `json:",omitempty"` - XXX_unrecognized []byte `json:",omitempty"` -} - -func (this *ImportedExtendable) Reset() { *this = ImportedExtendable{} } -func (this *ImportedExtendable) String() string { return proto.CompactTextString(this) } - -func (this *ImportedExtendable) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(this.ExtensionMap()) -} -func (this *ImportedExtendable) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, this.ExtensionMap()) -} -// ensure ImportedExtendable satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*ImportedExtendable)(nil) -var _ proto.Unmarshaler = (*ImportedExtendable)(nil) - -var extRange_ImportedExtendable = []proto.ExtensionRange{ - proto.ExtensionRange{100, 536870911}, -} - -func (*ImportedExtendable) ExtensionRangeArray() []proto.ExtensionRange { - return 
extRange_ImportedExtendable -} -func (this *ImportedExtendable) ExtensionMap() map[int32][]byte { - if this.XXX_extensions == nil { - this.XXX_extensions = make(map[int32][]byte) - } - return this.XXX_extensions -} - -func init() { - proto.RegisterEnum("imp.ImportedMessage_Owner", ImportedMessage_Owner_name, ImportedMessage_Owner_value) -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go new file mode 100644 index 00000000..5b780fd5 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go @@ -0,0 +1,110 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/a.proto + +package import_public // import "github.com/golang/protobuf/protoc-gen-go/testdata/import_public" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import sub "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// M from public import import_public/sub/a.proto +type M = sub.M + +// E from public import import_public/sub/a.proto +type E = sub.E + +var E_name = sub.E_name +var E_value = sub.E_value + +const E_ZERO = E(sub.E_ZERO) + +// Ignoring public import of Local from import_public/b.proto + +type Public struct { + M *sub.M `protobuf:"bytes,1,opt,name=m" json:"m,omitempty"` + E sub.E `protobuf:"varint,2,opt,name=e,enum=goproto.test.import_public.sub.E" json:"e,omitempty"` + Local *Local `protobuf:"bytes,3,opt,name=local" json:"local,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Public) Reset() { *m = Public{} } +func (m *Public) String() string { return proto.CompactTextString(m) } +func (*Public) ProtoMessage() {} +func (*Public) Descriptor() ([]byte, []int) { + return fileDescriptor_a_c0314c022b7c17d8, []int{0} +} +func (m *Public) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Public.Unmarshal(m, b) +} +func (m *Public) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Public.Marshal(b, m, deterministic) +} +func (dst *Public) XXX_Merge(src proto.Message) { + xxx_messageInfo_Public.Merge(dst, src) +} +func (m *Public) XXX_Size() int { + return xxx_messageInfo_Public.Size(m) +} +func (m *Public) XXX_DiscardUnknown() { + xxx_messageInfo_Public.DiscardUnknown(m) +} + +var xxx_messageInfo_Public proto.InternalMessageInfo + +func (m *Public) GetM() *sub.M { + if m != nil { + return m.M + } + return nil +} + +func (m *Public) GetE() sub.E { + if m != nil { + return m.E + } + return sub.E_ZERO +} + +func (m *Public) GetLocal() *Local { + if m != nil { + return m.Local + } + return nil +} + +func init() { + proto.RegisterType((*Public)(nil), "goproto.test.import_public.Public") +} + +func init() { proto.RegisterFile("import_public/a.proto", fileDescriptor_a_c0314c022b7c17d8) } + +var fileDescriptor_a_c0314c022b7c17d8 = []byte{ + // 200 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xcd, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x4f, 0xd4, 0x2b, 0x28, 0xca, 0x2f, + 0xc9, 0x17, 0x92, 0x4a, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, 0xf4, 0x50, 0xd4, 0x48, + 0x49, 0xa2, 0x6a, 0x29, 0x2e, 0x4d, 0x82, 0x69, 0x93, 0x42, 0x33, 0x2d, 0x09, 0x22, 0xac, 0xb4, + 0x98, 0x91, 0x8b, 0x2d, 0x00, 0x2c, 0x24, 0xa4, 0xcf, 0xc5, 0x98, 0x2b, 0xc1, 0xa8, 0xc0, 0xa8, + 0xc1, 0x6d, 0xa4, 0xa8, 0x87, 0xdb, 0x12, 0xbd, 0xe2, 0xd2, 0x24, 0x3d, 0xdf, 0x20, 0xc6, 0x5c, + 0x90, 0x86, 0x54, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x3e, 0xc2, 0x1a, 0x5c, 0x83, 0x18, 0x53, 0x85, + 0xcc, 0xb9, 0x58, 0x73, 0xf2, 0x93, 0x13, 0x73, 0x24, 0x98, 0x09, 0xdb, 0xe2, 0x03, 0x52, 0x18, + 0x04, 0x51, 0xef, 0xe4, 0x18, 0x65, 0x9f, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, + 0xab, 0x9f, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x0f, 0xd6, 0x9a, 0x54, 0x9a, 0x06, 0x61, 0x24, + 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, 0xeb, 0x83, 0xcc, 0x4a, 0x49, 0x2c, 0x49, 0xd4, 0x47, + 0x31, 0x2f, 0x80, 0x21, 0x80, 0x31, 0x89, 0x0d, 0xac, 0xd2, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, + 0x70, 0xc5, 0xc3, 0x79, 0x5a, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto new file mode 100644 index 00000000..957ad897 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto @@ -0,0 +1,45 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package goproto.test.import_public; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public"; + +import public "import_public/sub/a.proto"; // Different Go package. +import public "import_public/b.proto"; // Same Go package. 
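For reference (not part of this change): the `import public` statements above are rendered in the generated Go as type aliases (`type M = sub.M`, `type E = sub.E`), which is exactly what the new import_public_test.go later in this diff exercises. A minimal sketch of the resulting interchangeability, assuming Go 1.9+ (hence the build tag on that test):

```go
// Minimal sketch (not part of this diff): "import public" becomes Go type
// aliases, so the re-exported and original types are the same type.
// Requires Go 1.9+, matching the +build tag on import_public_test.go.
package main

import (
	mainpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public"
	subpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub"
)

func main() {
	p := &mainpb.Public{
		M: &mainpb.M{},  // identical type to &subpb.M{} via "type M = sub.M"
		E: subpb.E_ZERO, // same constant as mainpb.E_ZERO
	}
	_ = p
}
```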
+ +message Public { + goproto.test.import_public.sub.M m = 1; + goproto.test.import_public.sub.E e = 2; + Local local = 3; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go new file mode 100644 index 00000000..427aa4f3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go @@ -0,0 +1,87 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/b.proto + +package import_public // import "github.com/golang/protobuf/protoc-gen-go/testdata/import_public" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import sub "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Local struct { + M *sub.M `protobuf:"bytes,1,opt,name=m" json:"m,omitempty"` + E sub.E `protobuf:"varint,2,opt,name=e,enum=goproto.test.import_public.sub.E" json:"e,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Local) Reset() { *m = Local{} } +func (m *Local) String() string { return proto.CompactTextString(m) } +func (*Local) ProtoMessage() {} +func (*Local) Descriptor() ([]byte, []int) { + return fileDescriptor_b_7f20a805fad67bd0, []int{0} +} +func (m *Local) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Local.Unmarshal(m, b) +} +func (m *Local) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Local.Marshal(b, m, deterministic) +} +func (dst *Local) XXX_Merge(src proto.Message) { + xxx_messageInfo_Local.Merge(dst, src) +} +func (m *Local) XXX_Size() int { + return xxx_messageInfo_Local.Size(m) +} +func (m *Local) XXX_DiscardUnknown() { + xxx_messageInfo_Local.DiscardUnknown(m) +} + +var xxx_messageInfo_Local proto.InternalMessageInfo + +func (m *Local) GetM() *sub.M { + if m != nil { + return m.M + } + return nil +} + +func (m *Local) GetE() sub.E { + if m != nil { + return m.E + } + return sub.E_ZERO +} + +func init() { + proto.RegisterType((*Local)(nil), "goproto.test.import_public.Local") +} + +func init() { proto.RegisterFile("import_public/b.proto", fileDescriptor_b_7f20a805fad67bd0) } + +var fileDescriptor_b_7f20a805fad67bd0 = []byte{ + // 174 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xcd, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x4f, 0xd2, 0x2b, 0x28, 0xca, 0x2f, + 0xc9, 0x17, 0x92, 0x4a, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, 0xf4, 0x50, 0xd4, 0x48, + 0x49, 0xa2, 0x6a, 0x29, 0x2e, 0x4d, 0xd2, 0x4f, 0x84, 0x68, 0x53, 0xca, 0xe4, 0x62, 0xf5, 0xc9, + 0x4f, 0x4e, 0xcc, 0x11, 0xd2, 0xe7, 0x62, 0xcc, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x36, 0x52, + 0xd4, 0xc3, 0x6d, 0x96, 0x5e, 0x71, 0x69, 0x92, 0x9e, 0x6f, 0x10, 0x63, 0x2e, 0x48, 0x43, 0xaa, + 0x04, 0x93, 0x02, 0xa3, 0x06, 0x1f, 0x61, 0x0d, 0xae, 0x41, 0x8c, 0xa9, 0x4e, 0x8e, 0x51, 0xf6, + 0xe9, 0x99, 0x25, 0x19, 
0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89, 0x79, + 0xe9, 0xfa, 0x60, 0x6d, 0x49, 0xa5, 0x69, 0x10, 0x46, 0xb2, 0x6e, 0x7a, 0x6a, 0x9e, 0x6e, 0x7a, + 0xbe, 0x3e, 0xc8, 0x9c, 0x94, 0xc4, 0x92, 0x44, 0x7d, 0x14, 0xb3, 0x92, 0xd8, 0xc0, 0xaa, 0x8c, + 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xd6, 0x2b, 0x5f, 0x8e, 0x04, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto new file mode 100644 index 00000000..1dbca3e4 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto @@ -0,0 +1,43 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package goproto.test.import_public; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public"; + +import "import_public/sub/a.proto"; + +message Local { + goproto.test.import_public.sub.M m = 1; + goproto.test.import_public.sub.E e = 2; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go new file mode 100644 index 00000000..4f8f6d24 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go @@ -0,0 +1,100 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/sub/a.proto + +package sub // import "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type E int32 + +const ( + E_ZERO E = 0 +) + +var E_name = map[int32]string{ + 0: "ZERO", +} +var E_value = map[string]int32{ + "ZERO": 0, +} + +func (x E) String() string { + return proto.EnumName(E_name, int32(x)) +} +func (E) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_a_91ca0264a534463a, []int{0} +} + +type M struct { + // Field using a type in the same Go package, but a different source file. + M2 *M2 `protobuf:"bytes,1,opt,name=m2" json:"m2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M) Reset() { *m = M{} } +func (m *M) String() string { return proto.CompactTextString(m) } +func (*M) ProtoMessage() {} +func (*M) Descriptor() ([]byte, []int) { + return fileDescriptor_a_91ca0264a534463a, []int{0} +} +func (m *M) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M.Unmarshal(m, b) +} +func (m *M) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M.Marshal(b, m, deterministic) +} +func (dst *M) XXX_Merge(src proto.Message) { + xxx_messageInfo_M.Merge(dst, src) +} +func (m *M) XXX_Size() int { + return xxx_messageInfo_M.Size(m) +} +func (m *M) XXX_DiscardUnknown() { + xxx_messageInfo_M.DiscardUnknown(m) +} + +var xxx_messageInfo_M proto.InternalMessageInfo + +func (m *M) GetM2() *M2 { + if m != nil { + return m.M2 + } + return nil +} + +func init() { + proto.RegisterType((*M)(nil), "goproto.test.import_public.sub.M") + proto.RegisterEnum("goproto.test.import_public.sub.E", E_name, E_value) +} + +func init() { proto.RegisterFile("import_public/sub/a.proto", fileDescriptor_a_91ca0264a534463a) } + +var fileDescriptor_a_91ca0264a534463a = []byte{ + // 172 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x2f, 0x2e, 0x4d, 0xd2, 0x4f, 0xd4, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x4b, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, + 0xf4, 0x50, 0xd4, 0xe9, 0x15, 0x97, 0x26, 0x49, 0x61, 0xd1, 0x9a, 0x04, 0xd1, 0xaa, 0x64, 0xce, + 0xc5, 0xe8, 0x2b, 0x64, 0xc4, 0xc5, 0x94, 0x6b, 0x24, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x6d, 0xa4, + 0xa4, 0x87, 0xdf, 0x30, 0x3d, 0x5f, 0xa3, 0x20, 0xa6, 0x5c, 0x23, 0x2d, 0x5e, 0x2e, 0x46, 0x57, + 0x21, 0x0e, 0x2e, 0x96, 0x28, 0xd7, 0x20, 0x7f, 0x01, 0x06, 0x27, 0xd7, 0x28, 0xe7, 0xf4, 0xcc, + 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xf4, 0xfc, 0x9c, 0xc4, 0xbc, 0x74, 0x7d, + 0xb0, 0x39, 0x49, 0xa5, 0x69, 0x10, 0x46, 0xb2, 0x6e, 0x7a, 0x6a, 0x9e, 0x6e, 0x7a, 0xbe, 0x3e, + 0xc8, 0xe0, 0x94, 0xc4, 0x92, 0x44, 0x7d, 0x0c, 0x67, 0x25, 0xb1, 0x81, 0x55, 0x1a, 0x03, 0x02, + 0x00, 0x00, 0xff, 0xff, 0x81, 0xcc, 0x07, 0x7d, 0xed, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto new file mode 100644 index 00000000..4494c818 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto @@ -0,0 
+1,47 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package goproto.test.import_public.sub; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub"; + +import "import_public/sub/b.proto"; + +message M { + // Field using a type in the same Go package, but a different source file. + M2 m2 = 1; +} + +enum E { + ZERO = 0; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go new file mode 100644 index 00000000..d57a3bb9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/sub/b.proto + +package sub // import "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_b_eba25180453d86b4, []int{0} +} +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (dst *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(dst, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "goproto.test.import_public.sub.M2") +} + +func init() { proto.RegisterFile("import_public/sub/b.proto", fileDescriptor_b_eba25180453d86b4) } + +var fileDescriptor_b_eba25180453d86b4 = []byte{ + // 127 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x2f, 0x2e, 0x4d, 0xd2, 0x4f, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x4b, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, + 0xf4, 0x50, 0xd4, 0xe9, 0x15, 0x97, 0x26, 0x29, 0xb1, 0x70, 0x31, 0xf9, 0x1a, 0x39, 0xb9, 0x46, + 0x39, 0xa7, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, + 0xe6, 0xa5, 0xeb, 0x83, 0xf5, 0x25, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, 0xe9, 0xa9, 0x79, 0xba, + 0xe9, 0xf9, 0xfa, 0x20, 0x83, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0x31, 0x2c, 0x4d, 0x62, 0x03, 0xab, + 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x64, 0x42, 0xe4, 0xa8, 0x90, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto new file mode 100644 index 00000000..c7299e0f --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto @@ -0,0 +1,39 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package goproto.test.import_public.sub; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub"; + +message M2 { +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go similarity index 69% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go index 156e078d..7ef776bf 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go @@ -29,42 +29,38 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -syntax = "proto2"; +// +build go1.9 -package imp; +package testdata -import "imp2.proto"; -import "imp3.proto"; +import ( + "testing" -message ImportedMessage { - required int64 field = 1; + mainpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public" + subpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" +) - // The forwarded getters for these fields are fiddly to get right. - optional ImportedMessage2 local_msg = 2; - optional ForeignImportedMessage foreign_msg = 3; // in imp3.proto - optional Owner enum_field = 4; - oneof union { - int32 state = 9; - } - - repeated string name = 5; - repeated Owner boss = 6; - repeated ImportedMessage2 memo = 7; - - map msg_map = 8; - - enum Owner { - DAVE = 1; - MIKE = 2; - } - - extensions 90 to 100; -} - -message ImportedMessage2 { -} - -message ImportedExtendable { - option message_set_wire_format = true; - extensions 100 to max; +func TestImportPublicLink(t *testing.T) { + // mainpb.[ME] should be interchangable with subpb.[ME]. + var _ mainpb.M = subpb.M{} + var _ mainpb.E = subpb.E(0) + _ = &mainpb.Public{ + M: &mainpb.M{}, + E: mainpb.E_ZERO, + Local: &mainpb.Local{ + M: &mainpb.M{}, + E: mainpb.E_ZERO, + }, + } + _ = &mainpb.Public{ + M: &subpb.M{}, + E: subpb.E_ZERO, + Local: &mainpb.Local{ + M: &subpb.M{}, + E: subpb.E_ZERO, + }, + } + _ = &mainpb.M{ + M2: &subpb.M2{}, + } } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go new file mode 100644 index 00000000..ca312d6c --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go @@ -0,0 +1,66 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/fmt/m.proto + +package fmt // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
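For reference (not part of this change): the new imports/fmt testdata package takes the Go package name `fmt`, which collides with the standard library import that protoc-gen-go emits inside the generated file; a consumer simply aliases the generated package. A hedged sketch:

```go
// Minimal sketch (not part of this diff): the testdata package is named "fmt",
// so a consumer aliases it to keep the standard library import usable.
package main

import (
	"fmt"

	fmtpb "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt"
)

func main() {
	m := new(fmtpb.M)       // message M from the proto package named fmt
	fmt.Println(m.String()) // standard fmt remains available under its usual name
}
```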
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M) Reset() { *m = M{} } +func (m *M) String() string { return proto.CompactTextString(m) } +func (*M) ProtoMessage() {} +func (*M) Descriptor() ([]byte, []int) { + return fileDescriptor_m_867dd34c461422b8, []int{0} +} +func (m *M) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M.Unmarshal(m, b) +} +func (m *M) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M.Marshal(b, m, deterministic) +} +func (dst *M) XXX_Merge(src proto.Message) { + xxx_messageInfo_M.Merge(dst, src) +} +func (m *M) XXX_Size() int { + return xxx_messageInfo_M.Size(m) +} +func (m *M) XXX_DiscardUnknown() { + xxx_messageInfo_M.DiscardUnknown(m) +} + +var xxx_messageInfo_M proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M)(nil), "fmt.M") +} + +func init() { proto.RegisterFile("imports/fmt/m.proto", fileDescriptor_m_867dd34c461422b8) } + +var fileDescriptor_m_867dd34c461422b8 = []byte{ + // 109 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xce, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x4f, 0xcb, 0x2d, 0xd1, 0xcf, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, + 0x62, 0x4e, 0xcb, 0x2d, 0x51, 0x62, 0xe6, 0x62, 0xf4, 0x75, 0xb2, 0x8f, 0xb2, 0x4d, 0xcf, 0x2c, + 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x07, + 0x2b, 0x4a, 0x2a, 0x4d, 0x83, 0x30, 0x92, 0x75, 0xd3, 0x53, 0xf3, 0x74, 0xd3, 0xf3, 0xf5, 0x4b, + 0x52, 0x8b, 0x4b, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0x91, 0x8c, 0x4c, 0x62, 0x03, 0xab, 0x31, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0xc4, 0xc9, 0xee, 0xbe, 0x68, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto similarity index 89% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto index 58fc7598..142d8cfa 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2012 The Go Authors. All rights reserved. +// Copyright 2018 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,10 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-syntax = "proto2"; - -package imp; - -message ForeignImportedMessage { - optional string tuber = 1; -} +syntax = "proto3"; +package fmt; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt"; +message M {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go new file mode 100644 index 00000000..82ec35e1 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go @@ -0,0 +1,130 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_1/m1.proto + +package test_a_1 // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type E1 int32 + +const ( + E1_E1_ZERO E1 = 0 +) + +var E1_name = map[int32]string{ + 0: "E1_ZERO", +} +var E1_value = map[string]int32{ + "E1_ZERO": 0, +} + +func (x E1) String() string { + return proto.EnumName(E1_name, int32(x)) +} +func (E1) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_m1_56a2598431d21e61, []int{0} +} + +type M1 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1) Reset() { *m = M1{} } +func (m *M1) String() string { return proto.CompactTextString(m) } +func (*M1) ProtoMessage() {} +func (*M1) Descriptor() ([]byte, []int) { + return fileDescriptor_m1_56a2598431d21e61, []int{0} +} +func (m *M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1.Unmarshal(m, b) +} +func (m *M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1.Marshal(b, m, deterministic) +} +func (dst *M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1.Merge(dst, src) +} +func (m *M1) XXX_Size() int { + return xxx_messageInfo_M1.Size(m) +} +func (m *M1) XXX_DiscardUnknown() { + xxx_messageInfo_M1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1 proto.InternalMessageInfo + +type M1_1 struct { + M1 *M1 `protobuf:"bytes,1,opt,name=m1" json:"m1,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1_1) Reset() { *m = M1_1{} } +func (m *M1_1) String() string { return proto.CompactTextString(m) } +func (*M1_1) ProtoMessage() {} +func (*M1_1) Descriptor() ([]byte, []int) { + return fileDescriptor_m1_56a2598431d21e61, []int{1} +} +func (m *M1_1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1_1.Unmarshal(m, b) +} +func (m *M1_1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1_1.Marshal(b, m, deterministic) +} +func (dst *M1_1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1_1.Merge(dst, src) +} +func (m *M1_1) XXX_Size() int { + return xxx_messageInfo_M1_1.Size(m) +} +func (m *M1_1) XXX_DiscardUnknown() { + xxx_messageInfo_M1_1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1_1 proto.InternalMessageInfo + +func (m *M1_1) 
GetM1() *M1 { + if m != nil { + return m.M1 + } + return nil +} + +func init() { + proto.RegisterType((*M1)(nil), "test.a.M1") + proto.RegisterType((*M1_1)(nil), "test.a.M1_1") + proto.RegisterEnum("test.a.E1", E1_name, E1_value) +} + +func init() { proto.RegisterFile("imports/test_a_1/m1.proto", fileDescriptor_m1_56a2598431d21e61) } + +var fileDescriptor_m1_56a2598431d21e61 = []byte{ + // 165 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd4, 0xcf, 0x35, 0xd4, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x1a, 0x2a, 0x29, 0x71, 0xb1, 0xf8, 0x1a, 0xc6, 0x1b, 0x0a, 0x49, 0x71, 0x31, 0xe5, 0x1a, 0x4a, + 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x1b, 0x71, 0xe9, 0x41, 0x94, 0xe8, 0xf9, 0x1a, 0x06, 0x31, 0xe5, + 0x1a, 0x6a, 0x09, 0x72, 0x31, 0xb9, 0x1a, 0x0a, 0x71, 0x73, 0xb1, 0xbb, 0x1a, 0xc6, 0x47, 0xb9, + 0x06, 0xf9, 0x0b, 0x30, 0x38, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, + 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0xcd, 0x4f, 0x2a, 0x4d, 0x83, + 0x30, 0x92, 0x75, 0xd3, 0x53, 0xf3, 0x74, 0xd3, 0xf3, 0xc1, 0x4e, 0x48, 0x49, 0x2c, 0x49, 0xd4, + 0x47, 0x77, 0x53, 0x12, 0x1b, 0x58, 0xa1, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0xcc, 0xae, 0xc9, + 0xcd, 0xae, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto new file mode 100644 index 00000000..da54c1ee --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto @@ -0,0 +1,44 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1"; + +message M1 {} + +message M1_1 { + M1 m1 = 1; +} + +enum E1 { + E1_ZERO = 0; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go new file mode 100644 index 00000000..1b629bf3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_1/m2.proto + +package test_a_1 // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_m2_ccd6356c045a9ac3, []int{0} +} +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (dst *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(dst, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "test.a.M2") +} + +func init() { proto.RegisterFile("imports/test_a_1/m2.proto", fileDescriptor_m2_ccd6356c045a9ac3) } + +var fileDescriptor_m2_ccd6356c045a9ac3 = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd4, 0xcf, 0x35, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x1a, 0x39, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xe3, 0xe0, 0x7e, 0xc0, 0x77, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto similarity index 88% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto index 3bb0632b..49499dc9 100644 --- 
a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2011 The Go Authors. All rights reserved. +// Copyright 2018 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,15 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -syntax = "proto2"; - -package imp; - -message PubliclyImportedMessage { - optional int64 field = 1; -} - -enum PubliclyImportedEnum { - GLASSES = 1; - HAIR = 2; -} +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1"; +message M2 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go new file mode 100644 index 00000000..e3895d2b --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_2/m3.proto + +package test_a_2 // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M3 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M3) Reset() { *m = M3{} } +func (m *M3) String() string { return proto.CompactTextString(m) } +func (*M3) ProtoMessage() {} +func (*M3) Descriptor() ([]byte, []int) { + return fileDescriptor_m3_de310e87d08d4216, []int{0} +} +func (m *M3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M3.Unmarshal(m, b) +} +func (m *M3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M3.Marshal(b, m, deterministic) +} +func (dst *M3) XXX_Merge(src proto.Message) { + xxx_messageInfo_M3.Merge(dst, src) +} +func (m *M3) XXX_Size() int { + return xxx_messageInfo_M3.Size(m) +} +func (m *M3) XXX_DiscardUnknown() { + xxx_messageInfo_M3.DiscardUnknown(m) +} + +var xxx_messageInfo_M3 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M3)(nil), "test.a.M3") +} + +func init() { proto.RegisterFile("imports/test_a_2/m3.proto", fileDescriptor_m3_de310e87d08d4216) } + +var fileDescriptor_m3_de310e87d08d4216 = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd2, 0xcf, 0x35, 0xd6, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x1a, 0x3b, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x23, 0x86, 0x27, 0x47, 0x77, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto new file mode 100644 index 00000000..5e811ef8 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2"; +message M3 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go new file mode 100644 index 00000000..65a3bad2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_2/m4.proto + +package test_a_2 // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M4 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M4) Reset() { *m = M4{} } +func (m *M4) String() string { return proto.CompactTextString(m) } +func (*M4) ProtoMessage() {} +func (*M4) Descriptor() ([]byte, []int) { + return fileDescriptor_m4_da12b386229f3791, []int{0} +} +func (m *M4) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M4.Unmarshal(m, b) +} +func (m *M4) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M4.Marshal(b, m, deterministic) +} +func (dst *M4) XXX_Merge(src proto.Message) { + xxx_messageInfo_M4.Merge(dst, src) +} +func (m *M4) XXX_Size() int { + return xxx_messageInfo_M4.Size(m) +} +func (m *M4) XXX_DiscardUnknown() { + xxx_messageInfo_M4.DiscardUnknown(m) +} + +var xxx_messageInfo_M4 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M4)(nil), "test.a.M4") +} + +func init() { proto.RegisterFile("imports/test_a_2/m4.proto", fileDescriptor_m4_da12b386229f3791) } + +var fileDescriptor_m4_da12b386229f3791 = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd2, 0xcf, 0x35, 0xd1, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x9a, 0x38, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 
0x00, 0x00, 0xff, 0xff, 0x58, 0xcb, 0x10, 0xc8, 0x77, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto new file mode 100644 index 00000000..8f8fe3e1 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2"; +message M4 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go new file mode 100644 index 00000000..831f4149 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_b_1/m1.proto + +package beta // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M1 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1) Reset() { *m = M1{} } +func (m *M1) String() string { return proto.CompactTextString(m) } +func (*M1) ProtoMessage() {} +func (*M1) Descriptor() ([]byte, []int) { + return fileDescriptor_m1_aff127b054aec649, []int{0} +} +func (m *M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1.Unmarshal(m, b) +} +func (m *M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1.Marshal(b, m, deterministic) +} +func (dst *M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1.Merge(dst, src) +} +func (m *M1) XXX_Size() int { + return xxx_messageInfo_M1.Size(m) +} +func (m *M1) XXX_DiscardUnknown() { + xxx_messageInfo_M1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M1)(nil), "test.b.part1.M1") +} + +func init() { proto.RegisterFile("imports/test_b_1/m1.proto", fileDescriptor_m1_aff127b054aec649) } + +var fileDescriptor_m1_aff127b054aec649 = []byte{ + // 125 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8a, 0x37, 0xd4, 0xcf, 0x35, 0xd4, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x09, 0xe9, 0x25, 0xe9, 0x15, 0x24, 0x16, 0x95, + 0x18, 0x2a, 0xb1, 0x70, 0x31, 0xf9, 0x1a, 0x3a, 0x79, 0x46, 0xb9, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x95, 0x27, + 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0x13, 0x53, 0x12, + 0x4b, 0x12, 0xf5, 0xd1, 0xad, 0xb0, 0x4e, 0x4a, 0x2d, 0x49, 0x4c, 0x62, 0x03, 0xab, 0x36, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0x4a, 0xf1, 0x3b, 0x7f, 0x82, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto new file mode 100644 index 00000000..2c35ec4a --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.b.part1; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1;beta"; +message M1 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go new file mode 100644 index 00000000..bc741056 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go @@ -0,0 +1,67 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_b_1/m2.proto + +package beta // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_m2_0c59cab35ba1b0d8, []int{0} +} +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (dst *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(dst, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "test.b.part2.M2") +} + +func init() { proto.RegisterFile("imports/test_b_1/m2.proto", fileDescriptor_m2_0c59cab35ba1b0d8) } + +var fileDescriptor_m2_0c59cab35ba1b0d8 = []byte{ + // 125 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8a, 0x37, 0xd4, 0xcf, 0x35, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x09, 0xe9, 0x25, 0xe9, 0x15, 0x24, 0x16, 0x95, + 0x18, 0x29, 0xb1, 0x70, 0x31, 0xf9, 0x1a, 0x39, 0x79, 0x46, 0xb9, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x95, 0x27, + 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0x13, 0x53, 0x12, + 0x4b, 0x12, 0xf5, 
0xd1, 0xad, 0xb0, 0x4e, 0x4a, 0x2d, 0x49, 0x4c, 0x62, 0x03, 0xab, 0x36, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0x44, 0x29, 0xbe, 0x6d, 0x82, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto new file mode 100644 index 00000000..13723be4 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.b.part2; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1;beta"; +message M2 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go new file mode 100644 index 00000000..72daffdb --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go @@ -0,0 +1,80 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_a1m1.proto + +package imports // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type A1M1 struct { + F *test_a_1.M1 `protobuf:"bytes,1,opt,name=f" json:"f,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *A1M1) Reset() { *m = A1M1{} } +func (m *A1M1) String() string { return proto.CompactTextString(m) } +func (*A1M1) ProtoMessage() {} +func (*A1M1) Descriptor() ([]byte, []int) { + return fileDescriptor_test_import_a1m1_d7f2b5c638a69f6e, []int{0} +} +func (m *A1M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_A1M1.Unmarshal(m, b) +} +func (m *A1M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_A1M1.Marshal(b, m, deterministic) +} +func (dst *A1M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_A1M1.Merge(dst, src) +} +func (m *A1M1) XXX_Size() int { + return xxx_messageInfo_A1M1.Size(m) +} +func (m *A1M1) XXX_DiscardUnknown() { + xxx_messageInfo_A1M1.DiscardUnknown(m) +} + +var xxx_messageInfo_A1M1 proto.InternalMessageInfo + +func (m *A1M1) GetF() *test_a_1.M1 { + if m != nil { + return m.F + } + return nil +} + +func init() { + proto.RegisterType((*A1M1)(nil), "test.A1M1") +} + +func init() { + proto.RegisterFile("imports/test_import_a1m1.proto", fileDescriptor_test_import_a1m1_d7f2b5c638a69f6e) +} + +var fileDescriptor_test_import_a1m1_d7f2b5c638a69f6e = []byte{ + // 149 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcb, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x87, 0x70, 0xe2, 0x13, 0x0d, 0x73, 0x0d, + 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x58, 0x40, 0xe2, 0x52, 0x92, 0x28, 0xaa, 0x12, 0xe3, + 0x0d, 0xf5, 0x61, 0x0a, 0x94, 0x14, 0xb8, 0x58, 0x1c, 0x0d, 0x7d, 0x0d, 0x85, 0x24, 0xb8, 0x18, + 0xd3, 0x24, 0x18, 0x15, 0x18, 0x35, 0xb8, 0x8d, 0xb8, 0xf4, 0x40, 0xca, 0xf4, 0x12, 0xf5, 0x7c, + 0x0d, 0x83, 0x18, 0xd3, 0x9c, 0xac, 0xa3, 0x2c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, + 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xc1, 0x9a, 0x93, 0x4a, 0xd3, 0x20, + 0x8c, 0x64, 0xdd, 0xf4, 0xd4, 0x3c, 0xdd, 0xf4, 0x7c, 0xb0, 0xf9, 0x29, 0x89, 0x25, 0x89, 0xfa, + 0x50, 0x0b, 0x93, 0xd8, 0xc0, 0xf2, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x84, 0x2f, 0x18, + 0x23, 0xa8, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto new file mode 100644 index 00000000..abf07f2a --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto @@ -0,0 +1,42 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +import "imports/test_a_1/m1.proto"; + +message A1M1 { + test.a.M1 f = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go new file mode 100644 index 00000000..9e36ebde --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go @@ -0,0 +1,80 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_a1m2.proto + +package imports // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type A1M2 struct { + F *test_a_1.M2 `protobuf:"bytes,1,opt,name=f" json:"f,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *A1M2) Reset() { *m = A1M2{} } +func (m *A1M2) String() string { return proto.CompactTextString(m) } +func (*A1M2) ProtoMessage() {} +func (*A1M2) Descriptor() ([]byte, []int) { + return fileDescriptor_test_import_a1m2_9a3281ce9464e116, []int{0} +} +func (m *A1M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_A1M2.Unmarshal(m, b) +} +func (m *A1M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_A1M2.Marshal(b, m, deterministic) +} +func (dst *A1M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_A1M2.Merge(dst, src) +} +func (m *A1M2) XXX_Size() int { + return xxx_messageInfo_A1M2.Size(m) +} +func (m *A1M2) XXX_DiscardUnknown() { + xxx_messageInfo_A1M2.DiscardUnknown(m) +} + +var xxx_messageInfo_A1M2 proto.InternalMessageInfo + +func (m *A1M2) GetF() *test_a_1.M2 { + if m != nil { + return m.F + } + return nil +} + +func init() { + proto.RegisterType((*A1M2)(nil), "test.A1M2") +} + +func init() { + proto.RegisterFile("imports/test_import_a1m2.proto", fileDescriptor_test_import_a1m2_9a3281ce9464e116) +} + +var fileDescriptor_test_import_a1m2_9a3281ce9464e116 = []byte{ + // 149 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcb, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x87, 0x70, 0xe2, 0x13, 0x0d, 0x73, 0x8d, + 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x58, 0x40, 0xe2, 0x52, 0x92, 0x28, 0xaa, 0x12, 0xe3, + 0x0d, 0xf5, 0x61, 0x0a, 0x94, 0x14, 0xb8, 0x58, 0x1c, 0x0d, 0x7d, 0x8d, 0x84, 0x24, 0xb8, 0x18, + 0xd3, 0x24, 0x18, 0x15, 0x18, 0x35, 0xb8, 0x8d, 0xb8, 0xf4, 0x40, 0xca, 0xf4, 0x12, 0xf5, 0x7c, + 0x8d, 0x82, 0x18, 0xd3, 0x9c, 0xac, 0xa3, 0x2c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, + 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xc1, 0x9a, 0x93, 0x4a, 0xd3, 0x20, + 0x8c, 0x64, 0xdd, 0xf4, 0xd4, 0x3c, 0xdd, 0xf4, 0x7c, 0xb0, 0xf9, 0x29, 0x89, 0x25, 0x89, 0xfa, + 0x50, 0x0b, 0x93, 0xd8, 0xc0, 0xf2, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x1f, 0x88, 0xfb, + 0xea, 0xa8, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto new file mode 100644 index 00000000..5c53950d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto @@ -0,0 +1,42 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +import "imports/test_a_1/m2.proto"; + +message A1M2 { + test.a.M2 f = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go new file mode 100644 index 00000000..f40e0b73 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_all.proto + +package imports // import "github.com/golang/protobuf/protoc-gen-go/testdata/imports" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import fmt1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt" +import test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" +import test_a_2 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2" +import test_b_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type All struct { + Am1 *test_a_1.M1 `protobuf:"bytes,1,opt,name=am1" json:"am1,omitempty"` + Am2 *test_a_1.M2 `protobuf:"bytes,2,opt,name=am2" json:"am2,omitempty"` + Am3 *test_a_2.M3 `protobuf:"bytes,3,opt,name=am3" json:"am3,omitempty"` + Am4 *test_a_2.M4 `protobuf:"bytes,4,opt,name=am4" json:"am4,omitempty"` + Bm1 *test_b_1.M1 `protobuf:"bytes,5,opt,name=bm1" json:"bm1,omitempty"` + Bm2 *test_b_1.M2 `protobuf:"bytes,6,opt,name=bm2" json:"bm2,omitempty"` + Fmt *fmt1.M `protobuf:"bytes,7,opt,name=fmt" json:"fmt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *All) Reset() { *m = All{} } +func (m *All) String() string { return proto.CompactTextString(m) } +func (*All) ProtoMessage() {} +func (*All) Descriptor() ([]byte, []int) { + return fileDescriptor_test_import_all_b41dc4592e4a4f3b, []int{0} +} +func (m *All) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_All.Unmarshal(m, b) +} +func (m *All) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_All.Marshal(b, m, deterministic) +} +func (dst *All) XXX_Merge(src proto.Message) { + xxx_messageInfo_All.Merge(dst, src) +} +func (m *All) XXX_Size() int { + return xxx_messageInfo_All.Size(m) +} +func (m *All) XXX_DiscardUnknown() { + xxx_messageInfo_All.DiscardUnknown(m) +} + +var xxx_messageInfo_All proto.InternalMessageInfo + +func (m *All) GetAm1() *test_a_1.M1 { + if m != nil { + return m.Am1 + } + return nil +} + +func (m *All) GetAm2() *test_a_1.M2 { + if m != nil { + return m.Am2 + } + return nil +} + +func (m *All) GetAm3() *test_a_2.M3 { + if m != nil { + return m.Am3 + } + return nil +} + +func (m *All) GetAm4() *test_a_2.M4 { + if m != nil { + return m.Am4 + } + return nil +} + +func (m *All) GetBm1() *test_b_1.M1 { + if m != nil { + return m.Bm1 + } + return nil +} + +func (m *All) GetBm2() *test_b_1.M2 { + if m != nil { + return m.Bm2 + } + return nil +} + +func (m *All) GetFmt() *fmt1.M { + if m != nil { + return m.Fmt + } + return nil +} + +func init() { + proto.RegisterType((*All)(nil), "test.All") +} + +func init() { + proto.RegisterFile("imports/test_import_all.proto", fileDescriptor_test_import_all_b41dc4592e4a4f3b) +} + +var fileDescriptor_test_import_all_b41dc4592e4a4f3b = []byte{ + // 258 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0xd0, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x06, 0x60, 0x15, 0x97, 0x20, 0x99, 0x05, 0x85, 0xc5, 0x20, 0x90, 0x50, 0x27, 0x96, 0xda, + 0xb2, 0x9d, 0x05, 0x31, 0xc1, 0xde, 0xa5, 0x23, 0x4b, 0x64, 0x97, 0xc6, 0x54, 0xf2, 0xd5, 0x51, + 0x7a, 0x7d, 0x5e, 0x5e, 0x05, 0xd9, 0x07, 0x12, 0x84, 0x66, 0x4b, 0xfe, 0xef, 0xb7, 0xce, 0x3e, + 0x7e, 0xbf, 0x83, 0x3e, 0x0d, 0x78, 0x50, 0xb8, 0x3d, 0x60, 0x4b, 0x3f, 0xad, 0x8b, 0x51, 0xf6, + 0x43, 0xc2, 0x54, 0xcf, 0x73, 0x7c, 0x7b, 0xf3, 0xa7, 0xe4, 0x5a, 0xad, 0x40, 0x53, 0xe1, 0x14, + 0x99, 0x09, 0x32, 0x0a, 0xec, 0x34, 0x35, 0x27, 0xc9, 0x4f, 0xcf, 0xf2, 0xbf, 0x67, 0x5d, 0xff, + 0x50, 0x07, 0xa8, 0x80, 0xc2, 0xc5, 0xe7, 0x8c, 0xb3, 0x97, 0x18, 0xeb, 0x3b, 0xce, 0x1c, 0x68, + 0x31, 0x7b, 0x98, 0x3d, 0x5e, 0x1a, 0x2e, 0xf3, 0x69, 0xe9, 0xe4, 0x4a, 0xaf, 0x73, 0x4c, 0x6a, + 0xc4, 0xd9, 0x48, 0x4d, 0x56, 0x43, 0x6a, 0x05, 0x1b, 0xa9, 0xcd, 0x6a, 0x49, 0x1b, 0x31, 0x1f, + 0x69, 0x93, 0xb5, 0xa9, 0x17, 0x9c, 0x79, 0xd0, 0xe2, 0xbc, 0xe8, 0x15, 0xa9, 0x97, 0xbd, 0x1b, + 0x50, 0x97, 0xe9, 0x1e, 
0x34, 0x75, 0x8c, 0xa8, 0xfe, 0x77, 0x4c, 0xb9, 0x83, 0x07, 0x53, 0x0b, + 0xce, 0x3a, 0x40, 0x71, 0x51, 0x3a, 0x95, 0xec, 0x00, 0xe5, 0x6a, 0x9d, 0xa3, 0xd7, 0xe7, 0xb7, + 0xa7, 0xb0, 0xc3, 0x8f, 0xa3, 0x97, 0x9b, 0x04, 0x2a, 0xa4, 0xe8, 0xf6, 0x41, 0x95, 0xc7, 0xfb, + 0x63, 0x47, 0x1f, 0x9b, 0x65, 0xd8, 0xee, 0x97, 0x21, 0x95, 0xa5, 0xbd, 0x3b, 0x74, 0xea, 0x7b, + 0x55, 0xbe, 0x2a, 0x6e, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x95, 0x39, 0xa3, 0x82, 0x03, 0x02, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto new file mode 100644 index 00000000..582d722e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto @@ -0,0 +1,58 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +// test_a_1/m*.proto are in the same Go package and proto package. +// test_a_*/*.proto are in different Go packages, but the same proto package. +// test_b_1/*.proto are in the same Go package, but different proto packages. +// fmt/m.proto has a package name which conflicts with "fmt". 
+import "imports/test_a_1/m1.proto"; +import "imports/test_a_1/m2.proto"; +import "imports/test_a_2/m3.proto"; +import "imports/test_a_2/m4.proto"; +import "imports/test_b_1/m1.proto"; +import "imports/test_b_1/m2.proto"; +import "imports/fmt/m.proto"; + +message All { + test.a.M1 am1 = 1; + test.a.M2 am2 = 2; + test.a.M3 am3 = 3; + test.a.M4 am4 = 4; + test.b.part1.M1 bm1 = 5; + test.b.part2.M2 bm2 = 6; + fmt.M fmt = 7; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go index f9b5ccf2..7ec1f2db 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go @@ -36,11 +36,13 @@ package testdata import ( "testing" - mytestpb "./my_test" + importspb "github.com/golang/protobuf/protoc-gen-go/testdata/imports" multipb "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + mytestpb "github.com/golang/protobuf/protoc-gen-go/testdata/my_test" ) func TestLink(t *testing.T) { _ = &multipb.Multi1{} _ = &mytestpb.Request{} + _ = &importspb.All{} } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go new file mode 100644 index 00000000..da0fdf8f --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go @@ -0,0 +1,96 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi1.proto + +package multitest // import "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Multi1 struct { + Multi2 *Multi2 `protobuf:"bytes,1,req,name=multi2" json:"multi2,omitempty"` + Color *Multi2_Color `protobuf:"varint,2,opt,name=color,enum=multitest.Multi2_Color" json:"color,omitempty"` + HatType *Multi3_HatType `protobuf:"varint,3,opt,name=hat_type,json=hatType,enum=multitest.Multi3_HatType" json:"hat_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi1) Reset() { *m = Multi1{} } +func (m *Multi1) String() string { return proto.CompactTextString(m) } +func (*Multi1) ProtoMessage() {} +func (*Multi1) Descriptor() ([]byte, []int) { + return fileDescriptor_multi1_08e50c6822e808b8, []int{0} +} +func (m *Multi1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi1.Unmarshal(m, b) +} +func (m *Multi1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi1.Marshal(b, m, deterministic) +} +func (dst *Multi1) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi1.Merge(dst, src) +} +func (m *Multi1) XXX_Size() int { + return xxx_messageInfo_Multi1.Size(m) +} +func (m *Multi1) XXX_DiscardUnknown() { + xxx_messageInfo_Multi1.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi1 proto.InternalMessageInfo + +func (m *Multi1) GetMulti2() *Multi2 { + if m != nil { + return m.Multi2 + } + return nil +} + +func (m *Multi1) GetColor() Multi2_Color { + if m != nil && m.Color != nil { + return *m.Color + } + return Multi2_BLUE +} + +func (m *Multi1) GetHatType() Multi3_HatType { + if m != nil && m.HatType != nil { + return *m.HatType + } + return Multi3_FEDORA +} + +func init() { + proto.RegisterType((*Multi1)(nil), "multitest.Multi1") +} + +func init() { proto.RegisterFile("multi/multi1.proto", fileDescriptor_multi1_08e50c6822e808b8) } + +var fileDescriptor_multi1_08e50c6822e808b8 = []byte{ + // 200 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0x86, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x14, 0xb2, 0xb4, 0x11, 0x44, 0x1a, 0x45, 0xcc, 0x18, 0x22, 0xa6, 0x34, 0x83, + 0x91, 0x8b, 0xcd, 0x17, 0x6c, 0x86, 0x90, 0x26, 0x17, 0x1b, 0x44, 0xb9, 0x04, 0xa3, 0x02, 0x93, + 0x06, 0xb7, 0x91, 0xa0, 0x1e, 0xdc, 0x38, 0x3d, 0xb0, 0x12, 0xa3, 0x20, 0xa8, 0x02, 0x21, 0x5d, + 0x2e, 0xd6, 0xe4, 0xfc, 0x9c, 0xfc, 0x22, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x3e, 0x23, 0x71, 0x0c, + 0x95, 0x7a, 0xce, 0x20, 0xe9, 0x20, 0x88, 0x2a, 0x21, 0x13, 0x2e, 0x8e, 0x8c, 0xc4, 0x92, 0xf8, + 0x92, 0xca, 0x82, 0x54, 0x09, 0x66, 0xb0, 0x0e, 0x49, 0x74, 0x1d, 0xc6, 0x7a, 0x1e, 0x89, 0x25, + 0x21, 0x95, 0x05, 0xa9, 0x41, 0xec, 0x19, 0x10, 0x86, 0x93, 0x73, 0x94, 0x63, 0x7a, 0x66, 0x49, + 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x3e, 0xd8, + 0xd5, 0x49, 0xa5, 0x69, 0x10, 0x46, 0xb2, 0x6e, 0x7a, 0x6a, 0x9e, 0x6e, 0x7a, 0xbe, 0x3e, 0xc8, + 0xa0, 0x94, 0xc4, 0x92, 0x44, 0x88, 0xe7, 0xac, 0xe1, 0x86, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x60, 0x7d, 0xfc, 0x9f, 0x27, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto index 0da6e0af..d3a32041 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto +++ 
b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto @@ -36,6 +36,8 @@ import "multi/multi3.proto"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi1 { required Multi2 multi2 = 1; optional Multi2.Color color = 2; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go new file mode 100644 index 00000000..b66ce793 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi2.proto + +package multitest // import "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Multi2_Color int32 + +const ( + Multi2_BLUE Multi2_Color = 1 + Multi2_GREEN Multi2_Color = 2 + Multi2_RED Multi2_Color = 3 +) + +var Multi2_Color_name = map[int32]string{ + 1: "BLUE", + 2: "GREEN", + 3: "RED", +} +var Multi2_Color_value = map[string]int32{ + "BLUE": 1, + "GREEN": 2, + "RED": 3, +} + +func (x Multi2_Color) Enum() *Multi2_Color { + p := new(Multi2_Color) + *p = x + return p +} +func (x Multi2_Color) String() string { + return proto.EnumName(Multi2_Color_name, int32(x)) +} +func (x *Multi2_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Multi2_Color_value, data, "Multi2_Color") + if err != nil { + return err + } + *x = Multi2_Color(value) + return nil +} +func (Multi2_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_multi2_c47490ad66d93e67, []int{0, 0} +} + +type Multi2 struct { + RequiredValue *int32 `protobuf:"varint,1,req,name=required_value,json=requiredValue" json:"required_value,omitempty"` + Color *Multi2_Color `protobuf:"varint,2,opt,name=color,enum=multitest.Multi2_Color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi2) Reset() { *m = Multi2{} } +func (m *Multi2) String() string { return proto.CompactTextString(m) } +func (*Multi2) ProtoMessage() {} +func (*Multi2) Descriptor() ([]byte, []int) { + return fileDescriptor_multi2_c47490ad66d93e67, []int{0} +} +func (m *Multi2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi2.Unmarshal(m, b) +} +func (m *Multi2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi2.Marshal(b, m, deterministic) +} +func (dst *Multi2) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi2.Merge(dst, src) +} +func (m *Multi2) XXX_Size() int { + return xxx_messageInfo_Multi2.Size(m) +} +func (m *Multi2) XXX_DiscardUnknown() { + xxx_messageInfo_Multi2.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi2 proto.InternalMessageInfo + +func (m *Multi2) GetRequiredValue() int32 { + if m != nil && m.RequiredValue != nil { + return *m.RequiredValue + } + return 0 +} + +func (m *Multi2) GetColor() 
Multi2_Color { + if m != nil && m.Color != nil { + return *m.Color + } + return Multi2_BLUE +} + +func init() { + proto.RegisterType((*Multi2)(nil), "multitest.Multi2") + proto.RegisterEnum("multitest.Multi2_Color", Multi2_Color_name, Multi2_Color_value) +} + +func init() { proto.RegisterFile("multi/multi2.proto", fileDescriptor_multi2_c47490ad66d93e67) } + +var fileDescriptor_multi2_c47490ad66d93e67 = []byte{ + // 202 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0x46, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x52, 0x2b, 0x23, 0x17, 0x9b, 0x2f, 0x58, 0x4e, 0x48, 0x95, 0x8b, 0xaf, 0x28, + 0xb5, 0xb0, 0x34, 0xb3, 0x28, 0x35, 0x25, 0xbe, 0x2c, 0x31, 0xa7, 0x34, 0x55, 0x82, 0x51, 0x81, + 0x49, 0x83, 0x35, 0x88, 0x17, 0x26, 0x1a, 0x06, 0x12, 0x14, 0xd2, 0xe5, 0x62, 0x4d, 0xce, 0xcf, + 0xc9, 0x2f, 0x92, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x33, 0x12, 0xd7, 0x83, 0x1b, 0xa6, 0x07, 0x31, + 0x48, 0xcf, 0x19, 0x24, 0x1d, 0x04, 0x51, 0xa5, 0xa4, 0xca, 0xc5, 0x0a, 0xe6, 0x0b, 0x71, 0x70, + 0xb1, 0x38, 0xf9, 0x84, 0xba, 0x0a, 0x30, 0x0a, 0x71, 0x72, 0xb1, 0xba, 0x07, 0xb9, 0xba, 0xfa, + 0x09, 0x30, 0x09, 0xb1, 0x73, 0x31, 0x07, 0xb9, 0xba, 0x08, 0x30, 0x3b, 0x39, 0x47, 0x39, 0xa6, + 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, + 0xeb, 0x83, 0x5d, 0x9b, 0x54, 0x9a, 0x06, 0x61, 0x24, 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, + 0xeb, 0x83, 0xec, 0x4a, 0x49, 0x2c, 0x49, 0x84, 0x78, 0xca, 0x1a, 0x6e, 0x3f, 0x20, 0x00, 0x00, + 0xff, 0xff, 0x49, 0x3b, 0x52, 0x44, 0xec, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto index e6bfc71b..ec5b431e 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi2 { required int32 required_value = 1; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go new file mode 100644 index 00000000..f03c350a --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go @@ -0,0 +1,115 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi3.proto + +package multitest // import "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Multi3_HatType int32 + +const ( + Multi3_FEDORA Multi3_HatType = 1 + Multi3_FEZ Multi3_HatType = 2 +) + +var Multi3_HatType_name = map[int32]string{ + 1: "FEDORA", + 2: "FEZ", +} +var Multi3_HatType_value = map[string]int32{ + "FEDORA": 1, + "FEZ": 2, +} + +func (x Multi3_HatType) Enum() *Multi3_HatType { + p := new(Multi3_HatType) + *p = x + return p +} +func (x Multi3_HatType) String() string { + return proto.EnumName(Multi3_HatType_name, int32(x)) +} +func (x *Multi3_HatType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Multi3_HatType_value, data, "Multi3_HatType") + if err != nil { + return err + } + *x = Multi3_HatType(value) + return nil +} +func (Multi3_HatType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_multi3_d55a72b4628b7875, []int{0, 0} +} + +type Multi3 struct { + HatType *Multi3_HatType `protobuf:"varint,1,opt,name=hat_type,json=hatType,enum=multitest.Multi3_HatType" json:"hat_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi3) Reset() { *m = Multi3{} } +func (m *Multi3) String() string { return proto.CompactTextString(m) } +func (*Multi3) ProtoMessage() {} +func (*Multi3) Descriptor() ([]byte, []int) { + return fileDescriptor_multi3_d55a72b4628b7875, []int{0} +} +func (m *Multi3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi3.Unmarshal(m, b) +} +func (m *Multi3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi3.Marshal(b, m, deterministic) +} +func (dst *Multi3) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi3.Merge(dst, src) +} +func (m *Multi3) XXX_Size() int { + return xxx_messageInfo_Multi3.Size(m) +} +func (m *Multi3) XXX_DiscardUnknown() { + xxx_messageInfo_Multi3.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi3 proto.InternalMessageInfo + +func (m *Multi3) GetHatType() Multi3_HatType { + if m != nil && m.HatType != nil { + return *m.HatType + } + return Multi3_FEDORA +} + +func init() { + proto.RegisterType((*Multi3)(nil), "multitest.Multi3") + proto.RegisterEnum("multitest.Multi3_HatType", Multi3_HatType_name, Multi3_HatType_value) +} + +func init() { proto.RegisterFile("multi/multi3.proto", fileDescriptor_multi3_d55a72b4628b7875) } + +var fileDescriptor_multi3_d55a72b4628b7875 = []byte{ + // 170 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0xc6, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x52, 0x1c, 0x17, 0x9b, 0x2f, 0x58, 0x4a, 0xc8, 0x84, 0x8b, 0x23, 0x23, 0xb1, + 0x24, 0xbe, 0xa4, 0xb2, 0x20, 0x55, 0x82, 0x51, 0x81, 0x51, 0x83, 0xcf, 0x48, 0x52, 0x0f, 0xae, + 0x4e, 0x0f, 0xa2, 0x48, 0xcf, 0x23, 0xb1, 0x24, 0xa4, 0xb2, 0x20, 0x35, 0x88, 0x3d, 0x03, 0xc2, + 0x50, 0x92, 0xe3, 0x62, 0x87, 0x8a, 0x09, 0x71, 0x71, 0xb1, 0xb9, 0xb9, 0xba, 0xf8, 0x07, 0x39, + 0x0a, 0x30, 0x0a, 0xb1, 0x73, 0x31, 0xbb, 0xb9, 0x46, 0x09, 0x30, 0x39, 0x39, 0x47, 0x39, 0xa6, + 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, + 0xeb, 0x83, 0x5d, 0x91, 0x54, 0x9a, 0x06, 0x61, 0x24, 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, + 0xeb, 0x83, 0x2c, 0x4a, 0x49, 0x2c, 0x49, 0x84, 0x38, 0xd6, 0x1a, 0x6e, 0x39, 0x20, 0x00, 0x00, + 0xff, 0xff, 0xd5, 0xa4, 0x1a, 0x0e, 0xc4, 0x00, 0x00, 0x00, +} diff --git 
a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto index 146c255b..8690b881 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi3 { enum HatType { FEDORA = 1; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go index 1954e3fb..8cf6a698 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go @@ -1,24 +1,11 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: my_test/test.proto +package test // import "github.com/golang/protobuf/protoc-gen-go/testdata/my_test" + /* -Package my_test is a generated protocol buffer package. - This package holds interesting messages. - -It is generated from these files: - my_test/test.proto - -It has these top-level messages: - Request - Reply - OtherBase - ReplyExtensions - OtherReplyExtensions - OldReply - Communique */ -package my_test import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -69,6 +56,9 @@ func (x *HatType) UnmarshalJSON(data []byte) error { *x = HatType(value) return nil } +func (HatType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{0} +} // This enum represents days of the week. type Days int32 @@ -106,6 +96,9 @@ func (x *Days) UnmarshalJSON(data []byte) error { *x = Days(value) return nil } +func (Days) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{1} +} type Request_Color int32 @@ -142,6 +135,9 @@ func (x *Request_Color) UnmarshalJSON(data []byte) error { *x = Request_Color(value) return nil } +func (Request_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{0, 0} +} type Reply_Entry_Game int32 @@ -175,6 +171,9 @@ func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { *x = Reply_Entry_Game(value) return nil } +func (Reply_Entry_Game) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{1, 0, 0} +} // This is a message that might be sent somewhere. type Request struct { @@ -191,13 +190,35 @@ type Request struct { MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` // This field should not conflict with any getters. 
- GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` - XXX_unrecognized []byte `json:"-"` + GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{0} +} +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (dst *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(dst, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo const Default_Request_Hat HatType = HatType_FEDORA @@ -267,13 +288,35 @@ func (m *Request) GetGetKey_() string { } type Request_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } func (*Request_SomeGroup) ProtoMessage() {} +func (*Request_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{0, 0} +} +func (m *Request_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request_SomeGroup.Unmarshal(m, b) +} +func (m *Request_SomeGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request_SomeGroup.Marshal(b, m, deterministic) +} +func (dst *Request_SomeGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request_SomeGroup.Merge(dst, src) +} +func (m *Request_SomeGroup) XXX_Size() int { + return xxx_messageInfo_Request_SomeGroup.Size(m) +} +func (m *Request_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_Request_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_Request_SomeGroup proto.InternalMessageInfo func (m *Request_SomeGroup) GetGroupField() int32 { if m != nil && m.GroupField != nil { @@ -285,21 +328,43 @@ func (m *Request_SomeGroup) GetGroupField() int32 { type Reply struct { Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Reply) Reset() { *m = Reply{} } func (m *Reply) String() string { return proto.CompactTextString(m) } func (*Reply) ProtoMessage() {} +func (*Reply) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{1} +} var extRange_Reply = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } 
func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Reply } +func (m *Reply) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reply.Unmarshal(m, b) +} +func (m *Reply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reply.Marshal(b, m, deterministic) +} +func (dst *Reply) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reply.Merge(dst, src) +} +func (m *Reply) XXX_Size() int { + return xxx_messageInfo_Reply.Size(m) +} +func (m *Reply) XXX_DiscardUnknown() { + xxx_messageInfo_Reply.DiscardUnknown(m) +} + +var xxx_messageInfo_Reply proto.InternalMessageInfo func (m *Reply) GetFound() []*Reply_Entry { if m != nil { @@ -316,15 +381,37 @@ func (m *Reply) GetCompactKeys() []int32 { } type Reply_Entry struct { - KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` - Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` - XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` - XXX_unrecognized []byte `json:"-"` + KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` + Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` + XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } func (*Reply_Entry) ProtoMessage() {} +func (*Reply_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{1, 0} +} +func (m *Reply_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reply_Entry.Unmarshal(m, b) +} +func (m *Reply_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reply_Entry.Marshal(b, m, deterministic) +} +func (dst *Reply_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reply_Entry.Merge(dst, src) +} +func (m *Reply_Entry) XXX_Size() int { + return xxx_messageInfo_Reply_Entry.Size(m) +} +func (m *Reply_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Reply_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Reply_Entry proto.InternalMessageInfo const Default_Reply_Entry_Value int64 = 7 @@ -350,22 +437,44 @@ func (m *Reply_Entry) GetXMyFieldName_2() int64 { } type OtherBase struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OtherBase) Reset() { *m = OtherBase{} } func (m *OtherBase) String() string { return proto.CompactTextString(m) } func (*OtherBase) ProtoMessage() {} +func (*OtherBase) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{2} +} var extRange_OtherBase = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OtherBase } +func (m *OtherBase) XXX_Unmarshal(b []byte) 
error { + return xxx_messageInfo_OtherBase.Unmarshal(m, b) +} +func (m *OtherBase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherBase.Marshal(b, m, deterministic) +} +func (dst *OtherBase) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherBase.Merge(dst, src) +} +func (m *OtherBase) XXX_Size() int { + return xxx_messageInfo_OtherBase.Size(m) +} +func (m *OtherBase) XXX_DiscardUnknown() { + xxx_messageInfo_OtherBase.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherBase proto.InternalMessageInfo func (m *OtherBase) GetName() string { if m != nil && m.Name != nil { @@ -375,12 +484,34 @@ func (m *OtherBase) GetName() string { } type ReplyExtensions struct { - XXX_unrecognized []byte `json:"-"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } func (*ReplyExtensions) ProtoMessage() {} +func (*ReplyExtensions) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{3} +} +func (m *ReplyExtensions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReplyExtensions.Unmarshal(m, b) +} +func (m *ReplyExtensions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReplyExtensions.Marshal(b, m, deterministic) +} +func (dst *ReplyExtensions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReplyExtensions.Merge(dst, src) +} +func (m *ReplyExtensions) XXX_Size() int { + return xxx_messageInfo_ReplyExtensions.Size(m) +} +func (m *ReplyExtensions) XXX_DiscardUnknown() { + xxx_messageInfo_ReplyExtensions.DiscardUnknown(m) +} + +var xxx_messageInfo_ReplyExtensions proto.InternalMessageInfo var E_ReplyExtensions_Time = &proto.ExtensionDesc{ ExtendedType: (*Reply)(nil), @@ -410,13 +541,35 @@ var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ } type OtherReplyExtensions struct { - Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - XXX_unrecognized []byte `json:"-"` + Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } func (m *OtherReplyExtensions) String() string { return proto.CompactTextString(m) } func (*OtherReplyExtensions) ProtoMessage() {} +func (*OtherReplyExtensions) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{4} +} +func (m *OtherReplyExtensions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OtherReplyExtensions.Unmarshal(m, b) +} +func (m *OtherReplyExtensions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherReplyExtensions.Marshal(b, m, deterministic) +} +func (dst *OtherReplyExtensions) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherReplyExtensions.Merge(dst, src) +} +func (m *OtherReplyExtensions) XXX_Size() int { + return xxx_messageInfo_OtherReplyExtensions.Size(m) +} +func (m *OtherReplyExtensions) XXX_DiscardUnknown() { + xxx_messageInfo_OtherReplyExtensions.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherReplyExtensions proto.InternalMessageInfo func (m *OtherReplyExtensions) GetKey() int32 { if m != nil && m.Key != nil { @@ -426,20 +579,19 @@ func (m *OtherReplyExtensions) GetKey() int32 { } type OldReply struct { - proto.XXX_InternalExtensions `json:"-"` + 
XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OldReply) Reset() { *m = OldReply{} } func (m *OldReply) String() string { return proto.CompactTextString(m) } func (*OldReply) ProtoMessage() {} +func (*OldReply) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{5} +} -func (m *OldReply) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *OldReply) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} func (m *OldReply) MarshalJSON() ([]byte, error) { return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) } @@ -447,17 +599,30 @@ func (m *OldReply) UnmarshalJSON(buf []byte) error { return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) } -// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*OldReply)(nil) -var _ proto.Unmarshaler = (*OldReply)(nil) - var extRange_OldReply = []proto.ExtensionRange{ - {100, 2147483646}, + {Start: 100, End: 2147483646}, } func (*OldReply) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OldReply } +func (m *OldReply) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldReply.Unmarshal(m, b) +} +func (m *OldReply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldReply.Marshal(b, m, deterministic) +} +func (dst *OldReply) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldReply.Merge(dst, src) +} +func (m *OldReply) XXX_Size() int { + return xxx_messageInfo_OldReply.Size(m) +} +func (m *OldReply) XXX_DiscardUnknown() { + xxx_messageInfo_OldReply.DiscardUnknown(m) +} + +var xxx_messageInfo_OldReply proto.InternalMessageInfo type Communique struct { MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` @@ -474,13 +639,35 @@ type Communique struct { // *Communique_Delta_ // *Communique_Msg // *Communique_Somegroup - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique) Reset() { *m = Communique{} } func (m *Communique) String() string { return proto.CompactTextString(m) } func (*Communique) ProtoMessage() {} +func (*Communique) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{6} +} +func (m *Communique) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique.Unmarshal(m, b) +} +func (m *Communique) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique.Marshal(b, m, deterministic) +} +func (dst *Communique) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique.Merge(dst, src) +} +func (m *Communique) XXX_Size() int { + return xxx_messageInfo_Communique.Size(m) +} +func (m *Communique) XXX_DiscardUnknown() { + xxx_messageInfo_Communique.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique proto.InternalMessageInfo type isCommunique_Union interface { isCommunique_Union() @@ -511,7 +698,7 @@ type Communique_Delta_ struct { Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` } type Communique_Msg struct { - Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` + Msg *Reply `protobuf:"bytes,16,opt,name=msg,oneof"` } type 
Communique_Somegroup struct { Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` @@ -661,7 +848,7 @@ func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { b.EncodeVarint(12<<3 | proto.WireVarint) b.EncodeZigzag32(uint64(x.Delta)) case *Communique_Msg: - b.EncodeVarint(13<<3 | proto.WireBytes) + b.EncodeVarint(16<<3 | proto.WireBytes) if err := b.EncodeMessage(x.Msg); err != nil { return err } @@ -737,7 +924,7 @@ func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buf x, err := b.DecodeZigzag32() m.Union = &Communique_Delta_{int32(x)} return true, err - case 13: // union.msg + case 16: // union.msg if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } @@ -763,40 +950,40 @@ func _Communique_OneofSizer(msg proto.Message) (n int) { // union switch x := m.Union.(type) { case *Communique_Number: - n += proto.SizeVarint(5<<3 | proto.WireVarint) + n += 1 // tag and wire n += proto.SizeVarint(uint64(x.Number)) case *Communique_Name: - n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.Name))) n += len(x.Name) case *Communique_Data: - n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.Data))) n += len(x.Data) case *Communique_TempC: - n += proto.SizeVarint(8<<3 | proto.WireFixed64) + n += 1 // tag and wire n += 8 case *Communique_Height: - n += proto.SizeVarint(9<<3 | proto.WireFixed32) + n += 1 // tag and wire n += 4 case *Communique_Today: - n += proto.SizeVarint(10<<3 | proto.WireVarint) + n += 1 // tag and wire n += proto.SizeVarint(uint64(x.Today)) case *Communique_Maybe: - n += proto.SizeVarint(11<<3 | proto.WireVarint) + n += 1 // tag and wire n += 1 case *Communique_Delta_: - n += proto.SizeVarint(12<<3 | proto.WireVarint) + n += 1 // tag and wire n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) case *Communique_Msg: s := proto.Size(x.Msg) - n += proto.SizeVarint(13<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Communique_Somegroup: - n += proto.SizeVarint(14<<3 | proto.WireStartGroup) + n += 1 // tag and wire n += proto.Size(x.Somegroup) - n += proto.SizeVarint(14<<3 | proto.WireEndGroup) + n += 1 // tag and wire case nil: default: panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) @@ -805,13 +992,35 @@ func _Communique_OneofSizer(msg proto.Message) (n int) { } type Communique_SomeGroup struct { - Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` - XXX_unrecognized []byte `json:"-"` + Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} } func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } func (*Communique_SomeGroup) ProtoMessage() {} +func (*Communique_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{6, 0} +} +func (m *Communique_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique_SomeGroup.Unmarshal(m, b) +} +func (m *Communique_SomeGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique_SomeGroup.Marshal(b, m, deterministic) +} +func (dst *Communique_SomeGroup) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_Communique_SomeGroup.Merge(dst, src) +} +func (m *Communique_SomeGroup) XXX_Size() int { + return xxx_messageInfo_Communique_SomeGroup.Size(m) +} +func (m *Communique_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_Communique_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique_SomeGroup proto.InternalMessageInfo func (m *Communique_SomeGroup) GetMember() string { if m != nil && m.Member != nil { @@ -821,12 +1030,34 @@ func (m *Communique_SomeGroup) GetMember() string { } type Communique_Delta struct { - XXX_unrecognized []byte `json:"-"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } func (*Communique_Delta) ProtoMessage() {} +func (*Communique_Delta) Descriptor() ([]byte, []int) { + return fileDescriptor_test_2309d445eee26af7, []int{6, 1} +} +func (m *Communique_Delta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique_Delta.Unmarshal(m, b) +} +func (m *Communique_Delta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique_Delta.Marshal(b, m, deterministic) +} +func (dst *Communique_Delta) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique_Delta.Merge(dst, src) +} +func (m *Communique_Delta) XXX_Size() int { + return xxx_messageInfo_Communique_Delta.Size(m) +} +func (m *Communique_Delta) XXX_DiscardUnknown() { + xxx_messageInfo_Communique_Delta.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique_Delta proto.InternalMessageInfo var E_Tag = &proto.ExtensionDesc{ ExtendedType: (*Reply)(nil), @@ -848,6 +1079,8 @@ var E_Donut = &proto.ExtensionDesc{ func init() { proto.RegisterType((*Request)(nil), "my.test.Request") + proto.RegisterMapType((map[int64]*Reply)(nil), "my.test.Request.MsgMappingEntry") + proto.RegisterMapType((map[int32]string)(nil), "my.test.Request.NameMappingEntry") proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") proto.RegisterType((*Reply)(nil), "my.test.Reply") proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") @@ -868,3 +1101,74 @@ func init() { proto.RegisterExtension(E_Tag) proto.RegisterExtension(E_Donut) } + +func init() { proto.RegisterFile("my_test/test.proto", fileDescriptor_test_2309d445eee26af7) } + +var fileDescriptor_test_2309d445eee26af7 = []byte{ + // 1033 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x55, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0xce, 0xd8, 0x71, 0x7e, 0x4e, 0x42, 0x6b, 0x46, 0x55, 0x6b, 0x05, 0xed, 0xd6, 0x04, 0x8a, + 0x4c, 0xc5, 0xa6, 0xda, 0x80, 0xc4, 0x2a, 0x88, 0xd5, 0x36, 0x3f, 0x6d, 0xaa, 0x6d, 0x12, 0x69, + 0xda, 0x5e, 0xb0, 0x37, 0xd6, 0x34, 0x9e, 0x3a, 0xa6, 0x19, 0x3b, 0x6b, 0x8f, 0x11, 0xbe, 0xeb, + 0x53, 0xc0, 0x6b, 0x70, 0xcf, 0x0b, 0xf1, 0x16, 0x45, 0x33, 0x0e, 0x49, 0xda, 0xa0, 0xbd, 0xb1, + 0x7c, 0xce, 0xf9, 0xce, 0xe7, 0x39, 0x3f, 0xfe, 0x06, 0x30, 0xcf, 0x5c, 0xc1, 0x12, 0x71, 0x22, + 0x1f, 0xad, 0x45, 0x1c, 0x89, 0x08, 0x97, 0x79, 0xd6, 0x92, 0x66, 0x03, 0xf3, 0x74, 0x2e, 0x82, + 0x13, 0xf5, 0x7c, 0x9d, 0x07, 0x9b, 0xff, 0x14, 0xa1, 0x4c, 0xd8, 0xc7, 0x94, 0x25, 0x02, 0x9b, + 0xa0, 0xdf, 0xb3, 0xcc, 0x42, 0xb6, 0xee, 0xe8, 0x44, 0xbe, 0x62, 0x07, 0xf4, 0x59, 0xca, 0x2c, + 0xdd, 0x46, 0xce, 0x4e, 0x7b, 0xbf, 0xb5, 0x24, 0x6a, 0x2d, 0x13, 0x5a, 0xbd, 0x68, 0x1e, 0xc5, + 0x44, 0x42, 0xf0, 0x31, 0xe8, 0x33, 0x2a, 0xac, 0xa2, 
0x42, 0x9a, 0x2b, 0xe4, 0x90, 0x8a, 0xeb, + 0x6c, 0xc1, 0x3a, 0xa5, 0xb3, 0x41, 0x7f, 0x42, 0x4e, 0x89, 0x04, 0xe1, 0x43, 0xa8, 0x78, 0x8c, + 0x7a, 0xf3, 0x20, 0x64, 0x56, 0xd9, 0x46, 0x8e, 0xd6, 0xd1, 0x83, 0xf0, 0x8e, 0xac, 0x9c, 0xf8, + 0x0d, 0x54, 0x93, 0x88, 0x33, 0x3f, 0x8e, 0xd2, 0x85, 0x55, 0xb1, 0x91, 0x03, 0xed, 0xc6, 0xd6, + 0xc7, 0xaf, 0x22, 0xce, 0xce, 0x25, 0x82, 0xac, 0xc1, 0xb8, 0x0f, 0xf5, 0x90, 0x72, 0xe6, 0x72, + 0xba, 0x58, 0x04, 0xa1, 0x6f, 0xed, 0xd8, 0xba, 0x53, 0x6b, 0x7f, 0xb9, 0x95, 0x3c, 0xa6, 0x9c, + 0x8d, 0x72, 0xcc, 0x20, 0x14, 0x71, 0x46, 0x6a, 0xe1, 0xda, 0x83, 0x4f, 0xa1, 0xc6, 0x13, 0x7f, + 0x45, 0xb2, 0xab, 0x48, 0xec, 0x2d, 0x92, 0x51, 0xe2, 0x3f, 0xe1, 0x00, 0xbe, 0x72, 0xe0, 0x3d, + 0x30, 0x62, 0x96, 0x30, 0x61, 0xd5, 0x6d, 0xe4, 0x18, 0x24, 0x37, 0xf0, 0x01, 0x94, 0x7d, 0x26, + 0x5c, 0xd9, 0x65, 0xd3, 0x46, 0x4e, 0x95, 0x94, 0x7c, 0x26, 0xde, 0xb3, 0xac, 0xf1, 0x1d, 0x54, + 0x57, 0xf5, 0xe0, 0x43, 0xa8, 0xa9, 0x6a, 0xdc, 0xbb, 0x80, 0xcd, 0x3d, 0xab, 0xaa, 0x18, 0x40, + 0xb9, 0xce, 0xa4, 0xa7, 0xf1, 0x16, 0xcc, 0xe7, 0x05, 0xac, 0x87, 0x27, 0xc1, 0x6a, 0x78, 0x7b, + 0x60, 0xfc, 0x46, 0xe7, 0x29, 0xb3, 0x34, 0xf5, 0xa9, 0xdc, 0xe8, 0x68, 0x6f, 0x50, 0x63, 0x04, + 0xbb, 0xcf, 0xce, 0xbe, 0x99, 0x8e, 0xf3, 0xf4, 0xaf, 0x37, 0xd3, 0x6b, 0xed, 0x9d, 0x8d, 0xf2, + 0x17, 0xf3, 0x6c, 0x83, 0xae, 0x79, 0x04, 0x86, 0xda, 0x04, 0x5c, 0x06, 0x9d, 0x0c, 0xfa, 0x66, + 0x01, 0x57, 0xc1, 0x38, 0x27, 0x83, 0xc1, 0xd8, 0x44, 0xb8, 0x02, 0xc5, 0xee, 0xe5, 0xcd, 0xc0, + 0xd4, 0x9a, 0x7f, 0x6a, 0x60, 0xa8, 0x5c, 0x7c, 0x0c, 0xc6, 0x5d, 0x94, 0x86, 0x9e, 0x5a, 0xb5, + 0x5a, 0x7b, 0xef, 0x29, 0x75, 0x2b, 0xef, 0x66, 0x0e, 0xc1, 0x47, 0x50, 0x9f, 0x46, 0x7c, 0x41, + 0xa7, 0xaa, 0x6d, 0x89, 0xa5, 0xd9, 0xba, 0x63, 0x74, 0x35, 0x13, 0x91, 0xda, 0xd2, 0xff, 0x9e, + 0x65, 0x49, 0xe3, 0x2f, 0x04, 0x46, 0x5e, 0x49, 0x1f, 0x0e, 0xef, 0x59, 0xe6, 0x8a, 0x19, 0x15, + 0x6e, 0xc8, 0x98, 0x97, 0xb8, 0xaf, 0xdb, 0xdf, 0xff, 0x30, 0xa5, 0x9c, 0xcd, 0xdd, 0x1e, 0x4d, + 0x2e, 0x42, 0xdf, 0x42, 0xb6, 0xe6, 0xe8, 0xe4, 0x8b, 0x7b, 0x96, 0x5d, 0xcf, 0xa8, 0x18, 0x4b, + 0xd0, 0x0a, 0x93, 0x43, 0xf0, 0xc1, 0x66, 0xf5, 0x7a, 0x07, 0xfd, 0xb8, 0x2c, 0x18, 0x7f, 0x03, + 0xa6, 0xcb, 0xb3, 0x7c, 0x34, 0xae, 0xda, 0xb5, 0xb6, 0xfa, 0x3f, 0x74, 0x52, 0x1f, 0x65, 0x6a, + 0x3c, 0x72, 0x34, 0xed, 0xa6, 0x0d, 0xc5, 0x73, 0xca, 0x19, 0xae, 0x43, 0xe5, 0x6c, 0x32, 0xb9, + 0xee, 0x9e, 0x5e, 0x5e, 0x9a, 0x08, 0x03, 0x94, 0xae, 0x07, 0xe3, 0xf1, 0xc5, 0x95, 0xa9, 0x1d, + 0x57, 0x2a, 0x9e, 0xf9, 0xf0, 0xf0, 0xf0, 0xa0, 0x35, 0xbf, 0x85, 0xea, 0x44, 0xcc, 0x58, 0xdc, + 0xa5, 0x09, 0xc3, 0x18, 0x8a, 0x92, 0x56, 0x8d, 0xa2, 0x4a, 0xd4, 0xfb, 0x06, 0xf4, 0x6f, 0x04, + 0xbb, 0xaa, 0x4b, 0x83, 0xdf, 0x05, 0x0b, 0x93, 0x20, 0x0a, 0x93, 0x76, 0x13, 0x8a, 0x22, 0xe0, + 0x0c, 0x3f, 0x1b, 0x91, 0xc5, 0x6c, 0xe4, 0x20, 0xa2, 0x62, 0xed, 0x77, 0x50, 0x9a, 0xd2, 0x38, + 0x8e, 0xc4, 0x16, 0x2a, 0x50, 0xe3, 0xb5, 0x9e, 0x7a, 0xd7, 0xec, 0x64, 0x99, 0xd7, 0xee, 0x82, + 0xe1, 0x45, 0x61, 0x2a, 0x30, 0x5e, 0x41, 0x57, 0x87, 0x56, 0x9f, 0xfa, 0x14, 0x49, 0x9e, 0xda, + 0x74, 0x60, 0x4f, 0xe5, 0x3c, 0x0b, 0x6f, 0x2f, 0x6f, 0xd3, 0x82, 0xca, 0x64, 0xee, 0x29, 0x9c, + 0xaa, 0xfe, 0xf1, 0xf1, 0xf1, 0xb1, 0xdc, 0xd1, 0x2a, 0xa8, 0xf9, 0x87, 0x0e, 0xd0, 0x8b, 0x38, + 0x4f, 0xc3, 0xe0, 0x63, 0xca, 0xf0, 0x4b, 0xa8, 0x71, 0x7a, 0xcf, 0x5c, 0xce, 0xdc, 0x69, 0x9c, + 0x53, 0x54, 0x48, 0x55, 0xba, 0x46, 0xac, 0x17, 0x67, 0xd8, 0x82, 0x52, 0x98, 0xf2, 0x5b, 0x16, + 0x5b, 0x86, 0x64, 0x1f, 0x16, 0xc8, 0xd2, 0xc6, 0x7b, 0xcb, 0x46, 0x97, 0x64, 
0xa3, 0x87, 0x85, + 0xbc, 0xd5, 0xd2, 0xeb, 0x51, 0x41, 0x95, 0x30, 0xd5, 0xa5, 0x57, 0x5a, 0xf8, 0x00, 0x4a, 0x82, + 0xf1, 0x85, 0x3b, 0x55, 0x72, 0x84, 0x86, 0x05, 0x62, 0x48, 0xbb, 0x27, 0xe9, 0x67, 0x2c, 0xf0, + 0x67, 0x42, 0xfd, 0xa6, 0x9a, 0xa4, 0xcf, 0x6d, 0x7c, 0x04, 0x86, 0x88, 0x3c, 0x9a, 0x59, 0xa0, + 0x34, 0xf1, 0xb3, 0x55, 0x6f, 0xfa, 0x34, 0x4b, 0x14, 0x81, 0x8c, 0xe2, 0x7d, 0x30, 0x38, 0xcd, + 0x6e, 0x99, 0x55, 0x93, 0x27, 0x97, 0x7e, 0x65, 0x4a, 0xbf, 0xc7, 0xe6, 0x82, 0x2a, 0x01, 0xf9, + 0x5c, 0xfa, 0x95, 0x89, 0x9b, 0xa0, 0xf3, 0xc4, 0x57, 0xf2, 0xb1, 0xf5, 0x53, 0x0e, 0x0b, 0x44, + 0x06, 0xf1, 0xcf, 0x9b, 0xfa, 0xb9, 0xa3, 0xf4, 0xf3, 0xc5, 0x0a, 0xb9, 0xee, 0xdd, 0x5a, 0x42, + 0x87, 0x85, 0x0d, 0x11, 0x6d, 0x7c, 0xb5, 0x29, 0x46, 0xfb, 0x50, 0xe2, 0x4c, 0xf5, 0x6f, 0x37, + 0x57, 0xac, 0xdc, 0x6a, 0x94, 0xc1, 0xe8, 0xcb, 0x03, 0x75, 0xcb, 0x60, 0xa4, 0x61, 0x10, 0x85, + 0xc7, 0x2f, 0xa1, 0xbc, 0x94, 0x7b, 0xb9, 0xe6, 0xb9, 0xe0, 0x9b, 0x48, 0x8a, 0xc2, 0xd9, 0xe0, + 0x83, 0xa9, 0x1d, 0xb7, 0xa0, 0x28, 0x4b, 0x97, 0xc1, 0xd1, 0x64, 0xdc, 0x3f, 0xfd, 0xc5, 0x44, + 0xb8, 0x06, 0xe5, 0xeb, 0x9b, 0xc1, 0x95, 0x34, 0x34, 0xa9, 0x1a, 0x97, 0x37, 0xe3, 0xfe, 0x85, + 0x89, 0x1a, 0x9a, 0x89, 0x3a, 0x36, 0xe8, 0x82, 0xfa, 0x5b, 0xfb, 0xea, 0xab, 0x63, 0xc8, 0x50, + 0xa7, 0xf7, 0xdf, 0x4a, 0x3e, 0xc7, 0xfc, 0xaa, 0xba, 0xf3, 0xe2, 0xe9, 0xa2, 0xfe, 0xff, 0x4e, + 0x76, 0xdf, 0x7d, 0x78, 0xeb, 0x07, 0x62, 0x96, 0xde, 0xb6, 0xa6, 0x11, 0x3f, 0xf1, 0xa3, 0x39, + 0x0d, 0xfd, 0x13, 0x75, 0x39, 0xde, 0xa6, 0x77, 0xf9, 0xcb, 0xf4, 0x95, 0xcf, 0xc2, 0x57, 0x7e, + 0xa4, 0x6e, 0x55, 0xb9, 0x0f, 0x27, 0xcb, 0x6b, 0xf6, 0x27, 0xf9, 0xf8, 0x37, 0x00, 0x00, 0xff, + 0xff, 0x12, 0xd5, 0x46, 0x00, 0x75, 0x07, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden deleted file mode 100644 index d8717d57..00000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden +++ /dev/null @@ -1,871 +0,0 @@ -// Code generated by protoc-gen-go. -// source: my_test/test.proto -// DO NOT EDIT! - -/* -Package my_test is a generated protocol buffer package. - -This package holds interesting messages. - -It is generated from these files: - my_test/test.proto - -It has these top-level messages: - Request - Reply - OtherBase - ReplyExtensions - OtherReplyExtensions - OldReply - Communique -*/ -package my_test - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type HatType int32 - -const ( - // deliberately skipping 0 - HatType_FEDORA HatType = 1 - HatType_FEZ HatType = 2 -) - -var HatType_name = map[int32]string{ - 1: "FEDORA", - 2: "FEZ", -} -var HatType_value = map[string]int32{ - "FEDORA": 1, - "FEZ": 2, -} - -func (x HatType) Enum() *HatType { - p := new(HatType) - *p = x - return p -} -func (x HatType) String() string { - return proto.EnumName(HatType_name, int32(x)) -} -func (x *HatType) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(HatType_value, data, "HatType") - if err != nil { - return err - } - *x = HatType(value) - return nil -} - -// This enum represents days of the week. -type Days int32 - -const ( - Days_MONDAY Days = 1 - Days_TUESDAY Days = 2 - Days_LUNDI Days = 1 -) - -var Days_name = map[int32]string{ - 1: "MONDAY", - 2: "TUESDAY", - // Duplicate value: 1: "LUNDI", -} -var Days_value = map[string]int32{ - "MONDAY": 1, - "TUESDAY": 2, - "LUNDI": 1, -} - -func (x Days) Enum() *Days { - p := new(Days) - *p = x - return p -} -func (x Days) String() string { - return proto.EnumName(Days_name, int32(x)) -} -func (x *Days) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Days_value, data, "Days") - if err != nil { - return err - } - *x = Days(value) - return nil -} - -type Request_Color int32 - -const ( - Request_RED Request_Color = 0 - Request_GREEN Request_Color = 1 - Request_BLUE Request_Color = 2 -) - -var Request_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var Request_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x Request_Color) Enum() *Request_Color { - p := new(Request_Color) - *p = x - return p -} -func (x Request_Color) String() string { - return proto.EnumName(Request_Color_name, int32(x)) -} -func (x *Request_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Request_Color_value, data, "Request_Color") - if err != nil { - return err - } - *x = Request_Color(value) - return nil -} - -type Reply_Entry_Game int32 - -const ( - Reply_Entry_FOOTBALL Reply_Entry_Game = 1 - Reply_Entry_TENNIS Reply_Entry_Game = 2 -) - -var Reply_Entry_Game_name = map[int32]string{ - 1: "FOOTBALL", - 2: "TENNIS", -} -var Reply_Entry_Game_value = map[string]int32{ - "FOOTBALL": 1, - "TENNIS": 2, -} - -func (x Reply_Entry_Game) Enum() *Reply_Entry_Game { - p := new(Reply_Entry_Game) - *p = x - return p -} -func (x Reply_Entry_Game) String() string { - return proto.EnumName(Reply_Entry_Game_name, int32(x)) -} -func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Reply_Entry_Game_value, data, "Reply_Entry_Game") - if err != nil { - return err - } - *x = Reply_Entry_Game(value) - return nil -} - -// This is a message that might be sent somewhere. -type Request struct { - Key []int64 `protobuf:"varint,1,rep,name=key" json:"key,omitempty"` - // optional imp.ImportedMessage imported_message = 2; - Hue *Request_Color `protobuf:"varint,3,opt,name=hue,enum=my.test.Request_Color" json:"hue,omitempty"` - Hat *HatType `protobuf:"varint,4,opt,name=hat,enum=my.test.HatType,def=1" json:"hat,omitempty"` - // optional imp.ImportedMessage.Owner owner = 6; - Deadline *float32 `protobuf:"fixed32,7,opt,name=deadline,def=inf" json:"deadline,omitempty"` - Somegroup *Request_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` - // This is a map field. 
It will generate map[int32]string. - NameMapping map[int32]string `protobuf:"bytes,14,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - // This is a map field whose value type is a message. - MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` - // This field should not conflict with any getters. - GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Request) Reset() { *m = Request{} } -func (m *Request) String() string { return proto.CompactTextString(m) } -func (*Request) ProtoMessage() {} - -const Default_Request_Hat HatType = HatType_FEDORA - -var Default_Request_Deadline float32 = float32(math.Inf(1)) - -func (m *Request) GetKey() []int64 { - if m != nil { - return m.Key - } - return nil -} - -func (m *Request) GetHue() Request_Color { - if m != nil && m.Hue != nil { - return *m.Hue - } - return Request_RED -} - -func (m *Request) GetHat() HatType { - if m != nil && m.Hat != nil { - return *m.Hat - } - return Default_Request_Hat -} - -func (m *Request) GetDeadline() float32 { - if m != nil && m.Deadline != nil { - return *m.Deadline - } - return Default_Request_Deadline -} - -func (m *Request) GetSomegroup() *Request_SomeGroup { - if m != nil { - return m.Somegroup - } - return nil -} - -func (m *Request) GetNameMapping() map[int32]string { - if m != nil { - return m.NameMapping - } - return nil -} - -func (m *Request) GetMsgMapping() map[int64]*Reply { - if m != nil { - return m.MsgMapping - } - return nil -} - -func (m *Request) GetReset_() int32 { - if m != nil && m.Reset_ != nil { - return *m.Reset_ - } - return 0 -} - -func (m *Request) GetGetKey_() string { - if m != nil && m.GetKey_ != nil { - return *m.GetKey_ - } - return "" -} - -type Request_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } -func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*Request_SomeGroup) ProtoMessage() {} - -func (m *Request_SomeGroup) GetGroupField() int32 { - if m != nil && m.GroupField != nil { - return *m.GroupField - } - return 0 -} - -type Reply struct { - Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` - CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Reply) Reset() { *m = Reply{} } -func (m *Reply) String() string { return proto.CompactTextString(m) } -func (*Reply) ProtoMessage() {} - -var extRange_Reply = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_Reply -} - -func (m *Reply) GetFound() []*Reply_Entry { - if m != nil { - return m.Found - } - return nil -} - -func (m *Reply) GetCompactKeys() []int32 { - if m != nil { - return m.CompactKeys - } - return nil -} - -type Reply_Entry struct { - KeyThatNeeds_1234Camel_CasIng *int64 
`protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` - Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` - XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } -func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } -func (*Reply_Entry) ProtoMessage() {} - -const Default_Reply_Entry_Value int64 = 7 - -func (m *Reply_Entry) GetKeyThatNeeds_1234Camel_CasIng() int64 { - if m != nil && m.KeyThatNeeds_1234Camel_CasIng != nil { - return *m.KeyThatNeeds_1234Camel_CasIng - } - return 0 -} - -func (m *Reply_Entry) GetValue() int64 { - if m != nil && m.Value != nil { - return *m.Value - } - return Default_Reply_Entry_Value -} - -func (m *Reply_Entry) GetXMyFieldName_2() int64 { - if m != nil && m.XMyFieldName_2 != nil { - return *m.XMyFieldName_2 - } - return 0 -} - -type OtherBase struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherBase) Reset() { *m = OtherBase{} } -func (m *OtherBase) String() string { return proto.CompactTextString(m) } -func (*OtherBase) ProtoMessage() {} - -var extRange_OtherBase = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OtherBase -} - -func (m *OtherBase) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -type ReplyExtensions struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } -func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } -func (*ReplyExtensions) ProtoMessage() {} - -var E_ReplyExtensions_Time = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*float64)(nil), - Field: 101, - Name: "my.test.ReplyExtensions.time", - Tag: "fixed64,101,opt,name=time", - Filename: "my_test/test.proto", -} - -var E_ReplyExtensions_Carrot = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*ReplyExtensions)(nil), - Field: 105, - Name: "my.test.ReplyExtensions.carrot", - Tag: "bytes,105,opt,name=carrot", - Filename: "my_test/test.proto", -} - -var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ - ExtendedType: (*OtherBase)(nil), - ExtensionType: (*ReplyExtensions)(nil), - Field: 101, - Name: "my.test.ReplyExtensions.donut", - Tag: "bytes,101,opt,name=donut", - Filename: "my_test/test.proto", -} - -type OtherReplyExtensions struct { - Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } -func (m *OtherReplyExtensions) String() string { return proto.CompactTextString(m) } -func (*OtherReplyExtensions) ProtoMessage() {} - -func (m *OtherReplyExtensions) GetKey() int32 { - if m != nil && m.Key != nil { - return *m.Key - } - return 0 -} - -type OldReply struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OldReply) Reset() { *m = OldReply{} } -func (m *OldReply) String() string { return proto.CompactTextString(m) } -func (*OldReply) ProtoMessage() {} - -func (m *OldReply) Marshal() ([]byte, error) { - return 
proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *OldReply) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} -func (m *OldReply) MarshalJSON() ([]byte, error) { - return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) -} -func (m *OldReply) UnmarshalJSON(buf []byte) error { - return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) -} - -// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*OldReply)(nil) -var _ proto.Unmarshaler = (*OldReply)(nil) - -var extRange_OldReply = []proto.ExtensionRange{ - {100, 2147483646}, -} - -func (*OldReply) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OldReply -} - -type Communique struct { - MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` - // This is a oneof, called "union". - // - // Types that are valid to be assigned to Union: - // *Communique_Number - // *Communique_Name - // *Communique_Data - // *Communique_TempC - // *Communique_Height - // *Communique_Today - // *Communique_Maybe - // *Communique_Delta_ - // *Communique_Msg - // *Communique_Somegroup - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique) Reset() { *m = Communique{} } -func (m *Communique) String() string { return proto.CompactTextString(m) } -func (*Communique) ProtoMessage() {} - -type isCommunique_Union interface { - isCommunique_Union() -} - -type Communique_Number struct { - Number int32 `protobuf:"varint,5,opt,name=number,oneof"` -} -type Communique_Name struct { - Name string `protobuf:"bytes,6,opt,name=name,oneof"` -} -type Communique_Data struct { - Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` -} -type Communique_TempC struct { - TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` -} -type Communique_Height struct { - Height float32 `protobuf:"fixed32,9,opt,name=height,oneof"` -} -type Communique_Today struct { - Today Days `protobuf:"varint,10,opt,name=today,enum=my.test.Days,oneof"` -} -type Communique_Maybe struct { - Maybe bool `protobuf:"varint,11,opt,name=maybe,oneof"` -} -type Communique_Delta_ struct { - Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` -} -type Communique_Msg struct { - Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` -} -type Communique_Somegroup struct { - Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` -} - -func (*Communique_Number) isCommunique_Union() {} -func (*Communique_Name) isCommunique_Union() {} -func (*Communique_Data) isCommunique_Union() {} -func (*Communique_TempC) isCommunique_Union() {} -func (*Communique_Height) isCommunique_Union() {} -func (*Communique_Today) isCommunique_Union() {} -func (*Communique_Maybe) isCommunique_Union() {} -func (*Communique_Delta_) isCommunique_Union() {} -func (*Communique_Msg) isCommunique_Union() {} -func (*Communique_Somegroup) isCommunique_Union() {} - -func (m *Communique) GetUnion() isCommunique_Union { - if m != nil { - return m.Union - } - return nil -} - -func (m *Communique) GetMakeMeCry() bool { - if m != nil && m.MakeMeCry != nil { - return *m.MakeMeCry - } - return false -} - -func (m *Communique) GetNumber() int32 { - if x, ok := m.GetUnion().(*Communique_Number); ok { - return x.Number - } - return 0 -} - -func (m *Communique) GetName() string { - if x, ok := m.GetUnion().(*Communique_Name); ok { - return x.Name - } - return "" -} - -func (m 
*Communique) GetData() []byte { - if x, ok := m.GetUnion().(*Communique_Data); ok { - return x.Data - } - return nil -} - -func (m *Communique) GetTempC() float64 { - if x, ok := m.GetUnion().(*Communique_TempC); ok { - return x.TempC - } - return 0 -} - -func (m *Communique) GetHeight() float32 { - if x, ok := m.GetUnion().(*Communique_Height); ok { - return x.Height - } - return 0 -} - -func (m *Communique) GetToday() Days { - if x, ok := m.GetUnion().(*Communique_Today); ok { - return x.Today - } - return Days_MONDAY -} - -func (m *Communique) GetMaybe() bool { - if x, ok := m.GetUnion().(*Communique_Maybe); ok { - return x.Maybe - } - return false -} - -func (m *Communique) GetDelta() int32 { - if x, ok := m.GetUnion().(*Communique_Delta_); ok { - return x.Delta - } - return 0 -} - -func (m *Communique) GetMsg() *Reply { - if x, ok := m.GetUnion().(*Communique_Msg); ok { - return x.Msg - } - return nil -} - -func (m *Communique) GetSomegroup() *Communique_SomeGroup { - if x, ok := m.GetUnion().(*Communique_Somegroup); ok { - return x.Somegroup - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ - (*Communique_Number)(nil), - (*Communique_Name)(nil), - (*Communique_Data)(nil), - (*Communique_TempC)(nil), - (*Communique_Height)(nil), - (*Communique_Today)(nil), - (*Communique_Maybe)(nil), - (*Communique_Delta_)(nil), - (*Communique_Msg)(nil), - (*Communique_Somegroup)(nil), - } -} - -func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - b.EncodeVarint(5<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Number)) - case *Communique_Name: - b.EncodeVarint(6<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Name) - case *Communique_Data: - b.EncodeVarint(7<<3 | proto.WireBytes) - b.EncodeRawBytes(x.Data) - case *Communique_TempC: - b.EncodeVarint(8<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.TempC)) - case *Communique_Height: - b.EncodeVarint(9<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.Height))) - case *Communique_Today: - b.EncodeVarint(10<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Today)) - case *Communique_Maybe: - t := uint64(0) - if x.Maybe { - t = 1 - } - b.EncodeVarint(11<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Communique_Delta_: - b.EncodeVarint(12<<3 | proto.WireVarint) - b.EncodeZigzag32(uint64(x.Delta)) - case *Communique_Msg: - b.EncodeVarint(13<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Msg); err != nil { - return err - } - case *Communique_Somegroup: - b.EncodeVarint(14<<3 | proto.WireStartGroup) - if err := b.Marshal(x.Somegroup); err != nil { - return err - } - b.EncodeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - return fmt.Errorf("Communique.Union has unexpected type %T", x) - } - return nil -} - -func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Communique) - switch tag { - case 5: // union.number - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Number{int32(x)} - return true, err - case 6: // 
union.name - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Communique_Name{x} - return true, err - case 7: // union.data - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Communique_Data{x} - return true, err - case 8: // union.temp_c - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Communique_TempC{math.Float64frombits(x)} - return true, err - case 9: // union.height - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Communique_Height{math.Float32frombits(uint32(x))} - return true, err - case 10: // union.today - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Today{Days(x)} - return true, err - case 11: // union.maybe - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Maybe{x != 0} - return true, err - case 12: // union.delta - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag32() - m.Union = &Communique_Delta_{int32(x)} - return true, err - case 13: // union.msg - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Reply) - err := b.DecodeMessage(msg) - m.Union = &Communique_Msg{msg} - return true, err - case 14: // union.somegroup - if wire != proto.WireStartGroup { - return true, proto.ErrInternalBadWireType - } - msg := new(Communique_SomeGroup) - err := b.DecodeGroup(msg) - m.Union = &Communique_Somegroup{msg} - return true, err - default: - return false, nil - } -} - -func _Communique_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - n += proto.SizeVarint(5<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Number)) - case *Communique_Name: - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Name))) - n += len(x.Name) - case *Communique_Data: - n += proto.SizeVarint(7<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Data))) - n += len(x.Data) - case *Communique_TempC: - n += proto.SizeVarint(8<<3 | proto.WireFixed64) - n += 8 - case *Communique_Height: - n += proto.SizeVarint(9<<3 | proto.WireFixed32) - n += 4 - case *Communique_Today: - n += proto.SizeVarint(10<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Today)) - case *Communique_Maybe: - n += proto.SizeVarint(11<<3 | proto.WireVarint) - n += 1 - case *Communique_Delta_: - n += proto.SizeVarint(12<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) - case *Communique_Msg: - s := proto.Size(x.Msg) - n += proto.SizeVarint(13<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Communique_Somegroup: - n += proto.SizeVarint(14<<3 | proto.WireStartGroup) - n += proto.Size(x.Somegroup) - n += proto.SizeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type Communique_SomeGroup struct { - Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} 
} -func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*Communique_SomeGroup) ProtoMessage() {} - -func (m *Communique_SomeGroup) GetMember() string { - if m != nil && m.Member != nil { - return *m.Member - } - return "" -} - -type Communique_Delta struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } -func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } -func (*Communique_Delta) ProtoMessage() {} - -var E_Tag = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*string)(nil), - Field: 103, - Name: "my.test.tag", - Tag: "bytes,103,opt,name=tag", - Filename: "my_test/test.proto", -} - -var E_Donut = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*OtherReplyExtensions)(nil), - Field: 106, - Name: "my.test.donut", - Tag: "bytes,106,opt,name=donut", - Filename: "my_test/test.proto", -} - -func init() { - proto.RegisterType((*Request)(nil), "my.test.Request") - proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") - proto.RegisterType((*Reply)(nil), "my.test.Reply") - proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") - proto.RegisterType((*OtherBase)(nil), "my.test.OtherBase") - proto.RegisterType((*ReplyExtensions)(nil), "my.test.ReplyExtensions") - proto.RegisterType((*OtherReplyExtensions)(nil), "my.test.OtherReplyExtensions") - proto.RegisterType((*OldReply)(nil), "my.test.OldReply") - proto.RegisterType((*Communique)(nil), "my.test.Communique") - proto.RegisterType((*Communique_SomeGroup)(nil), "my.test.Communique.SomeGroup") - proto.RegisterType((*Communique_Delta)(nil), "my.test.Communique.Delta") - proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) - proto.RegisterEnum("my.test.Days", Days_name, Days_value) - proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) - proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) - proto.RegisterExtension(E_ReplyExtensions_Time) - proto.RegisterExtension(E_ReplyExtensions_Carrot) - proto.RegisterExtension(E_ReplyExtensions_Donut) - proto.RegisterExtension(E_Tag) - proto.RegisterExtension(E_Donut) -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto index 8e709463..1ef3fd02 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto @@ -34,6 +34,8 @@ syntax = "proto2"; // This package holds interesting messages. package my.test; // dotted package name +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/my_test;test"; + //import "imp.proto"; import "multi/multi1.proto"; // unused import @@ -145,7 +147,7 @@ message Communique { Days today = 10; bool maybe = 11; sint32 delta = 12; // name will conflict with Delta below - Reply msg = 13; + Reply msg = 16; // requires two bytes to encode field tag group SomeGroup = 14 { optional string member = 15; } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go new file mode 100644 index 00000000..3b0ad849 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go @@ -0,0 +1,196 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: proto3/proto3.proto + +package proto3 // import "github.com/golang/protobuf/protoc-gen-go/testdata/proto3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Request_Flavour int32 + +const ( + Request_SWEET Request_Flavour = 0 + Request_SOUR Request_Flavour = 1 + Request_UMAMI Request_Flavour = 2 + Request_GOPHERLICIOUS Request_Flavour = 3 +) + +var Request_Flavour_name = map[int32]string{ + 0: "SWEET", + 1: "SOUR", + 2: "UMAMI", + 3: "GOPHERLICIOUS", +} +var Request_Flavour_value = map[string]int32{ + "SWEET": 0, + "SOUR": 1, + "UMAMI": 2, + "GOPHERLICIOUS": 3, +} + +func (x Request_Flavour) String() string { + return proto.EnumName(Request_Flavour_name, int32(x)) +} +func (Request_Flavour) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_proto3_a752e09251f17e01, []int{0, 0} +} + +type Request struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Key []int64 `protobuf:"varint,2,rep,packed,name=key" json:"key,omitempty"` + Taste Request_Flavour `protobuf:"varint,3,opt,name=taste,enum=proto3.Request_Flavour" json:"taste,omitempty"` + Book *Book `protobuf:"bytes,4,opt,name=book" json:"book,omitempty"` + Unpacked []int64 `protobuf:"varint,5,rep,name=unpacked" json:"unpacked,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_a752e09251f17e01, []int{0} +} +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (dst *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(dst, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo + +func (m *Request) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Request) GetKey() []int64 { + if m != nil { + return m.Key + } + return nil +} + +func (m *Request) GetTaste() Request_Flavour { + if m != nil { + return m.Taste + } + return Request_SWEET +} + +func (m *Request) GetBook() *Book { + if m != nil { + return m.Book + } + return nil +} + +func (m *Request) GetUnpacked() []int64 { + if m != nil { + return m.Unpacked + } + return nil +} + +type Book struct { + Title string `protobuf:"bytes,1,opt,name=title" json:"title,omitempty"` + RawData []byte `protobuf:"bytes,2,opt,name=raw_data,json=rawData,proto3" json:"raw_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Book) Reset() { *m = Book{} } 
+func (m *Book) String() string { return proto.CompactTextString(m) } +func (*Book) ProtoMessage() {} +func (*Book) Descriptor() ([]byte, []int) { + return fileDescriptor_proto3_a752e09251f17e01, []int{1} +} +func (m *Book) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Book.Unmarshal(m, b) +} +func (m *Book) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Book.Marshal(b, m, deterministic) +} +func (dst *Book) XXX_Merge(src proto.Message) { + xxx_messageInfo_Book.Merge(dst, src) +} +func (m *Book) XXX_Size() int { + return xxx_messageInfo_Book.Size(m) +} +func (m *Book) XXX_DiscardUnknown() { + xxx_messageInfo_Book.DiscardUnknown(m) +} + +var xxx_messageInfo_Book proto.InternalMessageInfo + +func (m *Book) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Book) GetRawData() []byte { + if m != nil { + return m.RawData + } + return nil +} + +func init() { + proto.RegisterType((*Request)(nil), "proto3.Request") + proto.RegisterType((*Book)(nil), "proto3.Book") + proto.RegisterEnum("proto3.Request_Flavour", Request_Flavour_name, Request_Flavour_value) +} + +func init() { proto.RegisterFile("proto3/proto3.proto", fileDescriptor_proto3_a752e09251f17e01) } + +var fileDescriptor_proto3_a752e09251f17e01 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x3c, 0x90, 0xcf, 0x4e, 0xf2, 0x40, + 0x14, 0xc5, 0x99, 0xfe, 0xf9, 0x80, 0xfb, 0xa1, 0x19, 0xaf, 0x26, 0x8e, 0x1b, 0x33, 0x61, 0xd5, + 0x0d, 0x25, 0xc1, 0x85, 0xc6, 0xb8, 0x11, 0x45, 0x25, 0x91, 0x60, 0x06, 0x89, 0x89, 0x1b, 0x33, + 0x85, 0xb1, 0x92, 0x42, 0x07, 0xcb, 0x54, 0xe2, 0xcb, 0xfa, 0x2c, 0xa6, 0x9d, 0xe2, 0xea, 0x9e, + 0x7b, 0xe7, 0xe4, 0x77, 0x32, 0x07, 0x0e, 0xd7, 0x99, 0x36, 0xfa, 0xac, 0x6b, 0x47, 0x58, 0x0e, + 0xfc, 0x67, 0xb7, 0xf6, 0x0f, 0x81, 0xba, 0x50, 0x9f, 0xb9, 0xda, 0x18, 0x44, 0xf0, 0x52, 0xb9, + 0x52, 0x8c, 0x70, 0x12, 0x34, 0x45, 0xa9, 0x91, 0x82, 0x9b, 0xa8, 0x6f, 0xe6, 0x70, 0x37, 0x70, + 0x45, 0x21, 0xb1, 0x03, 0xbe, 0x91, 0x1b, 0xa3, 0x98, 0xcb, 0x49, 0xb0, 0xdf, 0x3b, 0x0e, 0x2b, + 0x6e, 0x45, 0x09, 0xef, 0x96, 0xf2, 0x4b, 0xe7, 0x99, 0xb0, 0x2e, 0xe4, 0xe0, 0x45, 0x5a, 0x27, + 0xcc, 0xe3, 0x24, 0xf8, 0xdf, 0x6b, 0xed, 0xdc, 0x7d, 0xad, 0x13, 0x51, 0xbe, 0xe0, 0x29, 0x34, + 0xf2, 0x74, 0x2d, 0x67, 0x89, 0x9a, 0x33, 0xbf, 0xc8, 0xe9, 0x3b, 0xb4, 0x26, 0xfe, 0x6e, 0xed, + 0x2b, 0xa8, 0x57, 0x4c, 0x6c, 0x82, 0x3f, 0x79, 0x19, 0x0c, 0x9e, 0x69, 0x0d, 0x1b, 0xe0, 0x4d, + 0xc6, 0x53, 0x41, 0x49, 0x71, 0x9c, 0x8e, 0xae, 0x47, 0x43, 0xea, 0xe0, 0x01, 0xec, 0xdd, 0x8f, + 0x9f, 0x1e, 0x06, 0xe2, 0x71, 0x78, 0x33, 0x1c, 0x4f, 0x27, 0xd4, 0x6d, 0x9f, 0x83, 0x57, 0x64, + 0xe1, 0x11, 0xf8, 0x66, 0x61, 0x96, 0xbb, 0xdf, 0xd9, 0x05, 0x4f, 0xa0, 0x91, 0xc9, 0xed, 0xdb, + 0x5c, 0x1a, 0xc9, 0x1c, 0x4e, 0x82, 0x96, 0xa8, 0x67, 0x72, 0x7b, 0x2b, 0x8d, 0xec, 0x5f, 0xbe, + 0x5e, 0xc4, 0x0b, 0xf3, 0x91, 0x47, 0xe1, 0x4c, 0xaf, 0xba, 0xb1, 0x5e, 0xca, 0x34, 0xb6, 0x1d, + 0x46, 0xf9, 0xbb, 0x15, 0xb3, 0x4e, 0xac, 0xd2, 0x4e, 0xac, 0xbb, 0x46, 0x6d, 0x4c, 0xc1, 0xa8, + 0x3a, 0x8e, 0xaa, 0x76, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xec, 0x71, 0xee, 0xdb, 0x7b, 0x01, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto similarity index 96% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto rename to 
vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto index 869b9af5..79954e4e 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto @@ -33,6 +33,8 @@ syntax = "proto3"; package proto3; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/proto3"; + message Request { enum Flavour { SWEET = 0; diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go index 89e07ae1..b2af97f4 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any.go +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -51,6 +51,9 @@ const googleApis = "type.googleapis.com/" // function. AnyMessageName is provided for less common use cases like filtering a // sequence of Any messages based on a set of allowed message type names. func AnyMessageName(any *any.Any) (string, error) { + if any == nil { + return "", fmt.Errorf("message is nil") + } slash := strings.LastIndex(any.TypeUrl, "/") if slash < 0 { return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl) diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go index 1fbaa44c..f67edc7d 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go @@ -1,16 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/any/any.proto +// source: google/protobuf/any.proto -/* -Package any is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/any/any.proto - -It has these top-level messages: - Any -*/ -package any +package any // import "github.com/golang/protobuf/ptypes/any" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -62,6 +53,16 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // any.Unpack(foo) // ... // +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack // methods only use the fully qualified type name after the last '/' @@ -122,14 +123,36 @@ type Any struct { // TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl" json:"type_url,omitempty"` // Must be a valid serialized protocol buffer of the above specified type. 
- Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Any) Reset() { *m = Any{} } -func (m *Any) String() string { return proto.CompactTextString(m) } -func (*Any) ProtoMessage() {} -func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Any) XXX_WellKnownType() string { return "Any" } +func (m *Any) Reset() { *m = Any{} } +func (m *Any) String() string { return proto.CompactTextString(m) } +func (*Any) ProtoMessage() {} +func (*Any) Descriptor() ([]byte, []int) { + return fileDescriptor_any_744b9ca530f228db, []int{0} +} +func (*Any) XXX_WellKnownType() string { return "Any" } +func (m *Any) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Any.Unmarshal(m, b) +} +func (m *Any) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Any.Marshal(b, m, deterministic) +} +func (dst *Any) XXX_Merge(src proto.Message) { + xxx_messageInfo_Any.Merge(dst, src) +} +func (m *Any) XXX_Size() int { + return xxx_messageInfo_Any.Size(m) +} +func (m *Any) XXX_DiscardUnknown() { + xxx_messageInfo_Any.DiscardUnknown(m) +} + +var xxx_messageInfo_Any proto.InternalMessageInfo func (m *Any) GetTypeUrl() string { if m != nil { @@ -149,20 +172,20 @@ func init() { proto.RegisterType((*Any)(nil), "google.protobuf.Any") } -func init() { proto.RegisterFile("github.com/golang/protobuf/ptypes/any/any.proto", fileDescriptor0) } +func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_any_744b9ca530f228db) } -var fileDescriptor0 = []byte{ - // 184 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4f, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0xcc, - 0xab, 0x04, 0x61, 0x3d, 0xb0, 0xb8, 0x10, 0x7f, 0x7a, 0x7e, 0x7e, 0x7a, 0x4e, 0xaa, 0x1e, 0x4c, - 0x95, 0x92, 0x19, 0x17, 0xb3, 0x63, 0x5e, 0xa5, 0x90, 0x24, 0x17, 0x07, 0x48, 0x79, 0x7c, 0x69, - 0x51, 0x8e, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x3b, 0x88, 0x1f, 0x5a, 0x94, 0x23, 0x24, - 0xc2, 0xc5, 0x5a, 0x96, 0x98, 0x53, 0x9a, 0x2a, 0xc1, 0xa4, 0xc0, 0xa8, 0xc1, 0x13, 0x04, 0xe1, - 0x38, 0xe5, 0x73, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x19, 0xe7, 0xc4, 0xe1, 0x98, 0x57, 0x19, - 0x00, 0xe2, 0x04, 0x30, 0x46, 0xa9, 0x12, 0xe5, 0xb8, 0x45, 0x4c, 0xcc, 0xee, 0x01, 0x4e, 0xab, - 0x98, 0xe4, 0xdc, 0x21, 0x46, 0x05, 0x40, 0x95, 0xe8, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, - 0x97, 0xe7, 0x85, 0x80, 0x94, 0x26, 0xb1, 0x81, 0xf5, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, - 0x45, 0x1f, 0x1a, 0xf2, 0xf3, 0x00, 0x00, 0x00, +var fileDescriptor_any_744b9ca530f228db = []byte{ + // 185 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4, + 0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a, + 0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46, + 0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7, + 0x34, 0x55, 0x82, 0x49, 0x81, 
0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xca, 0xe7, 0x12, 0x4e, 0xce, + 0xcf, 0xd5, 0x43, 0x33, 0xce, 0x89, 0xc3, 0x31, 0xaf, 0x32, 0x00, 0xc4, 0x09, 0x60, 0x8c, 0x52, + 0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, + 0x4b, 0x47, 0xb8, 0xa8, 0x00, 0x64, 0x7a, 0x31, 0xc8, 0x61, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, + 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x8c, 0x0a, 0x80, 0x2a, 0xd1, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, + 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0x29, 0x4d, 0x62, 0x03, 0xeb, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff, + 0xff, 0x13, 0xf8, 0xe8, 0x42, 0xdd, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.proto b/vendor/github.com/golang/protobuf/ptypes/any/any.proto index 9bd3f50a..c7486676 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.proto +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.proto @@ -74,6 +74,16 @@ option objc_class_prefix = "GPB"; // any.Unpack(foo) // ... // +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack // methods only use the fully qualified type name after the last '/' diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go index fe3350be..4d75473b 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go @@ -1,16 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/duration/duration.proto +// source: google/protobuf/duration.proto -/* -Package duration is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/duration/duration.proto - -It has these top-level messages: - Duration -*/ -package duration +package duration // import "github.com/golang/protobuf/ptypes/duration" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -98,14 +89,36 @@ type Duration struct { // of one second or more, a non-zero value for the `nanos` field must be // of the same sign as the `seconds` field. Must be from -999,999,999 // to +999,999,999 inclusive. 
- Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Duration) Reset() { *m = Duration{} } -func (m *Duration) String() string { return proto.CompactTextString(m) } -func (*Duration) ProtoMessage() {} -func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Duration) XXX_WellKnownType() string { return "Duration" } +func (m *Duration) Reset() { *m = Duration{} } +func (m *Duration) String() string { return proto.CompactTextString(m) } +func (*Duration) ProtoMessage() {} +func (*Duration) Descriptor() ([]byte, []int) { + return fileDescriptor_duration_e7d612259e3f0613, []int{0} +} +func (*Duration) XXX_WellKnownType() string { return "Duration" } +func (m *Duration) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Duration.Unmarshal(m, b) +} +func (m *Duration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Duration.Marshal(b, m, deterministic) +} +func (dst *Duration) XXX_Merge(src proto.Message) { + xxx_messageInfo_Duration.Merge(dst, src) +} +func (m *Duration) XXX_Size() int { + return xxx_messageInfo_Duration.Size(m) +} +func (m *Duration) XXX_DiscardUnknown() { + xxx_messageInfo_Duration.DiscardUnknown(m) +} + +var xxx_messageInfo_Duration proto.InternalMessageInfo func (m *Duration) GetSeconds() int64 { if m != nil { @@ -126,21 +139,21 @@ func init() { } func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/duration/duration.proto", fileDescriptor0) + proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_duration_e7d612259e3f0613) } -var fileDescriptor0 = []byte{ - // 189 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0x29, - 0x2d, 0x4a, 0x2c, 0xc9, 0xcc, 0xcf, 0x83, 0x33, 0xf4, 0xc0, 0x2a, 0x84, 0xf8, 0xd3, 0xf3, 0xf3, - 0xd3, 0x73, 0x52, 0xf5, 0x60, 0xea, 0x95, 0xac, 0xb8, 0x38, 0x5c, 0xa0, 0x4a, 0x84, 0x24, 0xb8, - 0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83, 0x60, - 0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xd6, - 0x20, 0x08, 0xc7, 0xa9, 0x86, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x48, 0x27, 0x5e, 0x98, - 0x81, 0x01, 0x20, 0x91, 0x00, 0xc6, 0x28, 0x2d, 0xe2, 0xdd, 0xfb, 0x83, 0x91, 0x71, 0x11, 0x13, - 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xb9, 0x01, 0x50, 0xa5, 0x7a, 0xe1, 0xa9, - 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x2d, 0x49, 0x6c, 0x60, 0x33, 0x8c, 0x01, - 0x01, 0x00, 0x00, 0xff, 0xff, 0x45, 0x5a, 0x81, 0x3d, 0x0e, 0x01, 0x00, 0x00, +var fileDescriptor_duration_e7d612259e3f0613 = []byte{ + // 190 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a, + 0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56, + 0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5, + 0x12, 0x8c, 0x0a, 0x8c, 
0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e, + 0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0xc3, 0x25, 0x9c, 0x9c, + 0x9f, 0xab, 0x87, 0x66, 0xa4, 0x13, 0x2f, 0xcc, 0xc0, 0x00, 0x90, 0x48, 0x00, 0x63, 0x94, 0x56, + 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, + 0x3a, 0xc2, 0x7d, 0x05, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x70, 0x67, 0xfe, 0x60, 0x64, 0x5c, 0xc4, + 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, 0x00, 0x54, 0xa9, 0x5e, 0x78, + 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, 0x12, 0x1b, 0xd8, 0x0c, 0x63, + 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x30, 0xff, 0xf3, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/duration_test.go b/vendor/github.com/golang/protobuf/ptypes/duration_test.go index e761289f..e00491a3 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration_test.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration_test.go @@ -52,37 +52,37 @@ var durationTests = []struct { dur time.Duration }{ // The zero duration. - {&durpb.Duration{0, 0}, true, true, 0}, + {&durpb.Duration{Seconds: 0, Nanos: 0}, true, true, 0}, // Some ordinary non-zero durations. - {&durpb.Duration{100, 0}, true, true, 100 * time.Second}, - {&durpb.Duration{-100, 0}, true, true, -100 * time.Second}, - {&durpb.Duration{100, 987}, true, true, 100*time.Second + 987}, - {&durpb.Duration{-100, -987}, true, true, -(100*time.Second + 987)}, + {&durpb.Duration{Seconds: 100, Nanos: 0}, true, true, 100 * time.Second}, + {&durpb.Duration{Seconds: -100, Nanos: 0}, true, true, -100 * time.Second}, + {&durpb.Duration{Seconds: 100, Nanos: 987}, true, true, 100*time.Second + 987}, + {&durpb.Duration{Seconds: -100, Nanos: -987}, true, true, -(100*time.Second + 987)}, // The largest duration representable in Go. - {&durpb.Duration{maxGoSeconds, int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, true, math.MaxInt64}, + {&durpb.Duration{Seconds: maxGoSeconds, Nanos: int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, true, math.MaxInt64}, // The smallest duration representable in Go. - {&durpb.Duration{minGoSeconds, int32(math.MinInt64 - 1e9*minGoSeconds)}, true, true, math.MinInt64}, + {&durpb.Duration{Seconds: minGoSeconds, Nanos: int32(math.MinInt64 - 1e9*minGoSeconds)}, true, true, math.MinInt64}, {nil, false, false, 0}, - {&durpb.Duration{-100, 987}, false, false, 0}, - {&durpb.Duration{100, -987}, false, false, 0}, - {&durpb.Duration{math.MinInt64, 0}, false, false, 0}, - {&durpb.Duration{math.MaxInt64, 0}, false, false, 0}, + {&durpb.Duration{Seconds: -100, Nanos: 987}, false, false, 0}, + {&durpb.Duration{Seconds: 100, Nanos: -987}, false, false, 0}, + {&durpb.Duration{Seconds: math.MinInt64, Nanos: 0}, false, false, 0}, + {&durpb.Duration{Seconds: math.MaxInt64, Nanos: 0}, false, false, 0}, // The largest valid duration. - {&durpb.Duration{maxSeconds, 1e9 - 1}, true, false, 0}, + {&durpb.Duration{Seconds: maxSeconds, Nanos: 1e9 - 1}, true, false, 0}, // The smallest valid duration. - {&durpb.Duration{minSeconds, -(1e9 - 1)}, true, false, 0}, + {&durpb.Duration{Seconds: minSeconds, Nanos: -(1e9 - 1)}, true, false, 0}, // The smallest invalid duration above the valid range. - {&durpb.Duration{maxSeconds + 1, 0}, false, false, 0}, + {&durpb.Duration{Seconds: maxSeconds + 1, Nanos: 0}, false, false, 0}, // The largest invalid duration below the valid range. 
- {&durpb.Duration{minSeconds - 1, -(1e9 - 1)}, false, false, 0}, + {&durpb.Duration{Seconds: minSeconds - 1, Nanos: -(1e9 - 1)}, false, false, 0}, // One nanosecond past the largest duration representable in Go. - {&durpb.Duration{maxGoSeconds, int32(math.MaxInt64-1e9*maxGoSeconds) + 1}, true, false, 0}, + {&durpb.Duration{Seconds: maxGoSeconds, Nanos: int32(math.MaxInt64-1e9*maxGoSeconds) + 1}, true, false, 0}, // One nanosecond past the smallest duration representable in Go. - {&durpb.Duration{minGoSeconds, int32(math.MinInt64-1e9*minGoSeconds) - 1}, true, false, 0}, + {&durpb.Duration{Seconds: minGoSeconds, Nanos: int32(math.MinInt64-1e9*minGoSeconds) - 1}, true, false, 0}, // One second past the largest duration representable in Go. - {&durpb.Duration{maxGoSeconds + 1, int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, false, 0}, + {&durpb.Duration{Seconds: maxGoSeconds + 1, Nanos: int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, false, 0}, // One second past the smallest duration representable in Go. - {&durpb.Duration{minGoSeconds - 1, int32(math.MinInt64 - 1e9*minGoSeconds)}, true, false, 0}, + {&durpb.Duration{Seconds: minGoSeconds - 1, Nanos: int32(math.MinInt64 - 1e9*minGoSeconds)}, true, false, 0}, } func TestValidateDuration(t *testing.T) { diff --git a/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go index ae159414..a69b403c 100644 --- a/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go @@ -1,16 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/empty/empty.proto +// source: google/protobuf/empty.proto -/* -Package empty is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/empty/empty.proto - -It has these top-level messages: - Empty -*/ -package empty +package empty // import "github.com/golang/protobuf/ptypes/empty" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -37,32 +28,52 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // // The JSON representation for `Empty` is empty JSON object `{}`. 
type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Empty) Reset() { *m = Empty{} } -func (m *Empty) String() string { return proto.CompactTextString(m) } -func (*Empty) ProtoMessage() {} -func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Empty) XXX_WellKnownType() string { return "Empty" } +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_empty_39e6d6db0632e5b2, []int{0} +} +func (*Empty) XXX_WellKnownType() string { return "Empty" } +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (dst *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(dst, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo func init() { proto.RegisterType((*Empty)(nil), "google.protobuf.Empty") } -func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/empty/empty.proto", fileDescriptor0) -} +func init() { proto.RegisterFile("google/protobuf/empty.proto", fileDescriptor_empty_39e6d6db0632e5b2) } -var fileDescriptor0 = []byte{ - // 147 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4e, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0xcd, - 0x2d, 0x28, 0xa9, 0x84, 0x90, 0x7a, 0x60, 0x39, 0x21, 0xfe, 0xf4, 0xfc, 0xfc, 0xf4, 0x9c, 0x54, - 0x3d, 0x98, 0x4a, 0x25, 0x76, 0x2e, 0x56, 0x57, 0x90, 0xbc, 0x53, 0x19, 0x97, 0x70, 0x72, 0x7e, - 0xae, 0x1e, 0x9a, 0xbc, 0x13, 0x17, 0x58, 0x36, 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x52, 0x27, 0xd2, - 0xce, 0x1f, 0x8c, 0x8c, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x4c, - 0x0c, 0x80, 0xaa, 0xd3, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0xa9, - 0x4f, 0x62, 0x03, 0x1b, 0x60, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x6e, 0x8e, 0x0a, 0x06, 0xcf, - 0x00, 0x00, 0x00, +var fileDescriptor_empty_39e6d6db0632e5b2 = []byte{ + // 148 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcd, 0x2d, 0x28, + 0xa9, 0xd4, 0x03, 0x73, 0x85, 0xf8, 0x21, 0x92, 0x7a, 0x30, 0x49, 0x25, 0x76, 0x2e, 0x56, 0x57, + 0x90, 0xbc, 0x53, 0x19, 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0xbc, 0x13, 0x17, 0x58, 0x36, + 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x52, 0x4f, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, + 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0x47, 0x58, 0x53, 0x50, 0x52, 0x59, 0x90, 0x5a, 0x0c, + 0xb1, 0xed, 0x07, 0x23, 0xe3, 0x22, 0x26, 0x66, 0xf7, 0x00, 0xa7, 0x55, 0x4c, 0x72, 0xee, 0x10, + 0x13, 0x03, 0xa0, 0xea, 0xf4, 0xc2, 0x53, 0x73, 0x72, 0xbc, 0xf3, 0xf2, 0xcb, 0xf3, 0x42, 0x40, + 0xea, 0x93, 0xd8, 0xc0, 0x06, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x64, 0xd4, 0xb3, 0xa6, + 0xb7, 0x00, 
0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/regen.sh b/vendor/github.com/golang/protobuf/ptypes/regen.sh deleted file mode 100755 index 2a5b4e8b..00000000 --- a/vendor/github.com/golang/protobuf/ptypes/regen.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash -e -# -# This script fetches and rebuilds the "well-known types" protocol buffers. -# To run this you will need protoc and goprotobuf installed; -# see https://github.com/golang/protobuf for instructions. -# You also need Go and Git installed. - -PKG=github.com/golang/protobuf/ptypes -UPSTREAM=https://github.com/google/protobuf -UPSTREAM_SUBDIR=src/google/protobuf -PROTO_FILES=' - any.proto - duration.proto - empty.proto - struct.proto - timestamp.proto - wrappers.proto -' - -function die() { - echo 1>&2 $* - exit 1 -} - -# Sanity check that the right tools are accessible. -for tool in go git protoc protoc-gen-go; do - q=$(which $tool) || die "didn't find $tool" - echo 1>&2 "$tool: $q" -done - -tmpdir=$(mktemp -d -t regen-wkt.XXXXXX) -trap 'rm -rf $tmpdir' EXIT - -echo -n 1>&2 "finding package dir... " -pkgdir=$(go list -f '{{.Dir}}' $PKG) -echo 1>&2 $pkgdir -base=$(echo $pkgdir | sed "s,/$PKG\$,,") -echo 1>&2 "base: $base" -cd $base - -echo 1>&2 "fetching latest protos... " -git clone -q $UPSTREAM $tmpdir -# Pass 1: build mapping from upstream filename to our filename. -declare -A filename_map -for f in $(cd $PKG && find * -name '*.proto'); do - echo -n 1>&2 "looking for latest version of $f... " - up=$(cd $tmpdir/$UPSTREAM_SUBDIR && find * -name $(basename $f) | grep -v /testdata/) - echo 1>&2 $up - if [ $(echo $up | wc -w) != "1" ]; then - die "not exactly one match" - fi - filename_map[$up]=$f -done -# Pass 2: copy files -for up in "${!filename_map[@]}"; do - f=${filename_map[$up]} - shortname=$(basename $f | sed 's,\.proto$,,') - cp $tmpdir/$UPSTREAM_SUBDIR/$up $PKG/$f -done - -# Run protoc once per package. -for dir in $(find $PKG -name '*.proto' | xargs dirname | sort | uniq); do - echo 1>&2 "* $dir" - protoc --go_out=. $dir/*.proto -done -echo 1>&2 "All OK" diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go index 35a8ec59..442c0e09 100644 --- a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go @@ -1,18 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/struct/struct.proto +// source: google/protobuf/struct.proto -/* -Package structpb is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/struct/struct.proto - -It has these top-level messages: - Struct - Value - ListValue -*/ -package structpb +package structpb // import "github.com/golang/protobuf/ptypes/struct" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -50,8 +39,10 @@ var NullValue_value = map[string]int32{ func (x NullValue) String() string { return proto.EnumName(NullValue_name, int32(x)) } -func (NullValue) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (NullValue) XXX_WellKnownType() string { return "NullValue" } +func (NullValue) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_struct_3a5a94e0c7801b27, []int{0} +} +func (NullValue) XXX_WellKnownType() string { return "NullValue" } // `Struct` represents a structured data value, consisting of fields // which map to dynamically typed values. 
In some languages, `Struct` @@ -63,14 +54,36 @@ func (NullValue) XXX_WellKnownType() string { return "NullValue" } // The JSON representation for `Struct` is JSON object. type Struct struct { // Unordered map of dynamically typed values. - Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Struct) Reset() { *m = Struct{} } -func (m *Struct) String() string { return proto.CompactTextString(m) } -func (*Struct) ProtoMessage() {} -func (*Struct) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Struct) XXX_WellKnownType() string { return "Struct" } +func (m *Struct) Reset() { *m = Struct{} } +func (m *Struct) String() string { return proto.CompactTextString(m) } +func (*Struct) ProtoMessage() {} +func (*Struct) Descriptor() ([]byte, []int) { + return fileDescriptor_struct_3a5a94e0c7801b27, []int{0} +} +func (*Struct) XXX_WellKnownType() string { return "Struct" } +func (m *Struct) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Struct.Unmarshal(m, b) +} +func (m *Struct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Struct.Marshal(b, m, deterministic) +} +func (dst *Struct) XXX_Merge(src proto.Message) { + xxx_messageInfo_Struct.Merge(dst, src) +} +func (m *Struct) XXX_Size() int { + return xxx_messageInfo_Struct.Size(m) +} +func (m *Struct) XXX_DiscardUnknown() { + xxx_messageInfo_Struct.DiscardUnknown(m) +} + +var xxx_messageInfo_Struct proto.InternalMessageInfo func (m *Struct) GetFields() map[string]*Value { if m != nil { @@ -95,14 +108,36 @@ type Value struct { // *Value_BoolValue // *Value_StructValue // *Value_ListValue - Kind isValue_Kind `protobuf_oneof:"kind"` + Kind isValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Value) Reset() { *m = Value{} } -func (m *Value) String() string { return proto.CompactTextString(m) } -func (*Value) ProtoMessage() {} -func (*Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } -func (*Value) XXX_WellKnownType() string { return "Value" } +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_struct_3a5a94e0c7801b27, []int{1} +} +func (*Value) XXX_WellKnownType() string { return "Value" } +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo type isValue_Kind interface { isValue_Kind() @@ -289,26 +324,26 @@ func _Value_OneofSizer(msg proto.Message) (n int) { // kind switch x := m.Kind.(type) { case *Value_NullValue: - n 
+= proto.SizeVarint(1<<3 | proto.WireVarint) + n += 1 // tag and wire n += proto.SizeVarint(uint64(x.NullValue)) case *Value_NumberValue: - n += proto.SizeVarint(2<<3 | proto.WireFixed64) + n += 1 // tag and wire n += 8 case *Value_StringValue: - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.StringValue))) n += len(x.StringValue) case *Value_BoolValue: - n += proto.SizeVarint(4<<3 | proto.WireVarint) + n += 1 // tag and wire n += 1 case *Value_StructValue: s := proto.Size(x.StructValue) - n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Value_ListValue: s := proto.Size(x.ListValue) - n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -323,14 +358,36 @@ func _Value_OneofSizer(msg proto.Message) (n int) { // The JSON representation for `ListValue` is JSON array. type ListValue struct { // Repeated field of dynamically typed values. - Values []*Value `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"` + Values []*Value `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListValue) Reset() { *m = ListValue{} } -func (m *ListValue) String() string { return proto.CompactTextString(m) } -func (*ListValue) ProtoMessage() {} -func (*ListValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (*ListValue) XXX_WellKnownType() string { return "ListValue" } +func (m *ListValue) Reset() { *m = ListValue{} } +func (m *ListValue) String() string { return proto.CompactTextString(m) } +func (*ListValue) ProtoMessage() {} +func (*ListValue) Descriptor() ([]byte, []int) { + return fileDescriptor_struct_3a5a94e0c7801b27, []int{2} +} +func (*ListValue) XXX_WellKnownType() string { return "ListValue" } +func (m *ListValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListValue.Unmarshal(m, b) +} +func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListValue.Marshal(b, m, deterministic) +} +func (dst *ListValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListValue.Merge(dst, src) +} +func (m *ListValue) XXX_Size() int { + return xxx_messageInfo_ListValue.Size(m) +} +func (m *ListValue) XXX_DiscardUnknown() { + xxx_messageInfo_ListValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ListValue proto.InternalMessageInfo func (m *ListValue) GetValues() []*Value { if m != nil { @@ -341,42 +398,43 @@ func (m *ListValue) GetValues() []*Value { func init() { proto.RegisterType((*Struct)(nil), "google.protobuf.Struct") + proto.RegisterMapType((map[string]*Value)(nil), "google.protobuf.Struct.FieldsEntry") proto.RegisterType((*Value)(nil), "google.protobuf.Value") proto.RegisterType((*ListValue)(nil), "google.protobuf.ListValue") proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value) } func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/struct/struct.proto", fileDescriptor0) + proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor_struct_3a5a94e0c7801b27) } -var fileDescriptor0 = []byte{ +var fileDescriptor_struct_3a5a94e0c7801b27 = []byte{ // 417 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x41, 0x8b, 0xd3, 0x40, - 0x14, 0x80, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 
0xb4, 0xac, 0xa0, 0xa1, 0x7b, 0x09, - 0x22, 0x09, 0x56, 0x04, 0x31, 0x5e, 0x0c, 0xac, 0xbb, 0x60, 0x58, 0x62, 0x74, 0x57, 0xf0, 0x52, - 0x9a, 0x34, 0x8d, 0xa1, 0xd3, 0x99, 0x90, 0xcc, 0x28, 0x3d, 0xfa, 0x2f, 0x3c, 0x7b, 0xf4, 0xe8, - 0xaf, 0xf3, 0x28, 0x33, 0x93, 0x44, 0x69, 0x29, 0x78, 0x9a, 0xbe, 0x37, 0xdf, 0xfb, 0xe6, 0xbd, - 0xd7, 0xc0, 0xf3, 0xb2, 0xe2, 0x9f, 0x45, 0xe6, 0xe7, 0x6c, 0x13, 0x94, 0x8c, 0x2c, 0x68, 0x19, - 0xd4, 0x0d, 0xe3, 0x2c, 0x13, 0xab, 0xa0, 0xe6, 0xdb, 0xba, 0x68, 0x83, 0x96, 0x37, 0x22, 0xe7, - 0xdd, 0xe1, 0xab, 0x5b, 0x7c, 0xa7, 0x64, 0xac, 0x24, 0x85, 0xdf, 0xb3, 0xd3, 0xef, 0x08, 0xac, - 0xf7, 0x8a, 0xc0, 0x21, 0x58, 0xab, 0xaa, 0x20, 0xcb, 0x76, 0x82, 0x5c, 0xd3, 0x73, 0x66, 0x67, - 0xfe, 0x0e, 0xec, 0x6b, 0xd0, 0x7f, 0xa3, 0xa8, 0x73, 0xca, 0x9b, 0x6d, 0xda, 0x95, 0x9c, 0xbe, - 0x03, 0xe7, 0x9f, 0x34, 0x3e, 0x01, 0x73, 0x5d, 0x6c, 0x27, 0xc8, 0x45, 0x9e, 0x9d, 0xca, 0x9f, - 0xf8, 0x09, 0x8c, 0xbf, 0x2c, 0x88, 0x28, 0x26, 0x86, 0x8b, 0x3c, 0x67, 0x76, 0x6f, 0x4f, 0x7e, - 0x23, 0x6f, 0x53, 0x0d, 0xbd, 0x34, 0x5e, 0xa0, 0xe9, 0x2f, 0x03, 0xc6, 0x2a, 0x89, 0x43, 0x00, - 0x2a, 0x08, 0x99, 0x6b, 0x81, 0x94, 0x1e, 0xcf, 0x4e, 0xf7, 0x04, 0x57, 0x82, 0x10, 0xc5, 0x5f, - 0x8e, 0x52, 0x9b, 0xf6, 0x01, 0x3e, 0x83, 0xdb, 0x54, 0x6c, 0xb2, 0xa2, 0x99, 0xff, 0x7d, 0x1f, - 0x5d, 0x8e, 0x52, 0x47, 0x67, 0x07, 0xa8, 0xe5, 0x4d, 0x45, 0xcb, 0x0e, 0x32, 0x65, 0xe3, 0x12, - 0xd2, 0x59, 0x0d, 0x3d, 0x02, 0xc8, 0x18, 0xeb, 0xdb, 0x38, 0x72, 0x91, 0x77, 0x4b, 0x3e, 0x25, - 0x73, 0x1a, 0x78, 0xa5, 0x2c, 0x22, 0xe7, 0x1d, 0x32, 0x56, 0xa3, 0xde, 0x3f, 0xb0, 0xc7, 0x4e, - 0x2f, 0x72, 0x3e, 0x4c, 0x49, 0xaa, 0xb6, 0xaf, 0xb5, 0x54, 0xed, 0xfe, 0x94, 0x71, 0xd5, 0xf2, - 0x61, 0x4a, 0xd2, 0x07, 0x91, 0x05, 0x47, 0xeb, 0x8a, 0x2e, 0xa7, 0x21, 0xd8, 0x03, 0x81, 0x7d, - 0xb0, 0x94, 0xac, 0xff, 0x47, 0x0f, 0x2d, 0xbd, 0xa3, 0x1e, 0x3f, 0x00, 0x7b, 0x58, 0x22, 0x3e, - 0x06, 0xb8, 0xba, 0x8e, 0xe3, 0xf9, 0xcd, 0xeb, 0xf8, 0xfa, 0xfc, 0x64, 0x14, 0x7d, 0x43, 0x70, - 0x37, 0x67, 0x9b, 0x5d, 0x45, 0xe4, 0xe8, 0x69, 0x12, 0x19, 0x27, 0xe8, 0xd3, 0xd3, 0xff, 0xfd, - 0x30, 0x43, 0x7d, 0xd4, 0xd9, 0x6f, 0x84, 0x7e, 0x18, 0xe6, 0x45, 0x12, 0xfd, 0x34, 0x1e, 0x5e, - 0x68, 0x79, 0xd2, 0xf7, 0xf7, 0xb1, 0x20, 0xe4, 0x2d, 0x65, 0x5f, 0xe9, 0x07, 0x59, 0x99, 0x59, - 0x4a, 0xf5, 0xec, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x9b, 0x6e, 0x5d, 0x3c, 0xfe, 0x02, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 0xb4, 0xac, 0xa2, 0xa1, 0x7b, 0x09, + 0x22, 0x29, 0xd6, 0x8b, 0x18, 0x2f, 0x06, 0xd6, 0x5d, 0x30, 0x2c, 0x31, 0xba, 0x15, 0xbc, 0x94, + 0x26, 0x4d, 0x63, 0xe8, 0x74, 0x26, 0x24, 0x33, 0x4a, 0x8f, 0x7e, 0x0b, 0xcf, 0x1e, 0x3d, 0xfa, + 0xe9, 0x3c, 0xca, 0xcc, 0x24, 0xa9, 0xb4, 0xf4, 0x94, 0xbc, 0xf7, 0x7e, 0xef, 0x3f, 0xef, 0xff, + 0x66, 0xe0, 0x71, 0xc1, 0x58, 0x41, 0xf2, 0x49, 0x55, 0x33, 0xce, 0x52, 0xb1, 0x9a, 0x34, 0xbc, + 0x16, 0x19, 0xf7, 0x55, 0x8c, 0xef, 0xe9, 0xaa, 0xdf, 0x55, 0xc7, 0x3f, 0x11, 0x58, 0x1f, 0x15, + 0x81, 0x03, 0xb0, 0x56, 0x65, 0x4e, 0x96, 0xcd, 0x08, 0xb9, 0xa6, 0xe7, 0x4c, 0x2f, 0xfc, 0x3d, + 0xd8, 0xd7, 0xa0, 0xff, 0x4e, 0x51, 0x97, 0x94, 0xd7, 0xdb, 0xa4, 0x6d, 0x39, 0xff, 0x00, 0xce, + 0x7f, 0x69, 0x7c, 0x06, 0xe6, 0x3a, 0xdf, 0x8e, 0x90, 0x8b, 0x3c, 0x3b, 0x91, 0xbf, 0xf8, 0x39, + 0x0c, 0xbf, 0x2d, 0x88, 0xc8, 0x47, 0x86, 0x8b, 0x3c, 0x67, 0xfa, 0xe0, 0x40, 0x7c, 0x26, 0xab, + 0x89, 0x86, 0x5e, 0x1b, 0xaf, 0xd0, 0xf8, 0x8f, 0x01, 0x43, 0x95, 0xc4, 0x01, 0x00, 
0x15, 0x84, + 0xcc, 0xb5, 0x80, 0x14, 0x3d, 0x9d, 0x9e, 0x1f, 0x08, 0xdc, 0x08, 0x42, 0x14, 0x7f, 0x3d, 0x48, + 0x6c, 0xda, 0x05, 0xf8, 0x02, 0xee, 0x52, 0xb1, 0x49, 0xf3, 0x7a, 0xbe, 0x3b, 0x1f, 0x5d, 0x0f, + 0x12, 0x47, 0x67, 0x7b, 0xa8, 0xe1, 0x75, 0x49, 0x8b, 0x16, 0x32, 0xe5, 0xe0, 0x12, 0xd2, 0x59, + 0x0d, 0x3d, 0x05, 0x48, 0x19, 0xeb, 0xc6, 0x38, 0x71, 0x91, 0x77, 0x47, 0x1e, 0x25, 0x73, 0x1a, + 0x78, 0xa3, 0x54, 0x44, 0xc6, 0x5b, 0x64, 0xa8, 0xac, 0x3e, 0x3c, 0xb2, 0xc7, 0x56, 0x5e, 0x64, + 0xbc, 0x77, 0x49, 0xca, 0xa6, 0xeb, 0xb5, 0x54, 0xef, 0xa1, 0xcb, 0xa8, 0x6c, 0x78, 0xef, 0x92, + 0x74, 0x41, 0x68, 0xc1, 0xc9, 0xba, 0xa4, 0xcb, 0x71, 0x00, 0x76, 0x4f, 0x60, 0x1f, 0x2c, 0x25, + 0xd6, 0xdd, 0xe8, 0xb1, 0xa5, 0xb7, 0xd4, 0xb3, 0x47, 0x60, 0xf7, 0x4b, 0xc4, 0xa7, 0x00, 0x37, + 0xb7, 0x51, 0x34, 0x9f, 0xbd, 0x8d, 0x6e, 0x2f, 0xcf, 0x06, 0xe1, 0x0f, 0x04, 0xf7, 0x33, 0xb6, + 0xd9, 0x97, 0x08, 0x1d, 0xed, 0x26, 0x96, 0x71, 0x8c, 0xbe, 0xbc, 0x28, 0x4a, 0xfe, 0x55, 0xa4, + 0x7e, 0xc6, 0x36, 0x93, 0x82, 0x91, 0x05, 0x2d, 0x76, 0x4f, 0xb1, 0xe2, 0xdb, 0x2a, 0x6f, 0xda, + 0x17, 0x19, 0xe8, 0x4f, 0x95, 0xfe, 0x45, 0xe8, 0x97, 0x61, 0x5e, 0xc5, 0xe1, 0x6f, 0xe3, 0xc9, + 0x95, 0x16, 0x8f, 0xbb, 0xf9, 0x3e, 0xe7, 0x84, 0xbc, 0xa7, 0xec, 0x3b, 0xfd, 0x24, 0x3b, 0x53, + 0x4b, 0x49, 0xbd, 0xfc, 0x17, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x1b, 0x59, 0xf8, 0xe5, 0x02, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go index 1b365762..47f10dbc 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -99,6 +99,15 @@ func Timestamp(ts *tspb.Timestamp) (time.Time, error) { return t, validateTimestamp(ts) } +// TimestampNow returns a google.protobuf.Timestamp for the current time. +func TimestampNow() *tspb.Timestamp { + ts, err := TimestampProto(time.Now()) + if err != nil { + panic("ptypes: time.Now() out of Timestamp range") + } + return ts +} + // TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. // It returns an error if the resulting Timestamp is invalid. func TimestampProto(t time.Time) (*tspb.Timestamp, error) { diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go index 3b76261e..e9c22228 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go @@ -1,16 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/timestamp/timestamp.proto +// source: google/protobuf/timestamp.proto -/* -Package timestamp is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/timestamp/timestamp.proto - -It has these top-level messages: - Timestamp -*/ -package timestamp +package timestamp // import "github.com/golang/protobuf/ptypes/timestamp" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -101,7 +92,7 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) // with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one // can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--) // to obtain a formatter capable of generating timestamps in this format. // // @@ -114,14 +105,36 @@ type Timestamp struct { // second values with fractions must still have non-negative nanos values // that count forward in time. Must be from 0 to 999,999,999 // inclusive. - Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Timestamp) Reset() { *m = Timestamp{} } -func (m *Timestamp) String() string { return proto.CompactTextString(m) } -func (*Timestamp) ProtoMessage() {} -func (*Timestamp) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } +func (m *Timestamp) Reset() { *m = Timestamp{} } +func (m *Timestamp) String() string { return proto.CompactTextString(m) } +func (*Timestamp) ProtoMessage() {} +func (*Timestamp) Descriptor() ([]byte, []int) { + return fileDescriptor_timestamp_b826e8e5fba671a8, []int{0} +} +func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } +func (m *Timestamp) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Timestamp.Unmarshal(m, b) +} +func (m *Timestamp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Timestamp.Marshal(b, m, deterministic) +} +func (dst *Timestamp) XXX_Merge(src proto.Message) { + xxx_messageInfo_Timestamp.Merge(dst, src) +} +func (m *Timestamp) XXX_Size() int { + return xxx_messageInfo_Timestamp.Size(m) +} +func (m *Timestamp) XXX_DiscardUnknown() { + xxx_messageInfo_Timestamp.DiscardUnknown(m) +} + +var xxx_messageInfo_Timestamp proto.InternalMessageInfo func (m *Timestamp) GetSeconds() int64 { if m != nil { @@ -142,21 +155,21 @@ func init() { } func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/timestamp/timestamp.proto", fileDescriptor0) + proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_timestamp_b826e8e5fba671a8) } -var fileDescriptor0 = []byte{ - // 190 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2f, 0xc9, - 0xcc, 0x4d, 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0x40, 0xb0, 0xf4, 0xc0, 0x6a, 0x84, 0xf8, 0xd3, 0xf3, - 0xf3, 0xd3, 0x73, 0x52, 0xf5, 0x60, 0x3a, 0x94, 0xac, 0xb9, 0x38, 0x43, 0x60, 0x6a, 0x84, 0x24, - 0xb8, 0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83, - 0x60, 0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, - 0xd6, 0x20, 0x08, 0xc7, 0xa9, 0x8e, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x4c, 0x27, 0x3e, - 0xb8, 0x89, 0x01, 0x20, 0xa1, 0x00, 0xc6, 0x28, 0x6d, 0x12, 0xdc, 0xfc, 0x83, 0x91, 0x71, 0x11, - 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, 0xe1, - 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, 0x8c, - 0x01, 0x01, 
0x00, 0x00, 0xff, 0xff, 0x6b, 0x59, 0x0a, 0x4d, 0x13, 0x01, 0x00, 0x00, +var fileDescriptor_timestamp_b826e8e5fba671a8 = []byte{ + // 191 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d, + 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28, + 0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5, + 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89, + 0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x1d, 0x97, 0x70, + 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0x99, 0x4e, 0x7c, 0x70, 0x13, 0x03, 0x40, 0x42, 0x01, 0x8c, 0x51, + 0xda, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89, + 0x79, 0xe9, 0x08, 0x27, 0x16, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x23, 0x5c, 0xfa, 0x83, 0x91, 0x71, + 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, + 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, + 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x77, 0x4a, 0x07, 0xf7, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto index b7cbd175..06750ab1 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto @@ -114,7 +114,7 @@ option objc_class_prefix = "GPB"; // to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) // with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one // can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--) // to obtain a formatter capable of generating timestamps in this format. // // diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go b/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go index 114a7f9f..6e3c969b 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go @@ -46,32 +46,32 @@ var tests = []struct { t time.Time }{ // The timestamp representing the Unix epoch date. - {&tspb.Timestamp{0, 0}, true, utcDate(1970, 1, 1)}, + {&tspb.Timestamp{Seconds: 0, Nanos: 0}, true, utcDate(1970, 1, 1)}, // The smallest representable timestamp. - {&tspb.Timestamp{math.MinInt64, math.MinInt32}, false, + {&tspb.Timestamp{Seconds: math.MinInt64, Nanos: math.MinInt32}, false, time.Unix(math.MinInt64, math.MinInt32).UTC()}, // The smallest representable timestamp with non-negative nanos. - {&tspb.Timestamp{math.MinInt64, 0}, false, time.Unix(math.MinInt64, 0).UTC()}, + {&tspb.Timestamp{Seconds: math.MinInt64, Nanos: 0}, false, time.Unix(math.MinInt64, 0).UTC()}, // The earliest valid timestamp. - {&tspb.Timestamp{minValidSeconds, 0}, true, utcDate(1, 1, 1)}, + {&tspb.Timestamp{Seconds: minValidSeconds, Nanos: 0}, true, utcDate(1, 1, 1)}, //"0001-01-01T00:00:00Z"}, // The largest representable timestamp. 
- {&tspb.Timestamp{math.MaxInt64, math.MaxInt32}, false, + {&tspb.Timestamp{Seconds: math.MaxInt64, Nanos: math.MaxInt32}, false, time.Unix(math.MaxInt64, math.MaxInt32).UTC()}, // The largest representable timestamp with nanos in range. - {&tspb.Timestamp{math.MaxInt64, 1e9 - 1}, false, + {&tspb.Timestamp{Seconds: math.MaxInt64, Nanos: 1e9 - 1}, false, time.Unix(math.MaxInt64, 1e9-1).UTC()}, // The largest valid timestamp. - {&tspb.Timestamp{maxValidSeconds - 1, 1e9 - 1}, true, + {&tspb.Timestamp{Seconds: maxValidSeconds - 1, Nanos: 1e9 - 1}, true, time.Date(9999, 12, 31, 23, 59, 59, 1e9-1, time.UTC)}, // The smallest invalid timestamp that is larger than the valid range. - {&tspb.Timestamp{maxValidSeconds, 0}, false, time.Unix(maxValidSeconds, 0).UTC()}, + {&tspb.Timestamp{Seconds: maxValidSeconds, Nanos: 0}, false, time.Unix(maxValidSeconds, 0).UTC()}, // A date before the epoch. - {&tspb.Timestamp{-281836800, 0}, true, utcDate(1961, 1, 26)}, + {&tspb.Timestamp{Seconds: -281836800, Nanos: 0}, true, utcDate(1961, 1, 26)}, // A date after the epoch. - {&tspb.Timestamp{1296000000, 0}, true, utcDate(2011, 1, 26)}, + {&tspb.Timestamp{Seconds: 1296000000, Nanos: 0}, true, utcDate(2011, 1, 26)}, // A date after the epoch, in the middle of the day. - {&tspb.Timestamp{1296012345, 940483}, true, + {&tspb.Timestamp{Seconds: 1296012345, Nanos: 940483}, true, time.Date(2011, 1, 26, 3, 25, 45, 940483, time.UTC)}, } @@ -123,8 +123,8 @@ func TestTimestampString(t *testing.T) { }{ // Not much testing needed because presumably time.Format is // well-tested. - {&tspb.Timestamp{0, 0}, "1970-01-01T00:00:00Z"}, - {&tspb.Timestamp{minValidSeconds - 1, 0}, "(timestamp: seconds:-62135596801 before 0001-01-01)"}, + {&tspb.Timestamp{Seconds: 0, Nanos: 0}, "1970-01-01T00:00:00Z"}, + {&tspb.Timestamp{Seconds: minValidSeconds - 1, Nanos: 0}, "(timestamp: seconds:-62135596801 before 0001-01-01)"}, } { got := TimestampString(test.ts) if got != test.want { @@ -136,3 +136,18 @@ func TestTimestampString(t *testing.T) { func utcDate(year, month, day int) time.Time { return time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC) } + +func TestTimestampNow(t *testing.T) { + // Bracket the expected time. + before := time.Now() + ts := TimestampNow() + after := time.Now() + + tm, err := Timestamp(ts) + if err != nil { + t.Errorf("between %v and %v\nTimestampNow() = %v\nwhich is invalid (%v)", before, after, ts, err) + } + if tm.Before(before) || tm.After(after) { + t.Errorf("between %v and %v\nTimestamp(TimestampNow()) = %v", before, after, tm) + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go index 1328bd29..d1fc4d0b 100644 --- a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go @@ -1,24 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/wrappers/wrappers.proto +// source: google/protobuf/wrappers.proto -/* -Package wrappers is a generated protocol buffer package. 
- -It is generated from these files: - github.com/golang/protobuf/ptypes/wrappers/wrappers.proto - -It has these top-level messages: - DoubleValue - FloatValue - Int64Value - UInt64Value - Int32Value - UInt32Value - BoolValue - StringValue - BytesValue -*/ -package wrappers +package wrappers // import "github.com/golang/protobuf/ptypes/wrappers" import proto "github.com/golang/protobuf/proto" import fmt "fmt" @@ -40,14 +23,36 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // The JSON representation for `DoubleValue` is JSON number. type DoubleValue struct { // The double value. - Value float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + Value float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DoubleValue) Reset() { *m = DoubleValue{} } -func (m *DoubleValue) String() string { return proto.CompactTextString(m) } -func (*DoubleValue) ProtoMessage() {} -func (*DoubleValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" } +func (m *DoubleValue) Reset() { *m = DoubleValue{} } +func (m *DoubleValue) String() string { return proto.CompactTextString(m) } +func (*DoubleValue) ProtoMessage() {} +func (*DoubleValue) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{0} +} +func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" } +func (m *DoubleValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleValue.Unmarshal(m, b) +} +func (m *DoubleValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleValue.Marshal(b, m, deterministic) +} +func (dst *DoubleValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleValue.Merge(dst, src) +} +func (m *DoubleValue) XXX_Size() int { + return xxx_messageInfo_DoubleValue.Size(m) +} +func (m *DoubleValue) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleValue.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleValue proto.InternalMessageInfo func (m *DoubleValue) GetValue() float64 { if m != nil { @@ -61,14 +66,36 @@ func (m *DoubleValue) GetValue() float64 { // The JSON representation for `FloatValue` is JSON number. type FloatValue struct { // The float value. 
- Value float32 `protobuf:"fixed32,1,opt,name=value" json:"value,omitempty"` + Value float32 `protobuf:"fixed32,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FloatValue) Reset() { *m = FloatValue{} } -func (m *FloatValue) String() string { return proto.CompactTextString(m) } -func (*FloatValue) ProtoMessage() {} -func (*FloatValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } -func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" } +func (m *FloatValue) Reset() { *m = FloatValue{} } +func (m *FloatValue) String() string { return proto.CompactTextString(m) } +func (*FloatValue) ProtoMessage() {} +func (*FloatValue) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{1} +} +func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" } +func (m *FloatValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatValue.Unmarshal(m, b) +} +func (m *FloatValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FloatValue.Marshal(b, m, deterministic) +} +func (dst *FloatValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatValue.Merge(dst, src) +} +func (m *FloatValue) XXX_Size() int { + return xxx_messageInfo_FloatValue.Size(m) +} +func (m *FloatValue) XXX_DiscardUnknown() { + xxx_messageInfo_FloatValue.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatValue proto.InternalMessageInfo func (m *FloatValue) GetValue() float32 { if m != nil { @@ -82,14 +109,36 @@ func (m *FloatValue) GetValue() float32 { // The JSON representation for `Int64Value` is JSON string. type Int64Value struct { // The int64 value. - Value int64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value int64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Int64Value) Reset() { *m = Int64Value{} } -func (m *Int64Value) String() string { return proto.CompactTextString(m) } -func (*Int64Value) ProtoMessage() {} -func (*Int64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" } +func (m *Int64Value) Reset() { *m = Int64Value{} } +func (m *Int64Value) String() string { return proto.CompactTextString(m) } +func (*Int64Value) ProtoMessage() {} +func (*Int64Value) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{2} +} +func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" } +func (m *Int64Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int64Value.Unmarshal(m, b) +} +func (m *Int64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int64Value.Marshal(b, m, deterministic) +} +func (dst *Int64Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64Value.Merge(dst, src) +} +func (m *Int64Value) XXX_Size() int { + return xxx_messageInfo_Int64Value.Size(m) +} +func (m *Int64Value) XXX_DiscardUnknown() { + xxx_messageInfo_Int64Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Int64Value proto.InternalMessageInfo func (m *Int64Value) GetValue() int64 { if m != nil { @@ -103,14 +152,36 @@ func (m *Int64Value) GetValue() int64 { // The JSON representation for `UInt64Value` is JSON string. type UInt64Value struct { // The uint64 value. 
- Value uint64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value uint64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UInt64Value) Reset() { *m = UInt64Value{} } -func (m *UInt64Value) String() string { return proto.CompactTextString(m) } -func (*UInt64Value) ProtoMessage() {} -func (*UInt64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } -func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" } +func (m *UInt64Value) Reset() { *m = UInt64Value{} } +func (m *UInt64Value) String() string { return proto.CompactTextString(m) } +func (*UInt64Value) ProtoMessage() {} +func (*UInt64Value) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{3} +} +func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" } +func (m *UInt64Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt64Value.Unmarshal(m, b) +} +func (m *UInt64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt64Value.Marshal(b, m, deterministic) +} +func (dst *UInt64Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt64Value.Merge(dst, src) +} +func (m *UInt64Value) XXX_Size() int { + return xxx_messageInfo_UInt64Value.Size(m) +} +func (m *UInt64Value) XXX_DiscardUnknown() { + xxx_messageInfo_UInt64Value.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt64Value proto.InternalMessageInfo func (m *UInt64Value) GetValue() uint64 { if m != nil { @@ -124,14 +195,36 @@ func (m *UInt64Value) GetValue() uint64 { // The JSON representation for `Int32Value` is JSON number. type Int32Value struct { // The int32 value. - Value int32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value int32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Int32Value) Reset() { *m = Int32Value{} } -func (m *Int32Value) String() string { return proto.CompactTextString(m) } -func (*Int32Value) ProtoMessage() {} -func (*Int32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } -func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" } +func (m *Int32Value) Reset() { *m = Int32Value{} } +func (m *Int32Value) String() string { return proto.CompactTextString(m) } +func (*Int32Value) ProtoMessage() {} +func (*Int32Value) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{4} +} +func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" } +func (m *Int32Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int32Value.Unmarshal(m, b) +} +func (m *Int32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int32Value.Marshal(b, m, deterministic) +} +func (dst *Int32Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int32Value.Merge(dst, src) +} +func (m *Int32Value) XXX_Size() int { + return xxx_messageInfo_Int32Value.Size(m) +} +func (m *Int32Value) XXX_DiscardUnknown() { + xxx_messageInfo_Int32Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Int32Value proto.InternalMessageInfo func (m *Int32Value) GetValue() int32 { if m != nil { @@ -145,14 +238,36 @@ func (m *Int32Value) GetValue() int32 { // The JSON representation for `UInt32Value` is JSON number. 
type UInt32Value struct { // The uint32 value. - Value uint32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value uint32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UInt32Value) Reset() { *m = UInt32Value{} } -func (m *UInt32Value) String() string { return proto.CompactTextString(m) } -func (*UInt32Value) ProtoMessage() {} -func (*UInt32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } -func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" } +func (m *UInt32Value) Reset() { *m = UInt32Value{} } +func (m *UInt32Value) String() string { return proto.CompactTextString(m) } +func (*UInt32Value) ProtoMessage() {} +func (*UInt32Value) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{5} +} +func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" } +func (m *UInt32Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt32Value.Unmarshal(m, b) +} +func (m *UInt32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt32Value.Marshal(b, m, deterministic) +} +func (dst *UInt32Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt32Value.Merge(dst, src) +} +func (m *UInt32Value) XXX_Size() int { + return xxx_messageInfo_UInt32Value.Size(m) +} +func (m *UInt32Value) XXX_DiscardUnknown() { + xxx_messageInfo_UInt32Value.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt32Value proto.InternalMessageInfo func (m *UInt32Value) GetValue() uint32 { if m != nil { @@ -166,14 +281,36 @@ func (m *UInt32Value) GetValue() uint32 { // The JSON representation for `BoolValue` is JSON `true` and `false`. type BoolValue struct { // The bool value. - Value bool `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value bool `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BoolValue) Reset() { *m = BoolValue{} } -func (m *BoolValue) String() string { return proto.CompactTextString(m) } -func (*BoolValue) ProtoMessage() {} -func (*BoolValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } -func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" } +func (m *BoolValue) Reset() { *m = BoolValue{} } +func (m *BoolValue) String() string { return proto.CompactTextString(m) } +func (*BoolValue) ProtoMessage() {} +func (*BoolValue) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{6} +} +func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" } +func (m *BoolValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoolValue.Unmarshal(m, b) +} +func (m *BoolValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoolValue.Marshal(b, m, deterministic) +} +func (dst *BoolValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoolValue.Merge(dst, src) +} +func (m *BoolValue) XXX_Size() int { + return xxx_messageInfo_BoolValue.Size(m) +} +func (m *BoolValue) XXX_DiscardUnknown() { + xxx_messageInfo_BoolValue.DiscardUnknown(m) +} + +var xxx_messageInfo_BoolValue proto.InternalMessageInfo func (m *BoolValue) GetValue() bool { if m != nil { @@ -187,14 +324,36 @@ func (m *BoolValue) GetValue() bool { // The JSON representation for `StringValue` is JSON string. 
type StringValue struct { // The string value. - Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *StringValue) Reset() { *m = StringValue{} } -func (m *StringValue) String() string { return proto.CompactTextString(m) } -func (*StringValue) ProtoMessage() {} -func (*StringValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } -func (*StringValue) XXX_WellKnownType() string { return "StringValue" } +func (m *StringValue) Reset() { *m = StringValue{} } +func (m *StringValue) String() string { return proto.CompactTextString(m) } +func (*StringValue) ProtoMessage() {} +func (*StringValue) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{7} +} +func (*StringValue) XXX_WellKnownType() string { return "StringValue" } +func (m *StringValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringValue.Unmarshal(m, b) +} +func (m *StringValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringValue.Marshal(b, m, deterministic) +} +func (dst *StringValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringValue.Merge(dst, src) +} +func (m *StringValue) XXX_Size() int { + return xxx_messageInfo_StringValue.Size(m) +} +func (m *StringValue) XXX_DiscardUnknown() { + xxx_messageInfo_StringValue.DiscardUnknown(m) +} + +var xxx_messageInfo_StringValue proto.InternalMessageInfo func (m *StringValue) GetValue() string { if m != nil { @@ -208,14 +367,36 @@ func (m *StringValue) GetValue() string { // The JSON representation for `BytesValue` is JSON string. type BytesValue struct { // The bytes value. 
- Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BytesValue) Reset() { *m = BytesValue{} } -func (m *BytesValue) String() string { return proto.CompactTextString(m) } -func (*BytesValue) ProtoMessage() {} -func (*BytesValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } -func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" } +func (m *BytesValue) Reset() { *m = BytesValue{} } +func (m *BytesValue) String() string { return proto.CompactTextString(m) } +func (*BytesValue) ProtoMessage() {} +func (*BytesValue) Descriptor() ([]byte, []int) { + return fileDescriptor_wrappers_16c7c35c009f3253, []int{8} +} +func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" } +func (m *BytesValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BytesValue.Unmarshal(m, b) +} +func (m *BytesValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BytesValue.Marshal(b, m, deterministic) +} +func (dst *BytesValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_BytesValue.Merge(dst, src) +} +func (m *BytesValue) XXX_Size() int { + return xxx_messageInfo_BytesValue.Size(m) +} +func (m *BytesValue) XXX_DiscardUnknown() { + xxx_messageInfo_BytesValue.DiscardUnknown(m) +} + +var xxx_messageInfo_BytesValue proto.InternalMessageInfo func (m *BytesValue) GetValue() []byte { if m != nil { @@ -237,26 +418,26 @@ func init() { } func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/wrappers/wrappers.proto", fileDescriptor0) + proto.RegisterFile("google/protobuf/wrappers.proto", fileDescriptor_wrappers_16c7c35c009f3253) } -var fileDescriptor0 = []byte{ - // 257 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2f, 0x2f, - 0x4a, 0x2c, 0x28, 0x48, 0x2d, 0x42, 0x30, 0xf4, 0xc0, 0x2a, 0x84, 0xf8, 0xd3, 0xf3, 0xf3, 0xd3, - 0x73, 0x52, 0xf5, 0x60, 0xea, 0x95, 0x94, 0xb9, 0xb8, 0x5d, 0xf2, 0x4b, 0x93, 0x72, 0x52, 0xc3, - 0x12, 0x73, 0x4a, 0x53, 0x85, 0x44, 0xb8, 0x58, 0xcb, 0x40, 0x0c, 0x09, 0x46, 0x05, 0x46, 0x0d, - 0xc6, 0x20, 0x08, 0x47, 0x49, 0x89, 0x8b, 0xcb, 0x2d, 0x27, 0x3f, 0xb1, 0x04, 0x8b, 0x1a, 0x26, - 0x24, 0x35, 0x9e, 0x79, 0x25, 0x66, 0x26, 0x58, 0xd4, 0x30, 0xc3, 0xd4, 0x28, 0x73, 0x71, 0x87, - 0xe2, 0x52, 0xc4, 0x82, 0x6a, 0x90, 0xb1, 0x11, 0x16, 0x35, 0xac, 0x68, 0x06, 0x61, 0x55, 0xc4, - 0x0b, 0x53, 0xa4, 0xc8, 0xc5, 0xe9, 0x94, 0x9f, 0x9f, 0x83, 0x45, 0x09, 0x07, 0x92, 0x39, 0xc1, - 0x25, 0x45, 0x99, 0x79, 0xe9, 0x58, 0x14, 0x71, 0x22, 0x39, 0xc8, 0xa9, 0xb2, 0x24, 0xb5, 0x18, - 0x8b, 0x1a, 0x1e, 0xa8, 0x1a, 0xa7, 0x1a, 0x2e, 0xe1, 0xe4, 0xfc, 0x5c, 0x3d, 0xb4, 0xd0, 0x75, - 0xe2, 0x0d, 0x87, 0x06, 0x7f, 0x00, 0x48, 0x24, 0x80, 0x31, 0x4a, 0x8b, 0xf8, 0xa8, 0xfb, 0xc1, - 0xc8, 0xb8, 0x88, 0x89, 0xd9, 0x3d, 0xc0, 0x69, 0x15, 0x93, 0x9c, 0x3b, 0xc4, 0xdc, 0x00, 0xa8, - 0x52, 0xbd, 0xf0, 0xd4, 0x9c, 0x1c, 0xef, 0xbc, 0xfc, 0xf2, 0xbc, 0x10, 0x90, 0x96, 0x24, 0x36, - 0xb0, 0x19, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xee, 0x36, 0x8d, 0xd8, 0x19, 0x02, 0x00, - 0x00, +var 
fileDescriptor_wrappers_16c7c35c009f3253 = []byte{ + // 259 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c, + 0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca, + 0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c, + 0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5, + 0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13, + 0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8, + 0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca, + 0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a, + 0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x0d, + 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x5a, 0xe8, 0x3a, 0xf1, 0x86, 0x43, 0x83, 0x3f, 0x00, 0x24, + 0x12, 0xc0, 0x18, 0xa5, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x9f, + 0x9e, 0x9f, 0x93, 0x98, 0x97, 0x8e, 0x88, 0xaa, 0x82, 0x92, 0xca, 0x82, 0xd4, 0x62, 0x78, 0x8c, + 0xfd, 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, + 0x00, 0x54, 0xa9, 0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, + 0x12, 0x1b, 0xd8, 0x0c, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x19, 0x6c, 0xb9, 0xb8, 0xfe, + 0x01, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/regenerate.sh b/vendor/github.com/golang/protobuf/regenerate.sh new file mode 100755 index 00000000..dc7e2d1f --- /dev/null +++ b/vendor/github.com/golang/protobuf/regenerate.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +set -e + +# Install the working tree's protoc-gen-gen in a tempdir. +tmpdir=$(mktemp -d -t regen-wkt.XXXXXX) +trap 'rm -rf $tmpdir' EXIT +mkdir -p $tmpdir/bin +PATH=$tmpdir/bin:$PATH +GOBIN=$tmpdir/bin go install ./protoc-gen-go + +# Public imports require at least Go 1.9. +supportTypeAliases="" +if go list -f '{{context.ReleaseTags}}' runtime | grep -q go1.9; then + supportTypeAliases=1 +fi + +# Generate various test protos. +PROTO_DIRS=( + conformance/internal/conformance_proto + jsonpb/jsonpb_test_proto + proto + protoc-gen-go/testdata +) +for dir in ${PROTO_DIRS[@]}; do + for p in `find $dir -name "*.proto"`; do + if [[ $p == */import_public/* && ! $supportTypeAliases ]]; then + echo "# $p (skipped)" + continue; + fi + echo "# $p" + protoc -I$dir --go_out=plugins=grpc,paths=source_relative:$dir $p + done +done + +# Deriving the location of the source protos from the path to the +# protoc binary may be a bit odd, but this is what protoc itself does. +PROTO_INCLUDE=$(dirname $(dirname $(which protoc)))/include + +# Well-known types. +WKT_PROTOS=(any duration empty struct timestamp wrappers) +for p in ${WKT_PROTOS[@]}; do + echo "# google/protobuf/$p.proto" + protoc --go_out=paths=source_relative:$tmpdir google/protobuf/$p.proto + cp $tmpdir/google/protobuf/$p.pb.go ptypes/$p + cp $PROTO_INCLUDE/google/protobuf/$p.proto ptypes/$p +done + +# descriptor.proto. 
+echo "# google/protobuf/descriptor.proto" +protoc --go_out=paths=source_relative:$tmpdir google/protobuf/descriptor.proto +cp $tmpdir/google/protobuf/descriptor.pb.go protoc-gen-go/descriptor +cp $PROTO_INCLUDE/google/protobuf/descriptor.proto protoc-gen-go/descriptor diff --git a/vendor/github.com/grafeas/grafeas/.circleci/config.yml b/vendor/github.com/grafeas/grafeas/.circleci/config.yml index 351a9687..9ff381a0 100644 --- a/vendor/github.com/grafeas/grafeas/.circleci/config.yml +++ b/vendor/github.com/grafeas/grafeas/.circleci/config.yml @@ -6,12 +6,15 @@ jobs: build: docker: # specify the version - - image: circleci/golang:1.8 + - image: circleci/golang:1.9 # Specify service dependencies here if necessary # CircleCI maintains a library of pre-built images # documented at https://circleci.com/docs/2.0/circleci-images/ - # - image: circleci/postgres:9.4 - + - image: postgres:9.6 + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_DB: password #### TEMPLATE_NOTE: go expects specific checkout path representing url #### expecting it in the form of #### /go/src/github.com/circleci/go-tool diff --git a/vendor/github.com/grafeas/grafeas/.gitignore b/vendor/github.com/grafeas/grafeas/.gitignore index f5c37d65..5e78b285 100644 --- a/vendor/github.com/grafeas/grafeas/.gitignore +++ b/vendor/github.com/grafeas/grafeas/.gitignore @@ -1 +1,2 @@ +.install.grpc-gateway .install.protoc-gen-go diff --git a/vendor/github.com/grafeas/grafeas/Makefile b/vendor/github.com/grafeas/grafeas/Makefile index 09051f4d..cbfff3e7 100644 --- a/vendor/github.com/grafeas/grafeas/Makefile +++ b/vendor/github.com/grafeas/grafeas/Makefile @@ -30,11 +30,12 @@ vet: v1alpha1/proto/grafeas.pb.go: .install.protoc-gen-go .install.grpc-gateway v1alpha1/proto/grafeas.proto protoc \ -I ./ \ + -I ./include \ -I vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ -I vendor/github.com/googleapis/googleapis \ --go_out=plugins=grpc:. \ --grpc-gateway_out=logtostderr=true:. \ - --swagger_out=logtostderr=true:. \ + --swagger_out=logtostderr=true:. \ v1alpha1/proto/grafeas.proto diff --git a/vendor/github.com/grafeas/grafeas/README.md b/vendor/github.com/grafeas/grafeas/README.md index 42cd2bca..94eb72f3 100644 --- a/vendor/github.com/grafeas/grafeas/README.md +++ b/vendor/github.com/grafeas/grafeas/README.md @@ -3,7 +3,7 @@ Grafeas defines metadata API spec for computing components (e.g., VM images, con ## Running grafeas -To run your own Grafeas instance just follow the [instructions](docs/running_greafeas.md). +To run your own Grafeas instance just follow the [instructions](docs/running_grafeas.md). ## Definition of terms **Notes**: A note is an item or condition that can be found via an analysis or something that is used multiple times in a process. For example, a CVE could be the result of a vulnerability analysis of a Linux package. In a build process, we would store information about our builder in a note. @@ -38,7 +38,7 @@ Specifying a kind in our notes and occurrences makes Grafeas extensible. 
As new ## Examples A vulnerability scanning provider would create a note under their project with the following json for CVE-2017-14159 -``` +```json { "name": "projects/security-scanner/notes/CVE-2017-14159", "shortDescription": "CVE-2017-14159", @@ -163,7 +163,7 @@ A vulnerability scanning provider would create a note under their project with t On scanning and coming across this vulnerability, a security scanning provider would create the following in their customer’s project: -``` +```json { "name": "projects/scanning-customer/occurrences/randomId1234", "resourceUrl": "https://gcr.io/scanning-customer/dockerimage@sha256:hash", @@ -200,6 +200,7 @@ On scanning and coming across this vulnerability, a security scanning provider w ``` ## Resource Urls + Component resource Urls need to be unique per resource as well as immutable. This will mean that the metadata associated with a resourceUrl will always be associated with exactly one component, and what is pointed at should never change. Content addressable resource urls are preferred. In the case with resources that cannot be immutable, a timestamp should be appended. The following table provides examples one could use as resource urls for several component types: @@ -214,3 +215,20 @@ Component Type|Identifier |Example| |NuGet |nuget://module:version |nuget://log4net:9.0.1| |Python |pip://package:version |pip://raven:5.13.0| |RPM |rpm://dist(optional):arch:name:version |rpm://el6:i386:ImageMagick:6.7.2.7-4| + + +## Protobuf API + +The authoritative API for grafeas is the protobuf files. +[https://github.com/Grafeas/Grafeas/tree/master/v1alpha1/proto](https://github.com/Grafeas/Grafeas/tree/master/v1alpha1/proto) +We're currently working from master, and have a versioned path as well. +These paths will ideally make their way to "v1beta" and then "v1", once vetted. + + +## Golang API + +[Documentation of `github.com/Grafeas/Grafeas/v1alpha1/proto`](https://godoc.org/github.com/Grafeas/Grafeas/v1alpha1/proto) is the golang package for the Protobuf API. + +## Swagger API + +To provide a JSON bridge to the Protobuf API, there is now a swagger/OpenAPI representation generated [here](https://raw.githubusercontent.com/Grafeas/Grafeas/master/v1alpha1/proto/grafeas.swagger.json). diff --git a/vendor/github.com/grafeas/grafeas/config.yaml.sample b/vendor/github.com/grafeas/grafeas/config.yaml.sample index 1fbd7f2c..1ef7f2b2 100644 --- a/vendor/github.com/grafeas/grafeas/config.yaml.sample +++ b/vendor/github.com/grafeas/grafeas/config.yaml.sample @@ -13,10 +13,33 @@ # limitations under the License. grafeas: - server: - # Endpoint address, e.g. localhost:10000 - address: localhost:10000 + # Grafeas api server config + api: + # Endpoint address + address: "0.0.0.0:8080" # PKI configuration (optional) cafile: keyfile: certfile: + # CORS configuration (optional) + cors_allowed_origins: + # - "http://example.net" + # Supported storage types are "memstore" and "postgres" + storage_type: "memstore" + # Postgres options + postgres: + # Database host + host: "127.0.0.1:5432" + # Database name + dbname: "postgres" + # Database username + user: "postgres" + # Database password + password: "password" + # Valid sslmodes are disable, allow, prefer, require, verify-ca, verify-full. + # See https://www.postgresql.org/docs/current/static/libpq-connect.html for details + sslmode: "require" + # 32-bit URL-safe base64 key used to encrypt pagination tokens + # If one is not provided, it will be generated. + # Multiple grafeas instances in the same cluster need the same value.
+ paginationkey: diff --git a/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md b/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md index f4a05c70..0dc604f0 100644 --- a/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md +++ b/vendor/github.com/grafeas/grafeas/docs/running_grafeas.md @@ -6,13 +6,13 @@ To start the server go to `samples/server/go-server/api/server/main` and execute go run main.go -This will start the Grafeas gRPC and REST API:s on `localhost:10000`. To start grafeas with a custom configuration use the `-config` flag (e.g. `-config config.yaml`). The root directory includes a `config.yaml.sample` that can be used as a starting point when creating your own config file. +This will start the Grafeas gRPC and REST API:s on `localhost:8080`. To start grafeas with a custom configuration use the `-config` flag (e.g. `-config config.yaml`). The root directory includes a `config.yaml.sample` that can be used as a starting point when creating your own config file. ### Access REST API with curl Grafeas provides both a REST API and a gRPC API. Here is an example of using the REST API to list projects in Grafeas. -`curl http://localhost:10000/v1alpha1/projects` +`curl http://localhost:8080/v1alpha1/projects` ### Access gRPC API with a go client @@ -30,7 +30,7 @@ import ( ) func main() { - conn, err := grpc.Dial("localhost:10000", grpc.WithInsecure()) + conn, err := grpc.Dial("localhost:8080", grpc.WithInsecure()) defer conn.Close() client := pb.NewGrafeasClient(conn) // List notes @@ -90,7 +90,7 @@ Add the following to your config file When using curl with a self signed certificate you need to add `-k/--insecure` and specify the client certificate. -`curl -k --cert path/to/client.pem https://localhost:10000/v1alpha1/projects` +`curl -k --cert path/to/client.pem https://localhost:8080/v1alpha1/projects` ### Access gRPC with a go client @@ -138,7 +138,7 @@ func main() { } tlsConfig.BuildNameToCertificate() creds := credentials.NewTLS(tlsConfig) - conn, err := grpc.Dial("localhost:10000", grpc.WithTransportCredentials(creds)) + conn, err := grpc.Dial("localhost:8080", grpc.WithTransportCredentials(creds)) client := pb.NewGrafeasClient(conn) // List notes @@ -156,4 +156,14 @@ func main() { log.Println("Project does not contain any notes") } } -``` \ No newline at end of file +``` + +## Enable CORS on the sample server. + +### Update config + +Add the following to your config file below the `api` key. 
+ + cors_allowed_origins: + - "https://some.example.tld" + - "https://*.example.net" diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md index 3470cf68..24945820 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/README.md @@ -1,11 +1,10 @@ # Grafeas API Reference Implementation -This is a reference implementation of the [Grafeas API Spec](https://github.com/Grafeas/Grafeas/blob/master/README) +This is a reference implementation of the [Grafeas API Spec](https://github.com/grafeas/grafeas/blob/master/README.md) ## Overview This reference implementation comes with the following caveats: -* Storage: map backed in memory server storage * No ACLs are used in this implementation * No authorization is in place #28 * Filtering in list methods is not currently supported #29 diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/api/api.go similarity index 83% rename from vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go rename to vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/api/api.go index 9ed4b23b..1ad015ba 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/server/server.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/api/api.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package server +package api import ( "context" @@ -25,29 +25,31 @@ import ( "strings" "github.com/cockroachdb/cmux" - "github.com/grafeas/grafeas/samples/server/go-server/api/server/storage" "github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1" + server "github.com/grafeas/grafeas/server-go" pb "github.com/grafeas/grafeas/v1alpha1/proto" "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/rs/cors" opspb "google.golang.org/genproto/googleapis/longrunning" "google.golang.org/grpc" "google.golang.org/grpc/credentials" ) type Config struct { - Address string `yaml:"address"` // Endpoint address, e.g. localhost:10000 - CertFile string `yaml:"certfile"` // A PEM eoncoded certificate file - KeyFile string `yaml:"keyfile"` // A PEM encoded private key file - CAFile string `yaml:"cafile"` // A PEM eoncoded CA's certificate file + Address string `yaml:"address"` // Endpoint address, e.g. localhost:8080 + CertFile string `yaml:"certfile"` // A PEM eoncoded certificate file + KeyFile string `yaml:"keyfile"` // A PEM encoded private key file + CAFile string `yaml:"cafile"` // A PEM eoncoded CA's certificate file + CORSAllowedOrigins []string `yaml:"cors_allowed_origins"` // Permitted CORS origins. 
} // Run initializes grpc and grpc gateway api services on the same address -func Run(config *Config) { +func Run(config *Config, storage *server.Storager) { l, err := net.Listen("tcp", config.Address) if err != nil { log.Fatalln("could not listen to address", config.Address) } - log.Println("starting grpc server") + log.Printf("starting grpc server on %s", config.Address) var ( apiHandler http.Handler @@ -74,7 +76,7 @@ func Run(config *Config) { apiListener = tls.NewListener(tcpMux.Match(cmux.Any()), tlsConfig) go func() { handleShutdown(tcpMux.Serve()) }() - grpcServer := newGrpcServer(tlsConfig) + grpcServer := newGrpcServer(tlsConfig, storage) gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), tlsConfig) httpMux.Handle("/", gwmux) @@ -86,7 +88,7 @@ func Run(config *Config) { apiListener = tcpMux.Match(cmux.Any()) go func() { handleShutdown(tcpMux.Serve()) }() - grpcServer := newGrpcServer(nil) + grpcServer := newGrpcServer(nil, storage) go func() { handleShutdown(grpcServer.Serve(grpcL)) }() gwmux := newGrpcGatewayServer(ctx, apiListener.Addr().String(), nil) @@ -97,8 +99,14 @@ func Run(config *Config) { log.Println("grpc server is configured without client certificate authentication") } + // Setup the CORS middleware. If `config.CORSAllowedOrigins` is empty, no CORS + // Origins will be allowed through. + cors := cors.New(cors.Options{ + AllowedOrigins: config.CORSAllowedOrigins, + }) + srv = &http.Server{ - Handler: apiHandler, + Handler: cors.Handler(apiHandler), TLSConfig: tlsConfig, } @@ -116,7 +124,7 @@ func handleShutdown(err error) { } } -func newGrpcServer(tlsConfig *tls.Config) *grpc.Server { +func newGrpcServer(tlsConfig *tls.Config, storage *server.Storager) *grpc.Server { grpcOpts := []grpc.ServerOption{} if tlsConfig != nil { @@ -124,7 +132,7 @@ func newGrpcServer(tlsConfig *tls.Config) *grpc.Server { } grpcServer := grpc.NewServer(grpcOpts...) - g := v1alpha1.Grafeas{S: storage.NewMemStore()} + g := v1alpha1.Grafeas{S: *storage} pb.RegisterGrafeasServer(grpcServer, &g) pb.RegisterGrafeasProjectsServer(grpcServer, &g) opspb.RegisterOperationsServer(grpcServer, &g) diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go index 8013ad5a..8c0deaab 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/config/config.go @@ -15,9 +15,13 @@ package config import ( + "errors" "io/ioutil" + "log" - "github.com/grafeas/grafeas/samples/server/go-server/api/server/server" + fernet "github.com/fernet/fernet-go" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/api" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/storage" "gopkg.in/yaml.v2" ) @@ -28,18 +32,22 @@ type file struct { // Config is the global configuration for an instance of Grafeas. type config struct { - Server *server.Config `yaml:"server"` + API *api.Config `yaml:"api"` + StorageType string `yaml:"storage_type"` // Supported storage types are "memstore" and "postgres" + PgSQLConfig *storage.PgSQLConfig `yaml:"postgres"` } // DefaultConfig is a configuration that can be used as a fallback value. 
func defaultConfig() *config { return &config{ - &server.Config{ - Address: "localhost:10000", + API: &api.Config{ + Address: "localhost:8080", CertFile: "", KeyFile: "", CAFile: "", }, + StorageType: "memstore", + PgSQLConfig: &storage.PgSQLConfig{}, } } @@ -58,5 +66,24 @@ func LoadConfig(fileName string) (*config, error) { if err != nil { return nil, err } - return configFile.Grafeas, nil + config := configFile.Grafeas + + if config.StorageType == "postgres" { + // Generate a pagination key if none is provided. + if config.PgSQLConfig.PaginationKey == "" { + log.Println("pagination key is empty, generating...") + var key fernet.Key + if err = key.Generate(); err != nil { + return nil, err + } + config.PgSQLConfig.PaginationKey = key.Encode() + } else { + _, err = fernet.DecodeKey(config.PgSQLConfig.PaginationKey) + if err != nil { + err = errors.New("Invalid Pagination key; must be 32-bit URL-safe base64") + return nil, err + } + } + } + return config, nil } diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go index 923f1054..a7f42832 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/main/main.go @@ -18,8 +18,10 @@ import ( "flag" "log" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/api" "github.com/grafeas/grafeas/samples/server/go-server/api/server/config" - "github.com/grafeas/grafeas/samples/server/go-server/api/server/server" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/storage" + server "github.com/grafeas/grafeas/server-go" ) var ( @@ -30,7 +32,21 @@ func main() { flag.Parse() config, err := config.LoadConfig(*configFile) if err != nil { - log.Fatalf("Failed to load config file") + log.Fatalf("Failed to load config file: %s", err) } - server.Run(config.Server) + storage := createStorage(config.StorageType, config.PgSQLConfig) + api.Run(config.API, &storage) +} + +func createStorage(storageType string, pgSQLConfig *storage.PgSQLConfig) server.Storager { + switch storageType { + case "memstore": + return storage.NewMemStore() + case "postgres": + return storage.NewPgSQLStore(pgSQLConfig) + default: + log.Fatalf("Storage type unsupported: %s", storageType) + } + + return nil } diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go index ed6db860..01b25ba0 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore.go @@ -16,6 +16,8 @@ package storage import ( "fmt" + "sort" + "strconv" "strings" "sync" @@ -78,8 +80,9 @@ func (m *memStore) GetProject(pID string) (*pb.Project, error) { return &pb.Project{Name: name.FormatProject(pID)}, nil } -// ListProjects returns the project id for all projects from the mem store -func (m *memStore) ListProjects(filters string) []*pb.Project { +// ListProjects returns up to pageSize number of projects beginning at pageToken (or from +// start if pageToken is the emtpy string). 
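// Illustrative sketch, not part of the upstream change: with the paged
// signature introduced below, a hypothetical caller (here `s` is a *memStore
// and the page size of 100 is arbitrary) could drain every project by feeding
// each returned token back in until an empty page comes back:
//
//	var all []*pb.Project
//	token := ""
//	for {
//		page, next, err := s.ListProjects("", 100, token)
//		if err != nil {
//			break
//		}
//		all = append(all, page...)
//		if len(page) == 0 {
//			break
//		}
//		token = next
//	}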
+func (m *memStore) ListProjects(filter string, pageSize int, pageToken string) ([]*pb.Project, string, error) { m.RLock() defer m.RUnlock() projects := make([]*pb.Project, len(m.projects)) @@ -88,7 +91,12 @@ func (m *memStore) ListProjects(filters string) []*pb.Project { projects[i] = &pb.Project{Name: name.FormatProject(k)} i++ } - return projects + sort.Slice(projects, func(i, j int) bool { + return projects[i].Name < projects[j].Name + }) + startPos := parsePageToken(pageToken, 0) + endPos := min(startPos+pageSize, len(projects)) + return projects[startPos:endPos], strconv.Itoa(endPos), nil } // CreateOccurrence adds the specified occurrence to the mem store @@ -138,8 +146,9 @@ func (m *memStore) GetOccurrence(pID, oID string) (*pb.Occurrence, error) { return o, nil } -// ListOccurrences returns the occurrences for this project ID (pID) -func (m *memStore) ListOccurrences(pID, filters string) []*pb.Occurrence { +// ListOccurrences returns up to pageSize number of occurrences for this project (pID) beginning +// at pageToken (or from start if pageToken is the empty string). +func (m *memStore) ListOccurrences(pID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error) { os := []*pb.Occurrence{} m.RLock() defer m.RUnlock() @@ -148,7 +157,12 @@ func (m *memStore) ListOccurrences(pID, filters string) []*pb.Occurrence { os = append(os, o) } } - return os + sort.Slice(os, func(i, j int) bool { + return os[i].Name < os[j].Name + }) + startPos := parsePageToken(pageToken, 0) + endPos := min(startPos+pageSize, len(os)) + return os[startPos:endPos], strconv.Itoa(endPos), nil } // CreateNote adds the specified note to the mem store @@ -214,8 +228,9 @@ func (m *memStore) GetNoteByOccurrence(pID, oID string) (*pb.Note, error) { return n, nil } -// ListNotes returns the notes for for this project (pID) -func (m *memStore) ListNotes(pID, filters string) []*pb.Note { +// ListNotes returns up to pageSize number of notes for this project (pID) beginning +// at pageToken (or from start if pageToken is the empty string). +func (m *memStore) ListNotes(pID, filters string, pageSize int, pageToken string) ([]*pb.Note, string, error) { ns := []*pb.Note{} m.RLock() defer m.RUnlock() @@ -224,17 +239,23 @@ func (m *memStore) ListNotes(pID, filters string) []*pb.Note { ns = append(ns, n) } } - return ns + sort.Slice(ns, func(i, j int) bool { + return ns[i].Name < ns[j].Name + }) + startPos := parsePageToken(pageToken, 0) + endPos := min(startPos+pageSize, len(ns)) + return ns[startPos:endPos], strconv.Itoa(endPos), nil } -// ListNoteOccurrences returns the occcurrences on the particular note (nID) for this project (pID) -func (m *memStore) ListNoteOccurrences(pID, nID, filters string) ([]*pb.Occurrence, error) { +// ListNoteOccurrences returns up to pageSize number of occurrences on the particular note (nID) +// for this project (pID) beginning at pageToken (or from start if pageToken is the empty string).
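// Illustrative note, not part of the upstream change: the page tokens handed
// back by these List methods are plain integer offsets produced with
// strconv.Itoa, and parsePageToken (defined further down in this file) falls
// back to its default value for anything it cannot parse:
//
//	parsePageToken("25", 0)   // 25: resume at the 26th item
//	parsePageToken("", 0)     // 0: first page
//	parsePageToken("junk", 0) // 0: malformed tokens restart from the beginning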
+func (m *memStore) ListNoteOccurrences(pID, nID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error) { // TODO: use filters m.RLock() defer m.RUnlock() // Verify that note exists if _, err := m.GetNote(pID, nID); err != nil { - return nil, err + return nil, "", err } nName := name.FormatNote(pID, nID) os := []*pb.Occurrence{} @@ -243,7 +264,12 @@ func (m *memStore) ListNoteOccurrences(pID, nID, filters string) ([]*pb.Occurren os = append(os, o) } } - return os, nil + sort.Slice(os, func(i, j int) bool { + return os[i].Name < os[j].Name + }) + startPos := parsePageToken(pageToken, 0) + endPos := min(startPos+pageSize, len(os)) + return os[startPos:endPos], strconv.Itoa(endPos), nil } // GetOperation returns the operation with pID and oID @@ -293,8 +319,9 @@ func (m *memStore) UpdateOperation(pID, opID string, op *opspb.Operation) error return nil } -// ListOperations returns the operations for this project (pID) -func (m *memStore) ListOperations(pID, filters string) []*opspb.Operation { +// ListOperations returns up to pageSize number of operations for this project (pID) beginning +// at pageToken (or from start if pageToken is the emtpy string). +func (m *memStore) ListOperations(pID, filters string, pageSize int, pageToken string) ([]*opspb.Operation, string, error) { ops := []*opspb.Operation{} m.RLock() defer m.RUnlock() @@ -303,5 +330,31 @@ func (m *memStore) ListOperations(pID, filters string) []*opspb.Operation { ops = append(ops, op) } } - return ops + sort.Slice(ops, func(i, j int) bool { + return ops[i].Name < ops[j].Name + }) + startPos := parsePageToken(pageToken, 0) + endPos := min(startPos+pageSize, len(ops)) + return ops[startPos:endPos], strconv.Itoa(endPos), nil +} + +// Parses the page token to an int. Returns defaultValue if parsing fails +func parsePageToken(pageToken string, defaultValue int) int { + if pageToken == "" { + return defaultValue + } + parsed, err := strconv.Atoi(pageToken) + if err != nil { + return defaultValue + } + return parsed +} + +// Returns the smallest of a and b +func min(a, b int) int { + if a < b { + return a + } else { + return b + } } diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go index 8f84262f..0c2d135b 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/memstore_test.go @@ -15,554 +15,14 @@ package storage import ( - "fmt" - "sort" + server "github.com/grafeas/grafeas/server-go" - "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" - "github.com/grafeas/grafeas/samples/server/go-server/api/server/testing" - - "reflect" - "strings" "testing" - - pb "github.com/grafeas/grafeas/v1alpha1/proto" - opspb "google.golang.org/genproto/googleapis/longrunning" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) -func TestCreateProject(t *testing.T) { - s := NewMemStore() - p := "myproject" - if err := s.CreateProject(p); err != nil { - t.Errorf("CreateProject got %v want success", err) - } - // Try to insert the same project twice, expect failure. 
- if err := s.CreateProject(p); err == nil { - t.Errorf("CreateProject got success, want Error") - } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { - t.Errorf("CreateProject got code %v want %v", s.Code(), codes.AlreadyExists) - } -} - -func TestCreateNote(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Errorf("CreateNote got %v want success", err) - } - // Try to insert the same note twice, expect failure. - if err := s.CreateNote(n); err == nil { - t.Errorf("CreateNote got success, want Error") - } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { - t.Errorf("CreateNote got code %v want %v", s.Code(), codes.AlreadyExists) - } -} - -func TestCreateOccurrence(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - oPID := "occurrence-project" - o := testutil.Occurrence(oPID, n.Name) - if err := s.CreateOccurrence(o); err != nil { - t.Errorf("CreateOccurrence got %v want success", err) - } - // Try to insert the same occurrence twice, expect failure. - if err := s.CreateOccurrence(o); err == nil { - t.Errorf("CreateOccurrence got success, want Error") - } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { - t.Errorf("CreateOccurrence got code %v want %v", s.Code(), codes.AlreadyExists) - } - pID, oID, err := name.ParseOccurrence(o.Name) - if err != nil { - t.Fatalf("Error parsing projectID and occurrenceID %v", err) - } - if got, err := s.GetOccurrence(pID, oID); err != nil { - t.Fatalf("GetOccurrence got %v, want success", err) - } else if !reflect.DeepEqual(got, o) { - t.Errorf("GetOccurrence got %v, want %v", got, o) - } -} - -func TestCreateOperation(t *testing.T) { - s := NewMemStore() - opPID := "vulnerability-scanner-a" - op := testutil.Operation(opPID) - if err := s.CreateOperation(op); err != nil { - t.Errorf("CreateOperation got %v want success", err) - } - // Try to insert the same note twice, expect failure. 
- if err := s.CreateOperation(op); err == nil { - t.Errorf("CreateOperation got success, want Error") - } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { - t.Errorf("CreateOperation got code %v want %v", s.Code(), codes.AlreadyExists) - } -} -func TestDeleteProject(t *testing.T) { - s := NewMemStore() - pID := "myproject" - // Delete before the note exists - if err := s.DeleteProject(pID); err == nil { - t.Error("Deleting nonexistant note got success, want error") - } - if err := s.CreateProject(pID); err != nil { - t.Fatalf("CreateProject got %v want success", err) - } - - if err := s.DeleteProject(pID); err != nil { - t.Errorf("DeleteProject got %v, want success ", err) - } -} - -func TestDeleteOccurrence(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - oPID := "occurrence-project" - o := testutil.Occurrence(oPID, n.Name) - // Delete before the occurrence exists - pID, oID, err := name.ParseOccurrence(o.Name) - if err != nil { - t.Fatalf("Error parsing occurrence %v", err) - } - if err := s.DeleteOccurrence(pID, oID); err == nil { - t.Error("Deleting nonexistant occurrence got success, want error") - } - if err := s.CreateOccurrence(o); err != nil { - t.Fatalf("CreateOccurrence got %v want success", err) - } - if err := s.DeleteOccurrence(pID, oID); err != nil { - t.Errorf("DeleteOccurrence got %v, want success ", err) - } -} - -func TestUpdateOccurrence(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - oPID := "occurrence-project" - o := testutil.Occurrence(oPID, n.Name) - pID, oID, err := name.ParseOccurrence(o.Name) - if err != nil { - t.Fatalf("Error parsing projectID and occurrenceID %v", err) - } - if err := s.UpdateOccurrence(pID, oID, o); err == nil { - t.Fatal("UpdateOccurrence got success want error") - } - if err := s.CreateOccurrence(o); err != nil { - t.Fatalf("CreateOccurrence got %v want success", err) - } - if got, err := s.GetOccurrence(pID, oID); err != nil { - t.Fatalf("GetOccurrence got %v, want success", err) - } else if !reflect.DeepEqual(got, o) { - t.Errorf("GetOccurrence got %v, want %v", got, o) - } - - o2 := o - o2.GetVulnerabilityDetails().CvssScore = 1.0 - if err := s.UpdateOccurrence(pID, oID, o2); err != nil { - t.Fatalf("UpdateOccurrence got %v want success", err) - } - - if got, err := s.GetOccurrence(pID, oID); err != nil { - t.Fatalf("GetOccurrence got %v, want success", err) - } else if !reflect.DeepEqual(got, o2) { - t.Errorf("GetOccurrence got %v, want %v", got, o2) - } -} - -func TestDeleteNote(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - // Delete before the note exists - pID, oID, err := name.ParseNote(n.Name) - if err != nil { - t.Fatalf("Error parsing note %v", err) - } - if err := s.DeleteNote(pID, oID); err == nil { - t.Error("Deleting nonexistant note got success, want error") - } - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - - if err := s.DeleteNote(pID, oID); err != nil { - t.Errorf("DeleteNote got %v, want success ", err) - } -} - -func TestUpdateNote(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - - pID, nID, err := name.ParseNote(n.Name) - if err != nil { - 
t.Fatalf("Error parsing projectID and noteID %v", err) - } - if err := s.UpdateNote(pID, nID, n); err == nil { - t.Fatal("UpdateNote got success want error") - } - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - if got, err := s.GetNote(pID, nID); err != nil { - t.Fatalf("GetNote got %v, want success", err) - } else if !reflect.DeepEqual(got, n) { - t.Errorf("GetNote got %v, want %v", got, n) - } - - n2 := n - n2.GetVulnerabilityType().CvssScore = 1.0 - if err := s.UpdateNote(pID, nID, n2); err != nil { - t.Fatalf("UpdateNote got %v want success", err) - } - - if got, err := s.GetNote(pID, nID); err != nil { - t.Fatalf("GetNote got %v, want success", err) - } else if !reflect.DeepEqual(got, n2) { - t.Errorf("GetNote got %v, want %v", got, n2) - } -} - -func TestGetProject(t *testing.T) { - s := NewMemStore() - pID := "myproject" - // Try to get project before it has been created, expect failure. - if _, err := s.GetProject(pID); err == nil { - t.Errorf("GetProject got success, want Error") - } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { - t.Errorf("GetProject got code %v want %v", s.Code(), codes.NotFound) - } - s.CreateProject(pID) - if p, err := s.GetProject(pID); err != nil { - t.Fatalf("GetProject got %v want success", err) - } else if p.Name != name.FormatProject(pID) { - t.Fatalf("Got %s want %s", p.Name, pID) - } -} - -func TestGetOccurrence(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - oPID := "occurrence-project" - o := testutil.Occurrence(oPID, n.Name) - pID, oID, err := name.ParseOccurrence(o.Name) - if err != nil { - t.Fatalf("Error parsing occurrence %v", err) - } - if _, err := s.GetOccurrence(pID, oID); err == nil { - t.Fatal("GetOccurrence got success, want error") - } - if err := s.CreateOccurrence(o); err != nil { - t.Errorf("CreateOccurrence got %v, want Success", err) - } - if got, err := s.GetOccurrence(pID, oID); err != nil { - t.Fatalf("GetOccurrence got %v, want success", err) - } else if !reflect.DeepEqual(got, o) { - t.Errorf("GetOccurrence got %v, want %v", got, o) - } -} - -func TestGetNote(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - - pID, nID, err := name.ParseNote(n.Name) - if err != nil { - t.Fatalf("Error parsing note %v", err) - } - if _, err := s.GetNote(pID, nID); err == nil { - t.Fatal("GetNote got success, want error") - } - if err := s.CreateNote(n); err != nil { - t.Errorf("CreateNote got %v, want Success", err) - } - if got, err := s.GetNote(pID, nID); err != nil { - t.Fatalf("GetNote got %v, want success", err) - } else if !reflect.DeepEqual(got, n) { - t.Errorf("GetNote got %v, want %v", got, n) - } -} - -func TestGetNoteByOccurrence(t *testing.T) { - s := NewMemStore() - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - oPID := "occurrence-project" - o := testutil.Occurrence(oPID, n.Name) - pID, oID, err := name.ParseOccurrence(o.Name) - if err != nil { - t.Fatalf("Error parsing occurrence %v", err) - } - if _, err := s.GetNoteByOccurrence(pID, oID); err == nil { - t.Fatal("GetNoteByOccurrence got success, want error") - } - if err := s.CreateOccurrence(o); err != nil { - t.Errorf("CreateOccurrence got %v, want Success", err) - } - if got, err := 
s.GetNoteByOccurrence(pID, oID); err != nil { - t.Fatalf("GetNoteByOccurrence got %v, want success", err) - } else if !reflect.DeepEqual(got, n) { - t.Errorf("GetNoteByOccurrence got %v, want %v", got, n) - } -} - -func TestGetOperation(t *testing.T) { - s := NewMemStore() - oPID := "vulnerability-scanner-a" - o := testutil.Operation(oPID) - - pID, oID, err := name.ParseOperation(o.Name) - if err != nil { - t.Fatalf("Error parsing operation %v", err) - } - if _, err := s.GetOperation(pID, oID); err == nil { - t.Fatal("GetOperation got success, want error") - } - if err := s.CreateOperation(o); err != nil { - t.Errorf("CreateOperation got %v, want Success", err) - } - if got, err := s.GetOperation(pID, oID); err != nil { - t.Fatalf("GetOperation got %v, want success", err) - } else if !reflect.DeepEqual(got, o) { - t.Errorf("GetOperation got %v, want %v", got, o) - } -} - -func TestDeleteOperation(t *testing.T) { - s := NewMemStore() - oPID := "vulnerability-scanner-a" - o := testutil.Operation(oPID) - // Delete before the operation exists - pID, oID, err := name.ParseOperation(o.Name) - if err != nil { - t.Fatalf("Error parsing note %v", err) - } - if err := s.DeleteOperation(pID, oID); err == nil { - t.Error("Deleting nonexistant operation got success, want error") - } - if err := s.CreateOperation(o); err != nil { - t.Fatalf("CreateOperation got %v want success", err) - } - - if err := s.DeleteOperation(pID, oID); err != nil { - t.Errorf("DeleteOperation got %v, want success ", err) - } -} - -func TestUpdateOperation(t *testing.T) { - s := NewMemStore() - oPID := "vulnerability-scanner-a" - o := testutil.Operation(oPID) - - pID, oID, err := name.ParseOperation(o.Name) - if err != nil { - t.Fatalf("Error parsing projectID and operationID %v", err) - } - if err := s.UpdateOperation(pID, oID, o); err == nil { - t.Fatal("UpdateOperation got success want error") - } - if err := s.CreateOperation(o); err != nil { - t.Fatalf("CreateOperation got %v want success", err) - } - if got, err := s.GetOperation(pID, oID); err != nil { - t.Fatalf("GetOperation got %v, want success", err) - } else if !reflect.DeepEqual(got, o) { - t.Errorf("GetOperation got %v, want %v", got, o) - } - - o2 := o - o2.Done = true - if err := s.UpdateOperation(pID, oID, o2); err != nil { - t.Fatalf("UpdateOperation got %v want success", err) - } - - if got, err := s.GetOperation(pID, oID); err != nil { - t.Fatalf("GetOperation got %v, want success", err) - } else if !reflect.DeepEqual(got, o2) { - t.Errorf("GetOperation got %v, want %v", got, o2) - } -} - -func TestListProjects(t *testing.T) { - s := NewMemStore() - wantProjectNames := []string{} - for i := 0; i < 20; i++ { - pID := fmt.Sprint("Project", i) - if err := s.CreateProject(pID); err != nil { - t.Fatalf("CreateProject got %v want success", err) - } - wantProjectNames = append(wantProjectNames, name.FormatProject(pID)) - } - filter := "filters_are_yet_to_be_implemented" - gotProjects := s.ListProjects(filter) - if len(gotProjects) != 20 { - t.Errorf("ListProjects got %v operations, want 20", len(gotProjects)) - } - gotProjectNames := make([]string, len(gotProjects)) - for i, project := range gotProjects { - gotProjectNames[i] = project.Name - } - // Sort to handle that wantProjectNames are not guaranteed to be listed in insertion order - sort.Strings(wantProjectNames) - sort.Strings(gotProjectNames) - if !reflect.DeepEqual(gotProjectNames, wantProjectNames) { - t.Errorf("ListProjects got %v want %v", gotProjectNames, wantProjectNames) - } -} - -func 
TestListOperations(t *testing.T) { - s := NewMemStore() - ops := []opspb.Operation{} - findProject := "findThese" - dontFind := "dontFind" - for i := 0; i < 20; i++ { - o := testutil.Operation("") - if i < 5 { - o.Name = name.FormatOperation(findProject, string(i)) - } else { - o.Name = name.FormatOperation(dontFind, string(i)) - } - if err := s.CreateOperation(o); err != nil { - t.Fatalf("CreateOperation got %v want success", err) - } - ops = append(ops, *o) - } - gotOs := s.ListOperations(findProject, "") - - if len(gotOs) != 5 { - t.Errorf("ListOperations got %v operations, want 5", len(gotOs)) - } - for _, o := range gotOs { - want := name.FormatProject(findProject) - if !strings.HasPrefix(o.Name, want) { - t.Errorf("ListOperations got %v want prefix %v", o.Name, want) - } - } -} - -func TestListNotes(t *testing.T) { - s := NewMemStore() - ns := []*pb.Note{} - findProject := "findThese" - dontFind := "dontFind" - for i := 0; i < 20; i++ { - n := testutil.Note("") - if i < 5 { - n.Name = name.FormatNote(findProject, string(i)) - } else { - n.Name = name.FormatNote(dontFind, string(i)) - } - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - ns = append(ns, n) - } - gotNs := s.ListNotes(findProject, "") - if len(gotNs) != 5 { - t.Errorf("ListNotes got %v operations, want 5", len(gotNs)) - } - for _, n := range gotNs { - want := name.FormatProject(findProject) - if !strings.HasPrefix(n.Name, want) { - t.Errorf("ListNotes got %v want %v", n.Name, want) - } - } -} - -func TestListOccurrences(t *testing.T) { - s := NewMemStore() - os := []*pb.Occurrence{} - findProject := "findThese" - dontFind := "dontFind" - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - for i := 0; i < 20; i++ { - oPID := "_" - o := testutil.Occurrence(oPID, n.Name) - if i < 5 { - o.Name = name.FormatOccurrence(findProject, string(i)) - } else { - o.Name = name.FormatOccurrence(dontFind, string(i)) - } - if err := s.CreateOccurrence(o); err != nil { - t.Fatalf("CreateOccurrence got %v want success", err) - } - os = append(os, o) - } - gotOs := s.ListOccurrences(findProject, "") - if len(gotOs) != 5 { - t.Errorf("ListOccurrences got %v Occurrences, want 5", len(gotOs)) - } - for _, o := range gotOs { - want := name.FormatProject(findProject) - if !strings.HasPrefix(o.Name, want) { - t.Errorf("ListOccurrences got %v want %v", o.Name, want) - } - } -} - -func TestListNoteOccurrences(t *testing.T) { - s := NewMemStore() - os := []*pb.Occurrence{} - findProject := "findThese" - dontFind := "dontFind" - nPID := "vulnerability-scanner-a" - n := testutil.Note(nPID) - if err := s.CreateNote(n); err != nil { - t.Fatalf("CreateNote got %v want success", err) - } - for i := 0; i < 20; i++ { - oPID := "_" - o := testutil.Occurrence(oPID, n.Name) - if i < 5 { - o.Name = name.FormatOccurrence(findProject, string(i)) - } else { - o.Name = name.FormatOccurrence(dontFind, string(i)) - } - if err := s.CreateOccurrence(o); err != nil { - t.Fatalf("CreateOccurrence got %v want success", err) - } - os = append(os, o) - } - pID, nID, err := name.ParseNote(n.Name) - if err != nil { - t.Fatalf("Error parsing note name %v", err) - } - gotOs, err := s.ListNoteOccurrences(pID, nID, "") - if err != nil { - t.Fatalf("ListNoteOccurrences got %v want success", err) - } - if len(gotOs) != 20 { - t.Errorf("ListNoteOccurrences got %v Occurrences, want 20", len(gotOs)) - } - for _, o := range gotOs { - 
if o.NoteName != n.Name { - t.Errorf("ListNoteOccurrences got %v want %v", o.Name, o.NoteName) - } +func TestMemStore(t *testing.T) { + createMemStore := func(t *testing.T) (server.Storager, func()) { + return NewMemStore(), func() {} } + doTestStorager(t, createMemStore) } diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlconfig.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlconfig.go new file mode 100644 index 00000000..1a5ebb14 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlconfig.go @@ -0,0 +1,36 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import "fmt" + +type PgSQLConfig struct { + Host string `yaml:"host"` + DbName string `yaml:"dbname"` + User string `yaml:"user"` + Password string `yaml:"password"` + // Valid sslmodes: disable, allow, prefer, require, verify-ca, verify-full. + // See https://www.postgresql.org/docs/current/static/libpq-connect.html for details + SSLMode string `yaml:"sslmode"` + PaginationKey string `yaml:"paginationkey"` +} + +func createSourceString(config *PgSQLConfig) string { + return fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s", config.User, config.Password, config.Host, config.SSLMode) +} + +func createSourceStringWithDbName(config *PgSQLConfig) string { + return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", config.User, config.Password, config.Host, config.DbName, config.SSLMode) +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore.go new file mode 100644 index 00000000..007050dc --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore.go @@ -0,0 +1,550 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
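+
+// pgsqlstore.go provides a PostgreSQL-backed implementation of the same
+// Storager interface as the in-memory store. Notes, occurrences and
+// operations are stored as proto.MarshalTextString blobs keyed by project
+// and resource name (see queries.go). With the sample settings used in
+// pgsqlstore_test.go, createSourceString yields a connection string of the
+// form "postgres://postgres:password@127.0.0.1:5432/?sslmode=disable".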
+ +package storage + +import ( + "database/sql" + "fmt" + "log" + "strconv" + "time" + + "github.com/fernet/fernet-go" + "github.com/golang/protobuf/proto" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + "github.com/lib/pq" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type pgSQLStore struct { + *sql.DB + paginationKey string +} + +func NewPgSQLStore(config *PgSQLConfig) *pgSQLStore { + err := createDatabase(createSourceString(config), config.DbName) + if err != nil { + log.Fatal(err.Error()) + } + db, err := sql.Open("postgres", createSourceStringWithDbName(config)) + if err != nil { + log.Fatal(err.Error()) + } + if db.Ping() != nil { + log.Fatal("Database server is not alive") + } + _, err = db.Exec(createTables) + if err != nil { + db.Close() + log.Fatal(err.Error()) + } + pg := pgSQLStore{ + DB: db, + paginationKey: config.PaginationKey, + } + return &pg +} + +func createDatabase(source, dbName string) error { + db, err := sql.Open("postgres", source) + if err != nil { + return err + } + defer db.Close() + // Check if db exists + res, err := db.Exec( + fmt.Sprintf("SELECT * FROM pg_catalog.pg_database WHERE datname='%s'", dbName)) + if err != nil { + return err + } + rowCnt, err := res.RowsAffected() + if err != nil { + return err + } + // Create database if it doesn't exist + if rowCnt == 0 { + _, err = db.Exec(fmt.Sprintf("CREATE DATABASE %s", dbName)) + if err != nil { + return err + } + } + return nil +} + +// CreateProject adds the specified project to the store +func (pg *pgSQLStore) CreateProject(pID string) error { + _, err := pg.DB.Exec(insertProject, name.FormatProject(pID)) + if err, ok := err.(*pq.Error); ok { + // Check for unique_violation + if err.Code == "23505" { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Project with name %q already exists", pID)) + } else { + log.Println("Failed to insert Project in database", err) + return status.Error(codes.Internal, "Failed to insert Project in database") + } + } + return nil +} + +// DeleteProject deletes the project with the given pID from the store +func (pg *pgSQLStore) DeleteProject(pID string) error { + pName := name.FormatProject(pID) + result, err := pg.DB.Exec(deleteProject, pName) + if err != nil { + return status.Error(codes.Internal, "Failed to delete Project from database") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to delete Project from database") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Project with name %q does not Exist", pName)) + } + return nil +} + +// GetProject returns the project with the given pID from the store +func (pg *pgSQLStore) GetProject(pID string) (*pb.Project, error) { + pName := name.FormatProject(pID) + var exists bool + err := pg.DB.QueryRow(projectExists, pName).Scan(&exists) + if err != nil { + return nil, status.Error(codes.Internal, "Failed to query Project from database") + } + if !exists { + return nil, status.Error(codes.NotFound, fmt.Sprintf("Project with name %q does not Exist", pName)) + } + return &pb.Project{Name: pName}, nil +} + +// ListProjects returns up to pageSize number of projects beginning at pageToken (or from +// start if pageToken is the emtpy string). 
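+// The page token returned by the List* methods is the id of the last row
+// scanned, encrypted with the configured fernet pagination key (see
+// encryptInt64/decryptInt64 below); passing it back resumes the listing
+// after that id. A usage sketch:
+//
+//	first, token, _ := pg.ListProjects("", 2, "")
+//	rest, _, _ := pg.ListProjects("", 100, token)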
+func (pg *pgSQLStore) ListProjects(filter string, pageSize int, pageToken string) ([]*pb.Project, string, error) { + var rows *sql.Rows + id := decryptInt64(pageToken, pg.paginationKey, 0) + rows, err := pg.DB.Query(listProjects, pageSize, id) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to list Projects from database") + } + var projects []*pb.Project + var lastId int64 + for rows.Next() { + var name string + err := rows.Scan(&lastId, &name) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to scan Project row") + } + projects = append(projects, &pb.Project{Name: name}) + } + encryptedPage, err := encryptInt64(lastId, pg.paginationKey) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to paginate projects") + } + return projects, encryptedPage, nil +} + +// CreateOccurrence adds the specified occurrence +func (pg *pgSQLStore) CreateOccurrence(o *pb.Occurrence) error { + oPID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + log.Printf("Invalid occurrence name: %v", o.Name) + return status.Error(codes.InvalidArgument, "Invalid occurrence name") + } + nPID, nID, err := name.ParseNote(o.NoteName) + if err != nil { + log.Printf("Invalid note name: %v", o.NoteName) + return status.Error(codes.InvalidArgument, "Invalid note name") + } + _, err = pg.DB.Exec(insertOccurrence, oPID, oID, nPID, nID, proto.MarshalTextString(o)) + if err, ok := err.(*pq.Error); ok { + // Check for unique_violation + if err.Code == "23505" { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Occurrence with name %q already exists", o.Name)) + } else { + log.Println("Failed to insert Occurrence in database", err) + return status.Error(codes.Internal, "Failed to insert Occurrence in database") + } + } + return nil +} + +// DeleteOccurrence deletes the occurrence with the given pID and oID +func (pg *pgSQLStore) DeleteOccurrence(pID, oID string) error { + result, err := pg.DB.Exec(deleteOccurrence, pID, oID) + if err != nil { + return status.Error(codes.Internal, "Failed to delete Occurrence from database") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to delete Occurrence from database") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Occurrence with name %q/%q does not Exist", pID, oID)) + } + return nil +} + +// UpdateOccurrence updates the existing occurrence with the given projectID and occurrenceID +func (pg *pgSQLStore) UpdateOccurrence(pID, oID string, o *pb.Occurrence) error { + result, err := pg.DB.Exec(updateOccurrence, pID, oID, proto.MarshalTextString(o)) + if err != nil { + return status.Error(codes.Internal, "Failed to update Occurrence") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to update Occurrence") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Occurrence with name %q/%q does not Exist", pID, oID)) + } + return nil +} + +// GetOccurrence returns the occurrence with pID and oID +func (pg *pgSQLStore) GetOccurrence(pID, oID string) (*pb.Occurrence, error) { + var data string + err := pg.DB.QueryRow(searchOccurrence, pID, oID).Scan(&data) + switch { + case err == sql.ErrNoRows: + return nil, status.Error(codes.NotFound, fmt.Sprintf("Occurrence with name %q/%q does not Exist", pID, oID)) + case err != nil: + return nil, status.Error(codes.Internal, "Failed to query Occurrence from database") + } + var o pb.Occurrence + proto.UnmarshalText(data, 
&o) + if err != nil { + return nil, status.Error(codes.Internal, "Failed to unmarshal Occurrence from database") + } + return &o, nil +} + +// ListOccurrences returns up to pageSize number of occurrences for this project (pID) beginning +// at pageToken (or from start if pageToken is the emtpy string). +func (pg *pgSQLStore) ListOccurrences(pID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error) { + var rows *sql.Rows + id := decryptInt64(pageToken, pg.paginationKey, 0) + rows, err := pg.DB.Query(listOccurrences, pID, pageSize, id) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to list Occurrences from database") + } + os := []*pb.Occurrence{} + var lastId int64 + for rows.Next() { + var data string + err := rows.Scan(&lastId, &data) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to scan Occurrences row") + } + var o pb.Occurrence + proto.UnmarshalText(data, &o) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to unmarshal Occurrence from database") + } + os = append(os, &o) + } + encryptedPage, err := encryptInt64(lastId, pg.paginationKey) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to paginate projects") + } + return os, encryptedPage, nil +} + +// CreateNote adds the specified note +func (pg *pgSQLStore) CreateNote(n *pb.Note) error { + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + log.Printf("Invalid note name: %v", n.Name) + return status.Error(codes.InvalidArgument, "Invalid note name") + } + _, err = pg.DB.Exec(insertNote, pID, nID, proto.MarshalTextString(n)) + if err, ok := err.(*pq.Error); ok { + // Check for unique_violation + if err.Code == "23505" { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Note with name %q already exists", n.Name)) + } else { + log.Println("Failed to insert Note in database", err) + return status.Error(codes.Internal, "Failed to insert Note in database") + } + } + return nil +} + +// DeleteNote deletes the note with the given pID and nID +func (pg *pgSQLStore) DeleteNote(pID, nID string) error { + result, err := pg.DB.Exec(deleteNote, pID, nID) + if err != nil { + return status.Error(codes.Internal, "Failed to delete Note from database") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to delete Note from database") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Note with name %q/%q does not Exist", pID, nID)) + } + return nil +} + +// UpdateNote updates the existing note with the given pID and nID +func (pg *pgSQLStore) UpdateNote(pID, nID string, n *pb.Note) error { + result, err := pg.DB.Exec(updateNote, pID, nID, proto.MarshalTextString(n)) + if err != nil { + return status.Error(codes.Internal, "Failed to update Note") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to update Note") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Note with name %q/%q does not Exist", pID, nID)) + } + return nil +} + +// GetNote returns the note with project (pID) and note ID (nID) +func (pg *pgSQLStore) GetNote(pID, nID string) (*pb.Note, error) { + var data string + err := pg.DB.QueryRow(searchNote, pID, nID).Scan(&data) + switch { + case err == sql.ErrNoRows: + return nil, status.Error(codes.NotFound, fmt.Sprintf("Note with name %q/%q does not Exist", pID, nID)) + case err != nil: + return nil, status.Error(codes.Internal, "Failed to query Note 
from database") + } + var note pb.Note + proto.UnmarshalText(data, ¬e) + if err != nil { + return nil, status.Error(codes.Internal, "Failed to unmarshal Note from database") + } + return ¬e, nil +} + +// GetNoteByOccurrence returns the note attached to occurrence with pID and oID +func (pg *pgSQLStore) GetNoteByOccurrence(pID, oID string) (*pb.Note, error) { + o, err := pg.GetOccurrence(pID, oID) + if err != nil { + return nil, err + } + nPID, nID, err := name.ParseNote(o.NoteName) + if err != nil { + log.Printf("Error parsing name: %v", o.NoteName) + return nil, status.Error(codes.InvalidArgument, "Invalid Note name") + } + n, err := pg.GetNote(nPID, nID) + if err != nil { + return nil, err + } + return n, nil +} + +// ListNotes returns up to pageSize number of notes for this project (pID) beginning +// at pageToken (or from start if pageToken is the emtpy string). +func (pg *pgSQLStore) ListNotes(pID, filters string, pageSize int, pageToken string) ([]*pb.Note, string, error) { + var rows *sql.Rows + id := decryptInt64(pageToken, pg.paginationKey, 0) + rows, err := pg.DB.Query(listNotes, pID, pageSize, id) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to list Notes from database") + } + ns := []*pb.Note{} + var lastId int64 + for rows.Next() { + var data string + err := rows.Scan(&lastId, &data) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to scan Notes row") + } + var n pb.Note + proto.UnmarshalText(data, &n) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to unmarshal Note from database") + } + ns = append(ns, &n) + } + encryptedPage, err := encryptInt64(lastId, pg.paginationKey) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to paginate projects") + } + return ns, encryptedPage, nil +} + +// ListNoteOccurrences returns up to pageSize number of occcurrences on the particular note (nID) +// for this project (pID) projects beginning at pageToken (or from start if pageToken is the emtpy string). 
+func (pg *pgSQLStore) ListNoteOccurrences(pID, nID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error) { + // Verify that note exists + if _, err := pg.GetNote(pID, nID); err != nil { + return nil, "", err + } + var rows *sql.Rows + id := decryptInt64(pageToken, pg.paginationKey, 0) + rows, err := pg.DB.Query(listNoteOccurrences, pID, nID, pageSize, id) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to list Occurrences from database") + } + os := []*pb.Occurrence{} + var lastId int64 + for rows.Next() { + var data string + err := rows.Scan(&lastId, &data) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to scan Occurrences row") + } + var o pb.Occurrence + proto.UnmarshalText(data, &o) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to unmarshal Occurrence from database") + } + os = append(os, &o) + } + encryptedPage, err := encryptInt64(lastId, pg.paginationKey) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to paginate projects") + } + return os, encryptedPage, nil +} + +// GetOperation returns the operation with pID and oID +func (pg *pgSQLStore) GetOperation(pID, opID string) (*opspb.Operation, error) { + var data string + err := pg.DB.QueryRow(searchOperation, pID, opID).Scan(&data) + switch { + case err == sql.ErrNoRows: + return nil, status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q/%q does not Exist", pID, opID)) + case err != nil: + return nil, status.Error(codes.Internal, "Failed to query Operation from database") + } + var op opspb.Operation + proto.UnmarshalText(data, &op) + if err != nil { + return nil, status.Error(codes.Internal, "Failed to unmarshal Operation from database") + } + return &op, nil +} + +// CreateOperation adds the specified operation +func (pg *pgSQLStore) CreateOperation(o *opspb.Operation) error { + pID, opID, err := name.ParseOperation(o.Name) + if err != nil { + log.Printf("Invalid operation name: %v", o.Name) + return status.Error(codes.InvalidArgument, "Invalid operation name") + } + _, err = pg.DB.Exec(insertOperation, pID, opID, proto.MarshalTextString(o)) + if err, ok := err.(*pq.Error); ok { + // Check for unique_violation + if err.Code == "23505" { + return status.Error(codes.AlreadyExists, fmt.Sprintf("Operation with name %q/%q already exists", pID, opID)) + } else { + log.Println("Failed to insert Operation in database", err) + return status.Error(codes.Internal, "Failed to insert Operation in database") + } + } + return nil +} + +// DeleteOperation deletes the operation with the given pID and oID +func (pg *pgSQLStore) DeleteOperation(pID, opID string) error { + result, err := pg.DB.Exec(deleteOperation, pID, opID) + if err != nil { + return status.Error(codes.Internal, "Failed to delete Operation from database") + } + count, err := result.RowsAffected() + if err != nil { + return status.Error(codes.Internal, "Failed to delete Operation from database") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q/%q does not Exist", pID, opID)) + } + return nil +} + +// UpdateOperation updates the existing operation with the given pID and nID +func (pg *pgSQLStore) UpdateOperation(pID, opID string, op *opspb.Operation) error { + result, err := pg.DB.Exec(updateOperation, pID, opID, proto.MarshalTextString(op)) + if err != nil { + return status.Error(codes.Internal, "Failed to update Operation") + } + count, err := result.RowsAffected() + if err != nil { + return 
status.Error(codes.Internal, "Failed to update Operation") + } + if count == 0 { + return status.Error(codes.NotFound, fmt.Sprintf("Operation with name %q/%q does not Exist", pID, opID)) + } + return nil +} + +// ListOperations returns up to pageSize number of operations for this project (pID) beginning +// at pageToken (or from start if pageToken is the emtpy string). +func (pg *pgSQLStore) ListOperations(pID, filters string, pageSize int, pageToken string) ([]*opspb.Operation, string, error) { + var rows *sql.Rows + id := decryptInt64(pageToken, pg.paginationKey, 0) + rows, err := pg.DB.Query(listOperations, pID, pageSize, id) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to list Operations from database") + } + ops := []*opspb.Operation{} + var lastId int64 + for rows.Next() { + var data string + err := rows.Scan(&lastId, &data) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to scan Operations row") + } + var op opspb.Operation + proto.UnmarshalText(data, &op) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to unmarshal Operation from database") + } + ops = append(ops, &op) + } + encryptedPage, err := encryptInt64(lastId, pg.paginationKey) + if err != nil { + return nil, "", status.Error(codes.Internal, "Failed to paginate projects") + } + return ops, encryptedPage, nil +} + +// Encrypt int64 using provided key +func encryptInt64(v int64, key string) (string, error) { + k, err := fernet.DecodeKey(key) + if err != nil { + return "", err + } + bytes, err := fernet.EncryptAndSign([]byte(strconv.FormatInt(v, 10)), k) + if err != nil { + return "", err + } + return string(bytes), nil +} + +// Decrypts encrypted int64 using provided key. Returns defaultValue if decryption fails. +func decryptInt64(encrypted string, key string, defaultValue int64) int64 { + k, err := fernet.DecodeKey(key) + if err != nil { + return defaultValue + } + bytes := fernet.VerifyAndDecrypt([]byte(encrypted), time.Hour, []*fernet.Key{k}) + if bytes == nil { + return defaultValue + } + decryptedValue, err := strconv.ParseInt(string(bytes), 10, 64) + if err != nil { + return defaultValue + } + return decryptedValue +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore_test.go new file mode 100644 index 00000000..62a7b893 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/pgsqlstore_test.go @@ -0,0 +1,61 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package storage + +import ( + "database/sql" + "testing" + + server "github.com/grafeas/grafeas/server-go" +) + +func dropDatabase(t *testing.T, config *PgSQLConfig) { + t.Helper() + // Open database + source := createSourceString(config) + db, err := sql.Open("postgres", source) + if err != nil { + t.Fatalf("Failed to open database: %v", err) + } + // Kill opened connection + if _, err := db.Exec(` + SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = $1`, config.DbName); err != nil { + t.Fatalf("Failed to drop database: %v", err) + } + // Drop database + if _, err := db.Exec("DROP DATABASE " + config.DbName); err != nil { + t.Fatalf("Failed to drop database: %v", err) + } +} + +func TestPgSQLStore(t *testing.T) { + createPgSQLStore := func(t *testing.T) (server.Storager, func()) { + t.Helper() + config := &PgSQLConfig{ + Host: "127.0.0.1:5432", + DbName: "test_db", + User: "postgres", + Password: "password", + SSLMode: "disable", + PaginationKey: "XxoPtCUzrUv4JV5dS+yQ+MdW7yLEJnRMwigVY/bpgtQ=", + } + pg := NewPgSQLStore(config) + return pg, func() { dropDatabase(t, config); pg.Close() } + } + + doTestStorager(t, createPgSQLStore) +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/queries.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/queries.go new file mode 100644 index 00000000..de7e6336 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/queries.go @@ -0,0 +1,75 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
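+
+// queries.go groups the DDL and SQL statements used by pgSQLStore. The list
+// queries take a page size and the last-seen row id so results can be
+// paginated by primary key.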
+ +package storage + +const ( + createTables = ` + CREATE TABLE IF NOT EXISTS projects ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL UNIQUE + ); + CREATE TABLE IF NOT EXISTS notes ( + id SERIAL PRIMARY KEY, + project_name TEXT NOT NULL, + note_name TEXT NOT NULL, + data TEXT, + UNIQUE (project_name, note_name) + ); + CREATE TABLE IF NOT EXISTS occurrences ( + id SERIAL PRIMARY KEY, + project_name TEXT NOT NULL, + occurrence_name TEXT NOT NULL, + data TEXT, + note_id int REFERENCES notes NOT NULL, + UNIQUE (project_name, occurrence_name) + ); + CREATE TABLE IF NOT EXISTS operations ( + id SERIAL PRIMARY KEY, + project_name TEXT NOT NULL, + operation_name TEXT NOT NULL, + data TEXT, + UNIQUE (project_name, operation_name) + );` + + insertProject = `INSERT INTO projects(name) VALUES ($1)` + projectExists = `SELECT EXISTS (SELECT 1 FROM projects WHERE name = $1)` + deleteProject = `DELETE FROM projects WHERE name = $1` + listProjects = `SELECT id, name FROM projects WHERE id > $2 LIMIT $1` + + insertOccurrence = `INSERT INTO occurrences(project_name, occurrence_name, note_id, data) + VALUES ($1, $2, (SELECT id FROM notes WHERE project_name = $3 AND note_name = $4), $5)` + searchOccurrence = `SELECT data FROM occurrences WHERE project_name = $1 AND occurrence_name = $2` + updateOccurrence = `UPDATE occurrences SET data = $3 WHERE project_name = $1 AND occurrence_name = $2` + deleteOccurrence = `DELETE FROM occurrences WHERE project_name = $1 AND occurrence_name = $2` + listOccurrences = `SELECT id, data FROM occurrences WHERE project_name = $1 AND id > $3 LIMIT $2` + + insertNote = `INSERT INTO notes(project_name, note_name, data) VALUES ($1, $2, $3)` + searchNote = `SELECT data FROM notes WHERE project_name = $1 AND note_name = $2` + updateNote = `UPDATE notes SET data = $3 WHERE project_name = $1 AND note_name = $2` + deleteNote = `DELETE FROM notes WHERE project_name = $1 AND note_name = $2` + listNotes = `SELECT id, data FROM notes WHERE project_name = $1 AND id > $3 LIMIT $2` + listNoteOccurrences = `SELECT o.id, o.data FROM occurrences as o, notes as n + WHERE n.id = o.note_id + AND n.project_name = $1 + AND n.note_name = $2 + AND o.id > $4 + LIMIT $3` + + insertOperation = `INSERT INTO operations(project_name, operation_name, data) VALUES ($1, $2, $3)` + searchOperation = `SELECT data FROM operations WHERE project_name = $1 AND operation_name = $2` + deleteOperation = `DELETE FROM operations WHERE project_name = $1 AND operation_name = $2` + updateOperation = `UPDATE operations SET data = $3 WHERE project_name = $1 AND operation_name = $2` + listOperations = `SELECT id, data FROM operations WHERE project_name = $1 AND id > $3 LIMIT $2` +) diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/storager_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/storager_test.go new file mode 100644 index 00000000..18310609 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/storage/storager_test.go @@ -0,0 +1,862 @@ +// Copyright 2017 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import ( + "fmt" + "reflect" + "sort" + "strconv" + "strings" + "testing" + + "github.com/grafeas/grafeas/samples/server/go-server/api/server/name" + "github.com/grafeas/grafeas/samples/server/go-server/api/server/testing" + server "github.com/grafeas/grafeas/server-go" + pb "github.com/grafeas/grafeas/v1alpha1/proto" + opspb "google.golang.org/genproto/googleapis/longrunning" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// Tests implementations of server.Storager +// createStore is a function that creates new server.Storage instances and +// a corresponding cleanUp function that will be run at the end of each +// test case. +func doTestStorager(t *testing.T, createStore func(t *testing.T) (server.Storager, func())) { + t.Run("CreateProject", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + p := "myproject" + if err := s.CreateProject(p); err != nil { + t.Errorf("CreateProject got %v want success", err) + } + // Try to insert the same project twice, expect failure. + if err := s.CreateProject(p); err == nil { + t.Errorf("CreateProject got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateProject got code %v want %v", s.Code(), codes.AlreadyExists) + } + }) + + t.Run("CreateNote", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Errorf("CreateNote got %v want success", err) + } + // Try to insert the same note twice, expect failure. + if err := s.CreateNote(n); err == nil { + t.Errorf("CreateNote got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateNote got code %v want %v", s.Code(), codes.AlreadyExists) + } + }) + + t.Run("CreateOccurrence", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + // Try to insert the same occurrence twice, expect failure. 
+ if err := s.CreateOccurrence(o); err == nil { + t.Errorf("CreateOccurrence got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateOccurrence got code %v want %v", s.Code(), codes.AlreadyExists) + } + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and occurrenceID %v", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } + }) + + t.Run("CreateOperation", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + opPID := "vulnerability-scanner-a" + op := testutil.Operation(opPID) + if err := s.CreateOperation(op); err != nil { + t.Errorf("CreateOperation got %v want success", err) + } + // Try to insert the same note twice, expect failure. + if err := s.CreateOperation(op); err == nil { + t.Errorf("CreateOperation got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.AlreadyExists { + t.Errorf("CreateOperation got code %v want %v", s.Code(), codes.AlreadyExists) + } + }) + + t.Run("DeleteProject", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "myproject" + // Delete before the note exists + if err := s.DeleteProject(pID); err == nil { + t.Error("Deleting nonexistant note got success, want error") + } + if err := s.CreateProject(pID); err != nil { + t.Fatalf("CreateProject got %v want success", err) + } + + if err := s.DeleteProject(pID); err != nil { + t.Errorf("DeleteProject got %v, want success ", err) + } + }) + + t.Run("DeleteOccurrence", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + // Delete before the occurrence exists + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if err := s.DeleteOccurrence(pID, oID); err == nil { + t.Error("Deleting nonexistant occurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + if err := s.DeleteOccurrence(pID, oID); err != nil { + t.Errorf("DeleteOccurrence got %v, want success ", err) + } + }) + + t.Run("UpdateOccurrence", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and occurrenceID %v", err) + } + if err := s.UpdateOccurrence(pID, oID, o); err == nil { + t.Fatal("UpdateOccurrence got success want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } + + o2 := o + o2.GetVulnerabilityDetails().CvssScore = 1.0 + if err := s.UpdateOccurrence(pID, oID, o2); err != 
nil { + t.Fatalf("UpdateOccurrence got %v want success", err) + } + + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o2) { + t.Errorf("GetOccurrence got %v, want %v", got, o2) + } + }) + + t.Run("DeleteNote", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + // Delete before the note exists + pID, oID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note %v", err) + } + if err := s.DeleteNote(pID, oID); err == nil { + t.Error("Deleting nonexistant note got success, want error") + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + + if err := s.DeleteNote(pID, oID); err != nil { + t.Errorf("DeleteNote got %v, want success ", err) + } + }) + + t.Run("UpdateNote", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing projectID and noteID %v", err) + } + if err := s.UpdateNote(pID, nID, n); err == nil { + t.Fatal("UpdateNote got success want error") + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNote got %v, want %v", got, n) + } + + n2 := n + n2.GetVulnerabilityType().CvssScore = 1.0 + if err := s.UpdateNote(pID, nID, n2); err != nil { + t.Fatalf("UpdateNote got %v want success", err) + } + + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n2) { + t.Errorf("GetNote got %v, want %v", got, n2) + } + }) + + t.Run("GetProject", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "myproject" + // Try to get project before it has been created, expect failure. 
+ if _, err := s.GetProject(pID); err == nil { + t.Errorf("GetProject got success, want Error") + } else if s, _ := status.FromError(err); s.Code() != codes.NotFound { + t.Errorf("GetProject got code %v want %v", s.Code(), codes.NotFound) + } + s.CreateProject(pID) + if p, err := s.GetProject(pID); err != nil { + t.Fatalf("GetProject got %v want success", err) + } else if p.Name != name.FormatProject(pID) { + t.Fatalf("Got %s want %s", p.Name, pID) + } + }) + + t.Run("GetOccurrence", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if _, err := s.GetOccurrence(pID, oID); err == nil { + t.Fatal("GetOccurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v, want Success", err) + } + if got, err := s.GetOccurrence(pID, oID); err != nil { + t.Fatalf("GetOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOccurrence got %v, want %v", got, o) + } + }) + + t.Run("GetNote", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note %v", err) + } + if _, err := s.GetNote(pID, nID); err == nil { + t.Fatal("GetNote got success, want error") + } + if err := s.CreateNote(n); err != nil { + t.Errorf("CreateNote got %v, want Success", err) + } + if got, err := s.GetNote(pID, nID); err != nil { + t.Fatalf("GetNote got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNote got %v, want %v", got, n) + } + }) + + t.Run("GetNoteByOccurrence", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + oPID := "occurrence-project" + o := testutil.Occurrence(oPID, n.Name) + pID, oID, err := name.ParseOccurrence(o.Name) + if err != nil { + t.Fatalf("Error parsing occurrence %v", err) + } + if _, err := s.GetNoteByOccurrence(pID, oID); err == nil { + t.Fatal("GetNoteByOccurrence got success, want error") + } + if err := s.CreateOccurrence(o); err != nil { + t.Errorf("CreateOccurrence got %v, want Success", err) + } + if got, err := s.GetNoteByOccurrence(pID, oID); err != nil { + t.Fatalf("GetNoteByOccurrence got %v, want success", err) + } else if !reflect.DeepEqual(got, n) { + t.Errorf("GetNoteByOccurrence got %v, want %v", got, n) + } + }) + + t.Run("GetOperation", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing operation %v", err) + } + if _, err := s.GetOperation(pID, oID); err == nil { + t.Fatal("GetOperation got success, want error") + } + if err := s.CreateOperation(o); err != nil { + t.Errorf("CreateOperation got %v, want Success", err) + } + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + 
t.Errorf("GetOperation got %v, want %v", got, o) + } + }) + + t.Run("DeleteOperation", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + // Delete before the operation exists + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing note %v", err) + } + if err := s.DeleteOperation(pID, oID); err == nil { + t.Error("Deleting nonexistant operation got success, want error") + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + + if err := s.DeleteOperation(pID, oID); err != nil { + t.Errorf("DeleteOperation got %v, want success ", err) + } + }) + + t.Run("UpdateOperation", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + oPID := "vulnerability-scanner-a" + o := testutil.Operation(oPID) + + pID, oID, err := name.ParseOperation(o.Name) + if err != nil { + t.Fatalf("Error parsing projectID and operationID %v", err) + } + if err := s.UpdateOperation(pID, oID, o); err == nil { + t.Fatal("UpdateOperation got success want error") + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o) { + t.Errorf("GetOperation got %v, want %v", got, o) + } + + o2 := o + o2.Done = true + if err := s.UpdateOperation(pID, oID, o2); err != nil { + t.Fatalf("UpdateOperation got %v want success", err) + } + + if got, err := s.GetOperation(pID, oID); err != nil { + t.Fatalf("GetOperation got %v, want success", err) + } else if !reflect.DeepEqual(got, o2) { + t.Errorf("GetOperation got %v, want %v", got, o2) + } + }) + + t.Run("ListProjects", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + wantProjectNames := []string{} + for i := 0; i < 20; i++ { + pID := fmt.Sprint("Project", i) + if err := s.CreateProject(pID); err != nil { + t.Fatalf("CreateProject got %v want success", err) + } + wantProjectNames = append(wantProjectNames, name.FormatProject(pID)) + } + filter := "filters_are_yet_to_be_implemented" + gotProjects, _, err := s.ListProjects(filter, 100, "") + if err != nil { + t.Fatalf("ListProjects got %v want success", err) + } + if len(gotProjects) != 20 { + t.Errorf("ListProjects got %v projects, want 20", len(gotProjects)) + } + gotProjectNames := make([]string, len(gotProjects)) + for i, project := range gotProjects { + gotProjectNames[i] = project.Name + } + // Sort to handle that wantProjectNames are not guaranteed to be listed in insertion order + sort.Strings(wantProjectNames) + sort.Strings(gotProjectNames) + if !reflect.DeepEqual(gotProjectNames, wantProjectNames) { + t.Errorf("ListProjects got %v want %v", gotProjectNames, wantProjectNames) + } + }) + + t.Run("ListOperations", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + ops := []opspb.Operation{} + findProject := "findThese" + dontFind := "dontFind" + for i := 0; i < 20; i++ { + o := testutil.Operation("") + if i < 5 { + o.Name = name.FormatOperation(findProject, strconv.Itoa(i)) + } else { + o.Name = name.FormatOperation(dontFind, strconv.Itoa(i)) + } + if err := s.CreateOperation(o); err != nil { + t.Fatalf("CreateOperation got %v want success", err) + } + ops = append(ops, *o) + } + gotOs, _, err := s.ListOperations(findProject, "", 100, "") + if err != nil { + t.Fatalf("ListOperations got %v want 
success", err) + } + + if len(gotOs) != 5 { + t.Errorf("ListOperations got %v operations, want 5", len(gotOs)) + } + for _, o := range gotOs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(o.Name, want) { + t.Errorf("ListOperations got %v want prefix %v", o.Name, want) + } + } + }) + + t.Run("ListNotes", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + ns := []*pb.Note{} + findProject := "findThese" + dontFind := "dontFind" + for i := 0; i < 20; i++ { + n := testutil.Note("") + if i < 5 { + n.Name = name.FormatNote(findProject, strconv.Itoa(i)) + } else { + n.Name = name.FormatNote(dontFind, strconv.Itoa(i)) + } + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + ns = append(ns, n) + } + gotNs, _, err := s.ListNotes(findProject, "", 100, "") + if err != nil { + t.Fatalf("ListNotes got %v want success", err) + } + if len(gotNs) != 5 { + t.Errorf("ListNotes got %v notes, want 5", len(gotNs)) + } + for _, n := range gotNs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(n.Name, want) { + t.Errorf("ListNotes got %v want %v", n.Name, want) + } + } + }) + + t.Run("ListOccurrences", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + os := []*pb.Occurrence{} + findProject := "findThese" + dontFind := "dontFind" + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + for i := 0; i < 20; i++ { + oPID := "_" + o := testutil.Occurrence(oPID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, strconv.Itoa(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, strconv.Itoa(i)) + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + os = append(os, o) + } + gotOs, _, err := s.ListOccurrences(findProject, "", 100, "") + if err != nil { + t.Fatalf("ListOccurrences got %v want success", err) + } + if len(gotOs) != 5 { + t.Errorf("ListOccurrences got %v Occurrences, want 5", len(gotOs)) + } + for _, o := range gotOs { + want := name.FormatProject(findProject) + if !strings.HasPrefix(o.Name, want) { + t.Errorf("ListOccurrences got %v want %v", o.Name, want) + } + } + }) + + t.Run("ListNoteOccurrences", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + os := []*pb.Occurrence{} + findProject := "findThese" + dontFind := "dontFind" + nPID := "vulnerability-scanner-a" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + for i := 0; i < 20; i++ { + oPID := "_" + o := testutil.Occurrence(oPID, n.Name) + if i < 5 { + o.Name = name.FormatOccurrence(findProject, strconv.Itoa(i)) + } else { + o.Name = name.FormatOccurrence(dontFind, strconv.Itoa(i)) + } + if err := s.CreateOccurrence(o); err != nil { + t.Fatalf("CreateOccurrence got %v want success", err) + } + os = append(os, o) + } + pID, nID, err := name.ParseNote(n.Name) + if err != nil { + t.Fatalf("Error parsing note name %v", err) + } + gotOs, _, err := s.ListNoteOccurrences(pID, nID, "", 100, "") + if err != nil { + t.Fatalf("ListNoteOccurrences got %v want success", err) + } + if len(gotOs) != 20 { + t.Errorf("ListNoteOccurrences got %v Occurrences, want 20", len(gotOs)) + } + for _, o := range gotOs { + if o.NoteName != n.Name { + t.Errorf("ListNoteOccurrences got %v want %v", o.Name, o.NoteName) + } + } + }) + + t.Run("ProjectPagination", func(t *testing.T) { + 
s, cleanUp := createStore(t) + defer cleanUp() + pID1 := "project1" + if err := s.CreateProject(pID1); err != nil { + t.Errorf("CreateProject got %v want success", err) + } + pID2 := "project2" + if err := s.CreateProject(pID2); err != nil { + t.Errorf("CreateProject got %v want success", err) + } + pID3 := "project3" + if err := s.CreateProject(pID3); err != nil { + t.Errorf("CreateProject got %v want success", err) + } + filter := "filters_are_yet_to_be_implemented" + // Get projects + gotProjects, lastPage, err := s.ListProjects(filter, 2, "") + if err != nil { + t.Fatalf("ListProjects got %v want success", err) + } + if len(gotProjects) != 2 { + t.Errorf("ListProjects got %v projects, want 2", len(gotProjects)) + } + if p := gotProjects[0]; p.Name != name.FormatProject(pID1) { + t.Fatalf("Got %s want %s", p.Name, name.FormatProject(pID1)) + } + if p := gotProjects[1]; p.Name != name.FormatProject(pID2) { + t.Fatalf("Got %s want %s", p.Name, name.FormatProject(pID2)) + } + // Get projects again + gotProjects, _, err = s.ListProjects(filter, 100, lastPage) + if err != nil { + t.Fatalf("ListProjects got %v want success", err) + } + if len(gotProjects) != 1 { + t.Errorf("ListProjects got %v projects, want 1", len(gotProjects)) + } + if p := gotProjects[0]; p.Name != name.FormatProject(pID3) { + t.Fatalf("Got %s want %s", p.Name, name.FormatProject(pID3)) + } + }) + + t.Run("NotesPagination", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "project" + nID1 := "note1" + op1 := testutil.Note(pID) + op1.Name = name.FormatNote(pID, nID1) + if err := s.CreateNote(op1); err != nil { + t.Errorf("CreateNote got %v want success", err) + } + nID2 := "note2" + op2 := testutil.Note(pID) + op2.Name = name.FormatNote(pID, nID2) + if err := s.CreateNote(op2); err != nil { + t.Errorf("CreateNote got %v want success", err) + } + nID3 := "note3" + op3 := testutil.Note(pID) + op3.Name = name.FormatNote(pID, nID3) + if err := s.CreateNote(op3); err != nil { + t.Errorf("CreateNote got %v want success", err) + } + filter := "filters_are_yet_to_be_implemented" + // Get occurrences + gotNotes, lastPage, err := s.ListNotes(pID, filter, 2, "") + if err != nil { + t.Fatalf("ListNotes got %v want success", err) + } + if len(gotNotes) != 2 { + t.Errorf("ListNotes got %v notes, want 2", len(gotNotes)) + } + if p := gotNotes[0]; p.Name != name.FormatNote(pID, nID1) { + t.Fatalf("Got %s want %s", p.Name, name.FormatNote(pID, nID1)) + } + if p := gotNotes[1]; p.Name != name.FormatNote(pID, nID2) { + t.Fatalf("Got %s want %s", p.Name, name.FormatNote(pID, nID2)) + } + // Get occurrences again + gotNotes, _, err = s.ListNotes(pID, filter, 100, lastPage) + if err != nil { + t.Fatalf("ListNotes got %v want success", err) + } + if len(gotNotes) != 1 { + t.Errorf("ListNotes got %v notes, want 1", len(gotNotes)) + } + if p := gotNotes[0]; p.Name != name.FormatNote(pID, nID3) { + t.Fatalf("Got %s want %s", p.Name, name.FormatNote(pID, nID3)) + } + }) + + t.Run("OccurrencePagination", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "project" + nPID := "noteproject" + oID1 := "occurrence1" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + op1 := testutil.Occurrence(pID, n.Name) + op1.Name = name.FormatOccurrence(pID, oID1) + if err := s.CreateOccurrence(op1); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + oID2 := "occurrence2" + op2 := testutil.Occurrence(pID, n.Name) + 
op2.Name = name.FormatOccurrence(pID, oID2) + if err := s.CreateOccurrence(op2); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + oID3 := "occurrence3" + op3 := testutil.Occurrence(pID, n.Name) + op3.Name = name.FormatOccurrence(pID, oID3) + if err := s.CreateOccurrence(op3); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + filter := "filters_are_yet_to_be_implemented" + // Get occurrences + gotOccurrences, lastPage, err := s.ListOccurrences(pID, filter, 2, "") + if err != nil { + t.Fatalf("ListOccurrences got %v want success", err) + } + if len(gotOccurrences) != 2 { + t.Errorf("ListOccurrences got %v occurrences, want 2", len(gotOccurrences)) + } + if p := gotOccurrences[0]; p.Name != name.FormatOccurrence(pID, oID1) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID1)) + } + if p := gotOccurrences[1]; p.Name != name.FormatOccurrence(pID, oID2) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID2)) + } + // Get occurrences again + gotOccurrences, _, err = s.ListOccurrences(pID, filter, 100, lastPage) + if err != nil { + t.Fatalf("ListOccurrences got %v want success", err) + } + if len(gotOccurrences) != 1 { + t.Errorf("ListOccurrences got %v operations, want 1", len(gotOccurrences)) + } + if p := gotOccurrences[0]; p.Name != name.FormatOccurrence(pID, oID3) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID3)) + } + }) + + t.Run("NoteOccurrencePagination", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "project" + nPID := "noteproject" + oID1 := "occurrence1" + n := testutil.Note(nPID) + if err := s.CreateNote(n); err != nil { + t.Fatalf("CreateNote got %v want success", err) + } + op1 := testutil.Occurrence(pID, n.Name) + op1.Name = name.FormatOccurrence(pID, oID1) + if err := s.CreateOccurrence(op1); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + oID2 := "occurrence2" + op2 := testutil.Occurrence(pID, n.Name) + op2.Name = name.FormatOccurrence(pID, oID2) + if err := s.CreateOccurrence(op2); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + oID3 := "occurrence3" + op3 := testutil.Occurrence(pID, n.Name) + op3.Name = name.FormatOccurrence(pID, oID3) + if err := s.CreateOccurrence(op3); err != nil { + t.Errorf("CreateOccurrence got %v want success", err) + } + filter := "filters_are_yet_to_be_implemented" + _, nID, err := name.ParseNote(n.Name) + // Get occurrences + gotOccurrences, lastPage, err := s.ListNoteOccurrences(nPID, nID, filter, 2, "") + if err != nil { + t.Fatalf("ListNoteOccurrences got %v want success", err) + } + if len(gotOccurrences) != 2 { + t.Errorf("ListNoteOccurrences got %v occurrences, want 2", len(gotOccurrences)) + } + if p := gotOccurrences[0]; p.Name != name.FormatOccurrence(pID, oID1) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID1)) + } + if p := gotOccurrences[1]; p.Name != name.FormatOccurrence(pID, oID2) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID2)) + } + // Get occurrences again + gotOccurrences, _, err = s.ListNoteOccurrences(nPID, nID, filter, 100, lastPage) + if err != nil { + t.Fatalf("ListNoteOccurrences got %v want success", err) + } + if len(gotOccurrences) != 1 { + t.Errorf("ListNoteOccurrences got %v operations, want 1", len(gotOccurrences)) + } + if p := gotOccurrences[0]; p.Name != name.FormatOccurrence(pID, oID3) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOccurrence(pID, oID3)) + } + }) + + 
t.Run("OperationPagination", func(t *testing.T) { + s, cleanUp := createStore(t) + defer cleanUp() + pID := "project1" + oID1 := "operation1" + op1 := testutil.Operation(pID) + op1.Name = name.FormatOperation(pID, oID1) + if err := s.CreateOperation(op1); err != nil { + t.Errorf("CreateOperation got %v want success", err) + } + oID2 := "operation2" + op2 := testutil.Operation(pID) + op2.Name = name.FormatOperation(pID, oID2) + if err := s.CreateOperation(op2); err != nil { + t.Errorf("CreateOperation got %v want success", err) + } + oID3 := "operation3" + op3 := testutil.Operation(pID) + op3.Name = name.FormatOperation(pID, oID3) + if err := s.CreateOperation(op3); err != nil { + t.Errorf("CreateOperation got %v want success", err) + } + filter := "filters_are_yet_to_be_implemented" + // Get operations + gotOperations, lastPage, err := s.ListOperations(pID, filter, 2, "") + if err != nil { + t.Fatalf("ListOperations got %v want success", err) + } + if len(gotOperations) != 2 { + t.Errorf("ListOperations got %v operations, want 2", len(gotOperations)) + } + if p := gotOperations[0]; p.Name != name.FormatOperation(pID, oID1) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOperation(pID, oID1)) + } + if p := gotOperations[1]; p.Name != name.FormatOperation(pID, oID2) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOperation(pID, oID2)) + } + // Get operations again + gotOperations, _, err = s.ListOperations(pID, filter, 100, lastPage) + if err != nil { + t.Fatalf("ListOperations got %v want success", err) + } + if len(gotOperations) != 1 { + t.Errorf("ListOperations got %v operations, want 1", len(gotOperations)) + } + if p := gotOperations[0]; p.Name != name.FormatOperation(pID, oID3) { + t.Fatalf("Got %s want %s", p.Name, name.FormatOperation(pID, oID3)) + } + }) +} diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go index 618b427d..994fc1d1 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl.go @@ -37,10 +37,19 @@ type Grafeas struct { // CreateProject validates that a project is valid and then creates a project in the backing datastore. 
func (g *Grafeas) CreateProject(ctx context.Context, req *pb.CreateProjectRequest) (*empty.Empty, error) { - pID, err := name.ParseProject(req.Name) + p := req.Project + if req == nil { + log.Print("Project must not be empty.") + return nil, status.Error(codes.InvalidArgument, "Project must not be empty") + } + if p.Name == "" { + log.Printf("Project name must not be empty: %v", p.Name) + return nil, status.Error(codes.InvalidArgument, "Project name must not be empty") + } + pID, err := name.ParseProject(p.Name) if err != nil { - log.Printf("Error parsing project name: %v", req.Name) - return nil, status.Error(codes.InvalidArgument, "Invalid Project name") + log.Printf("Invalid project name: %v", p.Name) + return nil, status.Error(codes.InvalidArgument, "Invalid project name") } return &empty.Empty{}, g.S.CreateProject(pID) } @@ -66,7 +75,19 @@ func (g *Grafeas) CreateNote(ctx context.Context, req *pb.CreateNoteRequest) (*p return nil, status.Error(codes.NotFound, fmt.Sprintf("Project %v not found", pID)) } - // TODO: Validate that operation exists if it is specified when get methods are implmented + // Validate that operation exists if it is specified when get methods are implmented + if n.OperationName != "" { + pID, oID, err := name.ParseOperation(n.OperationName) + if err != nil { + log.Printf("Error parsing name: %v", n.OperationName) + return nil, status.Error(codes.InvalidArgument, "Invalid Operation name") + + } + if _, err = g.S.GetOperation(pID, oID); err != nil { + log.Printf("Operation:%v for Note: %v not found in pID %v", oID, n.Name, pID) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Operation:%v for Note: %v not found", oID, n.Name)) + } + } return n, g.S.CreateNote(n) } @@ -98,7 +119,19 @@ func (g *Grafeas) CreateOccurrence(ctx context.Context, req *pb.CreateOccurrence log.Printf("Unable to getnote %v, err: %v", n, err) return nil, status.Error(codes.NotFound, fmt.Sprintf("Note %v not found", o.NoteName)) } - // TODO: Validate that operation exists if it is specified + // Validate that operation exists if it is specified + if o.OperationName != "" { + pID, oID, err := name.ParseOperation(o.OperationName) + if err != nil { + log.Printf("Error parsing name: %v", o.OperationName) + return nil, status.Error(codes.InvalidArgument, "Invalid Operation name") + + } + if _, err = g.S.GetOperation(pID, oID); err != nil { + log.Printf("Operation:%v for Occurrence: %v not found", oID, o.Name) + return nil, status.Error(codes.NotFound, fmt.Sprintf("Operation:%v for Occurrence: %v not found", oID, o.Name)) + } + } return o, g.S.CreateOccurrence(o) } @@ -314,8 +347,17 @@ func (g *Grafeas) UpdateOperation(ctx context.Context, req *pb.UpdateOperationRe // ListProjects returns the project id for all projects in the backing datastore. 
func (g *Grafeas) ListProjects(ctx context.Context, req *pb.ListProjectsRequest) (*pb.ListProjectsResponse, error) { // TODO: support filters - ns := g.S.ListProjects(req.Filter) - return &pb.ListProjectsResponse{Projects: ns}, nil + if req.PageSize == 0 { + req.PageSize = 100 + } + ps, nextToken, err := g.S.ListProjects(req.Filter, int(req.PageSize), req.PageToken) + if err != nil { + return nil, status.Error(codes.Unknown, "Failed to list projects") + } + return &pb.ListProjectsResponse{ + Projects: ps, + NextPageToken: nextToken, + }, nil } func (g *Grafeas) ListOperations(ctx context.Context, req *opspb.ListOperationsRequest) (*opspb.ListOperationsResponse, error) { @@ -325,8 +367,17 @@ func (g *Grafeas) ListOperations(ctx context.Context, req *opspb.ListOperationsR return nil, status.Error(codes.InvalidArgument, "Invalid Project name") } // TODO: support filters - ops := g.S.ListOperations(pID, req.Filter) - return &opspb.ListOperationsResponse{Operations: ops}, nil + if req.PageSize == 0 { + req.PageSize = 100 + } + ops, nextToken, err := g.S.ListOperations(pID, req.Filter, int(req.PageSize), req.PageToken) + if err != nil { + return nil, status.Error(codes.Unknown, "Failed to list operations") + } + return &opspb.ListOperationsResponse{ + Operations: ops, + NextPageToken: nextToken, + }, nil } func (g *Grafeas) ListNotes(ctx context.Context, req *pb.ListNotesRequest) (*pb.ListNotesResponse, error) { @@ -335,11 +386,18 @@ func (g *Grafeas) ListNotes(ctx context.Context, req *pb.ListNotesRequest) (*pb. log.Printf("Error parsing name: %v", req.Parent) return nil, status.Error(codes.InvalidArgument, "Invalid Project name") } - // TODO: support filters - ns := g.S.ListNotes(pID, req.Filter) - return &pb.ListNotesResponse{Notes: ns}, nil - + if req.PageSize == 0 { + req.PageSize = 100 + } + ns, nextToken, err := g.S.ListNotes(pID, req.Filter, int(req.PageSize), req.PageToken) + if err != nil { + return nil, status.Error(codes.Unknown, "Failed to list notes") + } + return &pb.ListNotesResponse{ + Notes: ns, + NextPageToken: nextToken, + }, nil } func (g *Grafeas) ListOccurrences(ctx context.Context, req *pb.ListOccurrencesRequest) (*pb.ListOccurrencesResponse, error) { @@ -348,10 +406,18 @@ func (g *Grafeas) ListOccurrences(ctx context.Context, req *pb.ListOccurrencesRe log.Printf("Error parsing name: %v", req.Parent) return nil, err } - // TODO: support filters - prioritizing resource url - os := g.S.ListOccurrences(pID, req.Filter) - return &pb.ListOccurrencesResponse{Occurrences: os}, nil + if req.PageSize == 0 { + req.PageSize = 100 + } + os, nextToken, err := g.S.ListOccurrences(pID, req.Filter, int(req.PageSize), req.PageToken) + if err != nil { + return nil, status.Error(codes.Unknown, "Failed to list occurrences") + } + return &pb.ListOccurrencesResponse{ + Occurrences: os, + NextPageToken: nextToken, + }, nil } func (g *Grafeas) ListNoteOccurrences(ctx context.Context, req *pb.ListNoteOccurrencesRequest) (*pb.ListNoteOccurrencesResponse, error) { @@ -361,11 +427,17 @@ func (g *Grafeas) ListNoteOccurrences(ctx context.Context, req *pb.ListNoteOccur return nil, status.Error(codes.InvalidArgument, "Invalid note name") } // TODO: support filters - prioritizing resource url - os, gErr := g.S.ListNoteOccurrences(pID, nID, req.Filter) + if req.PageSize == 0 { + req.PageSize = 100 + } + os, nextToken, gErr := g.S.ListNoteOccurrences(pID, nID, req.Filter, int(req.PageSize), req.PageToken) if gErr != nil { return nil, gErr } - return &pb.ListNoteOccurrencesResponse{Occurrences: os}, nil + 
return &pb.ListNoteOccurrencesResponse{ + Occurrences: os, + NextPageToken: nextToken, + }, nil } func (g *Grafeas) CancelOperation(context.Context, *opspb.CancelOperationRequest) (*empty.Empty, error) { diff --git a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go index bd6d858c..37842ae0 100644 --- a/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go +++ b/vendor/github.com/grafeas/grafeas/samples/server/go-server/api/server/v1alpha1/impl_test.go @@ -31,7 +31,7 @@ import ( ) func createProject(t *testing.T, pID string, ctx context.Context, g Grafeas) { - req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + req := pb.CreateProjectRequest{Project: &pb.Project{Name: name.FormatProject(pID)}} if _, err := g.CreateProject(ctx, &req); err != nil { t.Errorf("CreateProject(empty operation): got %v, want success", err) } @@ -41,7 +41,7 @@ func TestCreateProject(t *testing.T) { ctx := context.Background() pID := "myproject" g := Grafeas{storage.NewMemStore()} - req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + req := pb.CreateProjectRequest{Project: &pb.Project{Name: name.FormatProject(pID)}} _, err := g.CreateProject(ctx, &req) if err != nil { t.Errorf("CreateProject(empty operation): got %v, want success", err) @@ -146,6 +146,48 @@ func TestCreateNote(t *testing.T) { } } +func TestCreateNoteOccurrenceWithOperation(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "vulnerability-scanner-a" + o := testutil.Operation(pID) + createProject(t, pID, ctx, g) + parent := name.FormatProject(pID) + cReq := &pb.CreateOperationRequest{Parent: parent, Operation: o} + if _, err := g.CreateOperation(ctx, cReq); err != nil { + t.Fatalf("CreateOperation(%v) got %v, want success", o, err) + } + n := testutil.Note(pID) + n.OperationName = "projects/vulnerability-scanner-a/operation/junk" + nreq := &pb.CreateNoteRequest{Parent: parent, Note: n} + // Try to create a Note with operation that does not exist and expect failure + if _, err := g.CreateNote(ctx, nreq); err == nil { + t.Errorf("TestCreateNoteWithOperation: got %v, want %v", err, codes.NotFound) + } + n.OperationName = o.Name + nreq = &pb.CreateNoteRequest{Parent: parent, Note: n} + // Try to create a Note with operation that we just created and expect success + if _, err := g.CreateNote(ctx, nreq); err != nil { + t.Errorf("TestCreateNoteWithOperation(%v) got %v, want success", n.OperationName, err) + } + // Try to create occurrence with operation name + occ := testutil.Occurrence(pID, n.Name) + occ.OperationName = "projects/vulnerability-scanner-a/operation/junk" + parent = name.FormatProject(pID) + occReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: occ} + // Try to create an Occurrence with operation that does not exist and expect failure + if _, err := g.CreateOccurrence(ctx, occReq); err == nil { + t.Errorf("TestCreateNoteWithOperation: got %v, want %v", err, codes.NotFound) + } + occ.OperationName = o.Name + occReq = &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: occ} + // Try to create an Occurrence with operation that we just created and expect success + if _, err := g.CreateOccurrence(ctx, occReq); err != nil { + t.Errorf("TestCreateNoteWithOperation(%v) got %v, want success", occ.OperationName, err) + + } +} + func TestDeleteProject(t *testing.T) { ctx := context.Background() g := 
Grafeas{storage.NewMemStore()} @@ -498,7 +540,10 @@ func TestListOccurrences(t *testing.T) { os = append(os, o) } - lReq := &pb.ListOccurrencesRequest{Parent: name.FormatProject(findProject)} + lReq := &pb.ListOccurrencesRequest{ + Parent: name.FormatProject(findProject), + PageSize: 100, + } resp, lErr := g.ListOccurrences(ctx, lReq) if lErr != nil { t.Fatalf("ListOccurrences got %v want success", lErr) @@ -514,7 +559,7 @@ func TestListProjects(t *testing.T) { var projects []string for i := 0; i < 20; i++ { pID := fmt.Sprintf("proj%v", i) - req := pb.CreateProjectRequest{Name: name.FormatProject(pID)} + req := pb.CreateProjectRequest{Project: &pb.Project{Name: name.FormatProject(pID)}} if _, err := g.CreateProject(ctx, &req); err != nil { t.Errorf("CreateProject: got %v, want success", err) } @@ -657,3 +702,210 @@ func TestListNoteOccurrences(t *testing.T) { t.Errorf("resp.Occurrences got %d, want 20", len(resp.Occurrences)) } } + +func TestProjectsPagination(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + var projects []string + for i := 0; i < 20; i++ { + pID := fmt.Sprintf("proj%v", i) + req := pb.CreateProjectRequest{Project: &pb.Project{Name: name.FormatProject(pID)}} + if _, err := g.CreateProject(ctx, &req); err != nil { + t.Errorf("CreateProject: got %v, want success", err) + } + projects = append(projects, name.FormatProject(pID)) + } + req := pb.ListProjectsRequest{ + PageSize: 15, + } + resp, err := g.ListProjects(ctx, &req) + if err != nil { + t.Errorf("ListProjects: got %v, want success", err) + } + if 15 != len(resp.Projects) { + t.Errorf("ListProjects: expected 15 projects, got %d", len(resp.Projects)) + } + req = pb.ListProjectsRequest{ + PageSize: 15, + PageToken: resp.NextPageToken, + } + resp, err = g.ListProjects(ctx, &req) + if err != nil { + t.Errorf("ListProjects: got %v, want success", err) + } + if 5 != len(resp.Projects) { + t.Errorf("ListProjects: expected 5 projects, got %d", len(resp.Projects)) + } +} + +func TestNotePagination(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "myproject" + createProject(t, pID, ctx, g) + for i := 0; i < 20; i++ { + o := testutil.Note(pID) + o.Name = name.FormatNote(pID, string(i)) + parent := name.FormatProject(pID) + cReq := &pb.CreateNoteRequest{Parent: parent, Note: o} + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", o, err) + } + } + req := pb.ListNotesRequest{ + Parent: name.FormatProject(pID), + PageSize: 15, + } + resp, err := g.ListNotes(ctx, &req) + if err != nil { + t.Errorf("ListNotes: got %v, want success", err) + } + if 15 != len(resp.Notes) { + t.Errorf("ListNotes: expected 15 notes, got %d", len(resp.Notes)) + } + req = pb.ListNotesRequest{ + Parent: name.FormatProject(pID), + PageSize: 15, + PageToken: resp.NextPageToken, + } + resp, err = g.ListNotes(ctx, &req) + if err != nil { + t.Errorf("ListNotes: got %v, want success", err) + } + if 5 != len(resp.Notes) { + t.Errorf("ListNotes: expected 5 notes, got %d", len(resp.Notes)) + } +} + +func TestOccurrencePagination(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + createProject(t, npID, ctx, g) + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + pID := "myproject" + 
createProject(t, pID, ctx, g) + for i := 0; i < 20; i++ { + o := testutil.Occurrence(pID, n.Name) + o.Name = name.FormatOccurrence(pID, string(i)) + parent := name.FormatProject(pID) + cReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, cReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", o, err) + } + } + req := pb.ListOccurrencesRequest{ + Parent: name.FormatProject(pID), + PageSize: 15, + } + resp, err := g.ListOccurrences(ctx, &req) + if err != nil { + t.Errorf("ListOccurrences: got %v, want success", err) + } + if 15 != len(resp.Occurrences) { + t.Errorf("ListOccurrences: expected 15 occurrences, got %d", len(resp.Occurrences)) + } + req = pb.ListOccurrencesRequest{ + Parent: name.FormatProject(pID), + PageSize: 15, + PageToken: resp.NextPageToken, + } + resp, err = g.ListOccurrences(ctx, &req) + if err != nil { + t.Errorf("ListOccurrences: got %v, want success", err) + } + if 5 != len(resp.Occurrences) { + t.Errorf("ListOccurrences: expected 5 occurrences, got %d", len(resp.Occurrences)) + } +} + +func TestNoteOccurrencePagination(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + npID := "vulnerability-scanner-a" + n := testutil.Note(npID) + nParent := name.FormatProject(npID) + cReq := &pb.CreateNoteRequest{Parent: nParent, Note: n} + createProject(t, npID, ctx, g) + if _, err := g.CreateNote(ctx, cReq); err != nil { + t.Fatalf("CreateNote(%v) got %v, want success", n, err) + } + pID := "myproject" + createProject(t, pID, ctx, g) + for i := 0; i < 20; i++ { + o := testutil.Occurrence(pID, n.Name) + o.Name = name.FormatOccurrence(pID, string(i)) + parent := name.FormatProject(pID) + cReq := &pb.CreateOccurrenceRequest{Parent: parent, Occurrence: o} + if _, err := g.CreateOccurrence(ctx, cReq); err != nil { + t.Fatalf("CreateOccurrence(%v) got %v, want success", o, err) + } + } + req := pb.ListNoteOccurrencesRequest{ + Name: n.Name, + PageSize: 15, + } + resp, err := g.ListNoteOccurrences(ctx, &req) + if err != nil { + t.Errorf("ListNoteOccurrences: got %v, want success", err) + } + if 15 != len(resp.Occurrences) { + t.Errorf("ListNoteOccurrences: expected 15 occurrences, got %d", len(resp.Occurrences)) + } + req = pb.ListNoteOccurrencesRequest{ + Name: n.Name, + PageSize: 15, + PageToken: resp.NextPageToken, + } + resp, err = g.ListNoteOccurrences(ctx, &req) + if err != nil { + t.Errorf("ListNoteOccurrences: got %v, want success", err) + } + if 5 != len(resp.Occurrences) { + t.Errorf("ListNoteOccurrences: expected 5 occurrences, got %d", len(resp.Occurrences)) + } +} + +func TestOperationPagination(t *testing.T) { + ctx := context.Background() + g := Grafeas{storage.NewMemStore()} + pID := "myproject" + createProject(t, pID, ctx, g) + for i := 0; i < 20; i++ { + o := testutil.Operation(pID) + o.Name = name.FormatOperation(pID, string(i)) + parent := name.FormatProject(pID) + cReq := &pb.CreateOperationRequest{Parent: parent, Operation: o} + if _, err := g.CreateOperation(ctx, cReq); err != nil { + t.Fatalf("CreateOperation(%v) got %v, want success", o, err) + } + } + req := opspb.ListOperationsRequest{ + Name: name.FormatProject(pID), + PageSize: 15, + } + resp, err := g.ListOperations(ctx, &req) + if err != nil { + t.Errorf("ListOperations: got %v, want success", err) + } + if 15 != len(resp.Operations) { + t.Errorf("ListOperations: expected 15 operations, got %d", len(resp.Operations)) + } + req = opspb.ListOperationsRequest{ + Name: name.FormatProject(pID), + PageSize: 
15, + PageToken: resp.NextPageToken, + } + resp, err = g.ListOperations(ctx, &req) + if err != nil { + t.Errorf("ListOperations: got %v, want success", err) + } + if 5 != len(resp.Operations) { + t.Errorf("ListOperations: expected 5 operations, got %d", len(resp.Operations)) + } +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/README.md b/vendor/github.com/grafeas/grafeas/server-go/filtering/README.md new file mode 100644 index 00000000..2d04d8fd --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/README.md @@ -0,0 +1,143 @@ +# Filtering + +Filters are used with HTTP LIST operations to filter the result set. They can be +thought of as a simplified query language which provides support for applying +boolean expressions across the attributes of the records in the result set. + +Filters can be extended with custom functions and can also express scoring +logic to help rank results which might be most relevant to the query. Filters +are not intended to be exact and may perform case-insensitive compares, +corrections of common misspellings, or fuzzy matching. + +For a pratical example of list filters in production, see Google Cloud Logging's +[Advanced Filters](https://cloud.google.com/logging/docs/view/advanced-filters). + +## Expressions + +The following expressions, listed in order of operator precedence, are supported +by the parser included in the repository. + +### Conjunction + +The logical ANDing of two filters such as `a` and `b` in the example below. + +``` +a:property AND b('args', 2) AND -c +``` + +The statement above parses as a series of function calls modelled as an AST +using the [common expression language](https://github.com/google/cel-spec): + +``` +and( + has(a, property), + and(b('args', 2), negation(c)) +) +``` + +The logical AND may be used for result scoring, or as a requirement that all +results from the operation must adhere to. For more information about how the +conjunction is interpreted see the API documentation. + +Note, the `AND` operator is case-sensitive. The following example illustrates +difference between `and` and `AND` + +``` +a AND b // parsed as and(a, b) +a and b // parsed as sequence(a, and, b) +``` + +### Sequence + +Sequences are space-delimited expressions which may either be treated as +equivalent to conjunctions representing strict requirements or as a result +scoring algorithm: + +``` +a.b = 'hello' a:world a.world != 'mars' +``` + +Parses to: + +``` +sequence( + equals(select(a, b), 'hello'), + has(a, world), + notEquals(select(a, world), 'mars') +) +``` + +Sequences have lower precedence than conjunctions and higher precedence than +disjunctions, so it is most natural to read sequences as being grouped between +`AND` operators. + +The following example should be read as `and(a, sequence(or(b, c), d))`: + +``` +a AND b OR c d +``` + +### Disjunction + +Disjunctions represent a logical ORing of restrictions. The example highlights +a series of restrictions ORed together. Note, that the timestamp string may +in some instance be treated as a timestamp by the API if `a` is a property of +timestamp type and the string on the right hand side is in a format compatible +with conversion from string to time such as +[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt). 
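As a quick illustration, a disjunction such as the example shown after this sketch could be handed to the parser by wrapping the filter string in a `Source`, as the Usage section below describes. This is a hedged sketch only: `common.NewStringSource` is defined in this change, but the exact `parser.Parse` signature is not shown here and is assumed from the Usage description.

```go
package main

import (
	"fmt"

	"github.com/grafeas/grafeas/server-go/filtering/common"
	"github.com/grafeas/grafeas/server-go/filtering/parser"
)

func main() {
	// The same disjunction shown in the example block that follows.
	src := common.NewStringSource(`a >= '2018-03-06T00:00:00Z' OR b < text OR c = 1`, "example")

	// Assumed signature: Parse(common.Source) (*ParsedExpr, error), per the Usage section.
	parsed, err := parser.Parse(src)
	if err != nil {
		fmt.Println(err) // errors carry the source location of the problem
		return
	}
	fmt.Println(parsed)
}
```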
+ +``` +a >= '2018-03-06T00:00:00Z` OR b < text OR c = 1 +``` + +### Negation + +The `NOT` and `-` operator may be treated synonymously and are used to +expression exclusion result meeting a certain condition in the expression or +for expressing the inversion of a logical expression. How these expressions are +applied depends somewhat on the resource. + +The following examples parse as `not(greater(a, b))`: + +``` +NOT (a > b) +NOT a > b +-a > b +``` + +To adjust the precedence to parse as `greater(not(a), b)`, use parentheses: + +``` +(-a) > b +``` + +### Restriction + +There are restrictions for the common boolean expressions related to equality +and ordering. + +* Equality `=` +* Inequality `!=` +* Greater than `>` +* Greater than or equal `>=` +* Less than `<` +* Less than or equal `<=` + +In addition to the typical restrictions, there is also the `HAS` operator +indicated by the `:` which is used to test whether a property exists on a value. +This is useful when filtering table driven results with nullable column values. + +## Gotchas + +Within the common expression langauge, all identifiers within an expression are +expected to be known at parse time. Within a list filter expression there are +scenarios where barewords (unquoted strings) in the expression may either be an +identifier or an unquoted string. How this ambiguity is resolved depends on the +filter interpreter implementation. + +## Usage + +The library expects the developer to provide a `Source` value, which could come +from a file, UI element, or URL query string to `parser.Parse()`. The output of +the `Parse` will be a `google.api.expr.v1.ParsedExpr` value or an error with a +formatted message indicating the location of the parse issue. \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/ast/types.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/ast/types.go new file mode 100644 index 00000000..f5ede8de --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/ast/types.go @@ -0,0 +1,97 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package ast provides methods for constructing CEL abstract syntax nodes. +package ast + +import ( + expr "github.com/google/cel-spec/proto/v1" +) + +// Create a constant from the primitive value with the given id. +func NewConst(id int64, value interface{}) *expr.Expr { + constant := expr.Constant{} + switch value.(type) { + case int64: + constant.ConstantKind = + &expr.Constant_Int64Value{Int64Value: value.(int64)} + case uint64: + constant.ConstantKind = + &expr.Constant_Uint64Value{Uint64Value: value.(uint64)} + case float64: + constant.ConstantKind = + &expr.Constant_DoubleValue{DoubleValue: value.(float64)} + // The advanced list filtering documentation indicates support + // for converting strings with specific formats into other constant + // type values. 
This is left as an exercise for the evaluator as + // the interpretation of the string type is often contextual. + case string: + constant.ConstantKind = + &expr.Constant_StringValue{StringValue: value.(string)} + } + return newExpr(id, &constant) +} + +// Create an identifier from the given name and id. +// +// Identifiers may either refer to a property that can be filtered within the +// API, or unquoted text. Interpretation of an identifier is highly contextual. +// +// Within CEL, type-checking asserts that all identifiers must be declared. To +// use type-checking with filters, a second processing step may be used to +// convert unknown identifiers to constant strings prior to the type-check. This +// may be desired as an algorithmic way for validating filters. +func NewIdent(id int64, name string) *expr.Expr { + return newExpr(id, &expr.Expr_Ident{Name: name}) +} + +// Create a select field expression from the operand, field name, and id. +// +// Selection occurs via the dot operator, but there is also an index expression +// which permits the selection of a field with non-identifier characters in it. +func NewSelect(id int64, operand *expr.Expr, field string) *expr.Expr { + return newExpr(id, &expr.Expr_Select{Operand: operand, Field: field}) +} + +// Create a function call expression from the function name, optional receiver, +// arguments, and id. +// +// All operators and user-defined functions are modelled as calls. For a list +// of built-in operators (restrictions), see the operators/operators.go file. +func NewCall(id int64, name string, target *expr.Expr, args []*expr.Expr) *expr.Expr { + return newExpr(id, &expr.Expr_Call{Function: name, Target: target, Args: args}) +} + +// Create a new expression from the given id and kind. +// Ast nodes must have a unique id which will be associated with source metadata. +func newExpr(id int64, kind interface{}) *expr.Expr { + value := expr.Expr{Id: id} + switch kind.(type) { + case *expr.Expr_Ident: + value.ExprKind = + &expr.Expr_IdentExpr{IdentExpr: kind.(*expr.Expr_Ident)} + case *expr.Expr_Select: + value.ExprKind = + &expr.Expr_SelectExpr{SelectExpr: kind.(*expr.Expr_Select)} + case *expr.Expr_Call: + value.ExprKind = + &expr.Expr_CallExpr{CallExpr: kind.(*expr.Expr_Call)} + case *expr.Constant: + value.ExprKind = + &expr.Expr_ConstExpr{ConstExpr: kind.(*expr.Constant)} + case nil: + // do nothing + } + return &value +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/error.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/error.go new file mode 100644 index 00000000..95642ba7 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/error.go @@ -0,0 +1,45 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines types common to parsing and other diagnostics. 
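Taken together, the constructors above compose into complete CEL expressions. The following small sketch (not part of the vendored file) builds the AST for the restriction `a.b = "hello"` using the mangled operator name from the operators package added later in this change; the id values are arbitrary but unique, as `newExpr` expects.

```go
package main

import (
	"fmt"

	expr "github.com/google/cel-spec/proto/v1"

	"github.com/grafeas/grafeas/server-go/filtering/ast"
	"github.com/grafeas/grafeas/server-go/filtering/operators"
)

func main() {
	a := ast.NewIdent(1, "a")          // a
	ab := ast.NewSelect(2, a, "b")     // a.b
	hello := ast.NewConst(3, "hello")  // "hello"

	// a.b = "hello"  ->  _==_(a.b, "hello"), with no receiver (target nil).
	eq := ast.NewCall(4, operators.Equals, nil, []*expr.Expr{ab, hello})
	fmt.Println(eq)
}
```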
+package common + +import ( + "fmt" + "strings" +) + +// Error type which references a location within source and a message. +type Error struct { + Location Location + Source Source + Message string +} + +// Stringer implementation that places errors in context with the source. +func (e *Error) String() string { + var result = fmt.Sprintf("ERROR: %s:%d:%d: %s", + e.Source.Description(), + e.Location.GetLine(), + e.Location.GetColumn()+1, // add one to the 0-based column for display + e.Message) + if snippet, found := e.Source.Snippet(e.Location.GetLine()); found { + result += "\n | " + result += snippet + result += "\n | " + result += strings.Repeat(".", e.Location.GetColumn()) + result += "^" + } + return result +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors.go new file mode 100644 index 00000000..87d449c2 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors.go @@ -0,0 +1,59 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines elements common to parsing and other diagnostics. +package common + +import ( + "fmt" +) + +// Errors type which contains a list of errors observed during parsing. +type Errors struct { + errors []Error +} + +// Create a new instance of the Errors type. +func NewErrors() *Errors { + return &Errors{ + errors: []Error{}, + } +} + +// Report an error at a source location. +func (e *Errors) ReportError(s Source, l Location, format string, args ...interface{}) { + err := Error{ + Source: s, + Location: l, + Message: fmt.Sprintf(format, args...), + } + e.errors = append(e.errors, err) +} + +// Return this list of observed errors. +func (e *Errors) GetErrors() []Error { + return e.errors[:] +} + +// Convert the error set to a newline delimited string. +func (e *Errors) String() string { + var result = "" + for i, err := range e.errors { + if i >= 1 { + result += "\n" + } + result += err.String() + } + return result +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors_test.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors_test.go new file mode 100644 index 00000000..6b8c54e5 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/errors_test.go @@ -0,0 +1,45 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines types common to parsing and other diagnostics. +package common + +import ( + "testing" +) + +// Test the reporting and recording of errors. +func TestErrors(t *testing.T) { + source := NewStringSource("a.b\n&&arg(missing, paren", "errors-test") + errors := NewErrors() + errors.ReportError(source, NewLocation(1, 1), "No such field") + if len(errors.GetErrors()) != 1 { + t.Error("First eror not recorded") + } + errors.ReportError(source, NewLocation(2, 20), "Syntax error, missing paren") + if len(errors.GetErrors()) != 2 { + t.Error("Second error not recorded") + } + expected := + "ERROR: errors-test:1:2: No such field\n" + + " | a.b\n" + + " | .^\n" + + "ERROR: errors-test:2:21: Syntax error, missing paren\n" + + " | &&arg(missing, paren\n" + + " | ....................^" + actual := errors.String() + if actual != expected { + t.Errorf("Expected %s, received %s", expected, actual) + } +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/location.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/location.go new file mode 100644 index 00000000..3495fd67 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/location.go @@ -0,0 +1,44 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines types common to parsing and other diagnostics. +package common + +// Interface to represent a location within Source. +type Location interface { + GetLine() int // 1-based line number within source. + GetColumn() int // 0-based column number within source. +} + +// Helper type to manually construct a location. +type RawLocation struct { + line int + column int +} + +// Ensure the RawLocation implements the Location interface. +var _ Location = &RawLocation{} + +// Create a new location. +func NewLocation(line int, column int) Location { + return &RawLocation{line: line, column: column} +} + +func (l *RawLocation) GetLine() int { + return l.line +} + +func (l *RawLocation) GetColumn() int { + return l.column +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source.go new file mode 100644 index 00000000..8ef07306 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source.go @@ -0,0 +1,111 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines types common to parsing and other diagnostics. +package common + +import ( + "strings" +) + +// Interface for filter source contents. +type Source interface { + // The source content represented as a string, for example a single file, + // textbox field, or url parameter. + Content() string + + // Brief description of the source, such as a file name or ui element. + Description() string + + // The character offsets at which lines occur. The zero-th entry should + // refer to the break between the first and second line, or EOF if there + // is only one line of source. + LineOffsets() []int32 + + // The raw character offset at which the a location exists given the + // location line and column. + // Returns the line offset and whether the location was found. + CharacterOffset(location Location) (int32, bool) + + // Return a line of content from the source and whether the line was found. + Snippet(line int) (string, bool) +} + +// Ensure the StringSource implements the Source interface. +var _ Source = &StringSource{} + +// StringSource type implementation of the Source interface. +type StringSource struct { + contents string + description string + lineOffsets []int32 +} + +// Return a new Source given the string contents and description. +func NewStringSource(contents string, description string) Source { + // Compute line offsets up front as they are referred to frequently. + lines := strings.Split(contents, "\n") + offsets := make([]int32, len(lines)) + var offset int32 = 0 + for i, line := range lines { + offset = offset + int32(len(line)) + 1 + offsets[int32(i)] = offset + } + return &StringSource{ + contents: contents, + description: description, + lineOffsets: offsets, + } +} + +func (s *StringSource) Content() string { + return s.contents +} + +func (s *StringSource) Description() string { + return s.description +} + +func (s *StringSource) LineOffsets() []int32 { + return s.lineOffsets +} + +func (s *StringSource) CharacterOffset(location Location) (int32, bool) { + if lineOffset, found := s.findLineOffset(location.GetLine()); found { + return lineOffset + int32(location.GetColumn()), true + } + return -1, false +} + +func (s *StringSource) Snippet(line int) (string, bool) { + if charStart, found := s.findLineOffset(line); found { + charEnd, found := s.findLineOffset(line + 1) + if found { + return s.contents[charStart : charEnd-1], true + } + return s.contents[charStart:], true + } + return "", false +} + +func (s *StringSource) findLineOffset(line int) (int32, bool) { + if line == 1 { + return 0, true + } else if line > 1 && line <= int(len(s.lineOffsets)) { + offset := s.lineOffsets[line-2] + return offset, true + } + return -1, false + +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source_test.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source_test.go new file mode 100644 index 00000000..beaf80c2 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/common/source_test.go @@ -0,0 +1,132 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package common defines types common to parsing and other diagnostics. +package common + +import ( + "testing" +) + +const ( + UnexpectedValue = "Expected '%v', got '%v'" + UnexpectedSnippet = "Expected snippet '%s', got '%s'" + SnippetNotFound = "Expected snippet at line %d, but not found" + SnippetFound = "Found snippet at line %d, where none was expected" +) + +// Test the error description method. +func TestStringSource_Description(t *testing.T) { + contents := "example content\nsecond line" + source := NewStringSource(contents, "description-test") + // Verify the content + if source.Content() != contents { + t.Errorf(UnexpectedValue, contents, source.Content()) + } + // Verify the description + if source.Description() != "description-test" { + t.Errorf(UnexpectedValue, "description-test", source.Description()) + } + + // Assert that the snippets on lines 1 & 2 are what was expected. + if str2, found := source.Snippet(2); !found { + t.Errorf(SnippetNotFound, 2) + + } else if str2 != "second line" { + t.Errorf(UnexpectedSnippet, "second line", str2) + } + if str1, found := source.Snippet(1); !found { + t.Errorf(SnippetNotFound, 1) + + } else if str1 != "example content" { + t.Errorf(UnexpectedSnippet, "example content", str1) + } +} + +// Test the character offest to make sure that the offsets accurately reflect +// the location of a character in source. +func TestStringSource_CharacterOffset(t *testing.T) { + contents := "c.d &&\n\t b.c.arg(10) &&\n\t test(10)" + source := NewStringSource(contents, "offset-test") + expectedLineOffsets := []int32{7, 24, 35} + if len(expectedLineOffsets) != len(source.LineOffsets()) { + t.Errorf("Expected list of size '%d', got a list of size '%d'", + len(expectedLineOffsets), len(source.LineOffsets())) + } else { + for i, val := range expectedLineOffsets { + if val != source.LineOffsets()[i] { + t.Errorf("Expected line %d offset of %d, go %d", + i, val, source.LineOffsets()[i]) + } + } + } + // Ensure that selecting a set of characters across multiple lines works as + // expected. + charStart, _ := source.CharacterOffset(NewLocation(1, 2)) + charEnd, _ := source.CharacterOffset(NewLocation(3, 2)) + if "d &&\n\t b.c.arg(10) &&\n\t " != string(contents[charStart:charEnd]) { + t.Errorf(UnexpectedValue, "d &&\n\t b.c.arg(10) &&\n\t ", + string(contents[charStart:charEnd])) + } + if _, found := source.CharacterOffset(NewLocation(4, 0)); found { + t.Error("Character offset was out of range of source, but still found.") + } +} + +// Test the computation of snippets, single lines of text, from a multiline +// source. 
+func TestStringSource_SnippetMultiline(t *testing.T) { + source := NewStringSource("hello\nworld\nmy\nbub\n", "four-line-test") + if str, found := source.Snippet(1); !found { + t.Errorf(SnippetNotFound, 1) + } else if str != "hello" { + t.Errorf(UnexpectedSnippet, "hello", str) + } + if str2, found := source.Snippet(2); !found { + t.Errorf(SnippetNotFound, 2) + } else if str2 != "world" { + t.Errorf(UnexpectedSnippet, "world", str2) + } + if str3, found := source.Snippet(3); !found { + t.Errorf(SnippetNotFound, 3) + } else if str3 != "my" { + t.Errorf(UnexpectedSnippet, "my", str3) + } + if str4, found := source.Snippet(4); !found { + t.Errorf(SnippetNotFound, 4) + } else if str4 != "bub" { + t.Errorf(UnexpectedSnippet, "bub", str4) + } + if str5, found := source.Snippet(5); !found { + t.Errorf(SnippetNotFound, 5) + } else if str5 != "" { + t.Errorf(UnexpectedSnippet, "", str5) + } +} + +// Test the computation of snippets from a single line source. +func TestStringSource_SnippetSingleline(t *testing.T) { + source := NewStringSource("hello, world", "one-line-test") + if str, found := source.Snippet(1); !found { + t.Errorf(SnippetNotFound, 1) + + } else if str != "hello, world" { + t.Errorf(UnexpectedSnippet, "hello, world", str) + } + if str2, found := source.Snippet(2); found { + t.Error(SnippetFound, 2) + } else if str2 != "" { + t.Error(UnexpectedSnippet, "", str2) + } +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/operators/operators.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/operators/operators.go new file mode 100644 index 00000000..50d7b34b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/operators/operators.go @@ -0,0 +1,87 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package operators defines constant operator names and helper functions for +// identifying the operator and its kind. +package operators + +import "strings" + +// All CEL operators are modelled as function calls. The list of constants below +// uses mangled operator names to avoid collisions with user-defined functions. +const ( + Sequence = "_sequence_" // Sequence of conjunctions. + LogicalAnd = "_&&_" // Conjunction operator (a AND b). + LogicalOr = "_||_" // Disjunction operator (a OR b). + LogicalNot = "_!" // Negation using the keyword NOT + Negate = "-_" // Negation using the minus + Index = "_[_]" // Index operation on a map or list. + + // Restriction operations. + Global = "_global_" + Has = "_:_" + Equals = "_==_" + Greater = "_>_" + GreaterEquals = "_>=_" + Less = "_<_" + LessEquals = "_<=_" + NotEquals = "_!=_" +) + +var ( + // Mapping between textual operator strings and mangled operator names. 
+ operators = map[string]string{ + "AND": LogicalAnd, + "OR": LogicalOr, + "NOT": LogicalNot, + "-": Negate, + "[": Index, + ":": Has, + "=": Equals, + "!=": NotEquals, + "<": Less, + "<=": LessEquals, + ">": Greater, + ">=": GreaterEquals, + } + + // The set of operators that are also restrictions. + restrictions = []string{ + Global, + Has, + Equals, + NotEquals, + Less, + LessEquals, + Greater, + GreaterEquals, + } +) + +// Find the operator name from the function name and whether it could be found. +func Find(text string) (string, bool) { + op, found := operators[strings.Trim(text, " ")] + return op, found +} + +// Determine whether the operator is a restriction. +func IsRestriction(op string) bool { + trimmed := strings.Trim(op, " ") + for _, restriction := range restrictions { + if restriction == trimmed { + return true + } + } + return false +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.g4 b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.g4 new file mode 100644 index 00000000..02767b87 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.g4 @@ -0,0 +1,160 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Filtering expression syntax. +parser grammar FilterExpression; + +options { tokenVocab = FilterExpressionLexer; } + +// Returns a filter expression. +filter + : expression? WS* EOF + ; + +// Conjunction of sequences. +// e.g. `a AND b` +expression + : expr=sequence (op+=andOp rest+=sequence)* + ; + +// Sequence of restrictions to be used for scoring list results. +// e.g. `a b 3.14` +sequence + : expr=factor (WS+ rest+=factor)* + ; + +// Disjunction of terms. +// e.g. `a OR b` +factor + : expr=term (op+=orOp rest+=term)* + ; + +// A restriction with optional negation. +// e.g. `NOT a`, `-a` +term + : op=notOp? expr=restriction + ; + +// A comparable value with optional comparison. +// e.g. ordering: `a < b`, equality: `a = b`, membership: `a.b:*` +restriction + : expr=comparable (WS* op=comparator WS* rest=comparable)? + ; + +// Comparables may either be numbers or values. +// NOTE: number is listed at a high precedence in order to avoid collisions +// between dot-delimited field selection within unquoted text and unambiguous +// classification of floating-point constants. +comparable + : number + | value + ; + +// The supported operator set. +comparator + : LESS_EQUALS + | LESS_THAN + | GREATER_EQUALS + | GREATER_THAN + | NOT_EQUALS + | EQUALS + | HAS + ; + +// Values may either be index lookups or member expressions or a primary +// syntax node. +// e.g. index: `a[b]`, select: `a.b`, call: `a.b()` +value + : primary #PrimaryExpr + | value op=DOT field (open=LPAREN argList? RPAREN)? 
#SelectOrCall + | value op=LBRACE WS* index=comparable WS* RBRACE #DynamicIndex + ; + +// Primary expressions include [un]quoted text, variables, functions, or +// nested (composite) expressions. +// e.g. id: `a`, call: `id()`, quotedText: `"hello"` +// +// NOTE: the id node is ambiguous as to whether it should be interpreted +// as a variable identifier or unquoted text. Interpretation is left to +// filter evaluation. +primary + : composite #NestedExpr + | id=text (open=LPAREN argList? RPAREN)? #IdentOrGlobalCall + | quotedText=STRING #StringVal + ; + +// A list of function arguments. +argList + : WS* args+=comparable (sep args+=comparable)* WS* + ; + +// Nested expression. +composite + : LPAREN WS* expression WS* RPAREN + ; + +// Text will be treated as bareword identifier if possible. The interpretation +// is somewhat subject to the filter consumer; however, the values `true` and +// `false` (case-insensitive) will be treated as booleans. +text + : (TEXT | EXCLAIM | DIGIT) (TEXT | EXCLAIM | DIGIT | MINUS)* + ; + +// Fields in a select statement are unambiguously string constants. +field + : id=text + | quotedText=STRING + | keyword + ; + +// Numeric constants +number + : floatVal + | intVal + ; + +// Positive and negative integers with hex support. +intVal + : MINUS? DIGIT+ + | MINUS? HEX_DIGIT + ; + +// Positive and negative floating point values. +floatVal + : MINUS? (DIGIT+ DOT DIGIT* | DOT DIGIT+) EXPONENT? + ; + +// Convenience parse terms +notOp + : MINUS + | NOT WS+ + ; + +andOp + : WS+ AND WS+ + ; + +orOp + : WS+ OR WS+ + ; + +sep + : WS* COMMA WS* + ; + +keyword + : OR + | AND + | NOT + ; diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.interp b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.interp new file mode 100644 index 00000000..1b780b95 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.interp @@ -0,0 +1,89 @@ +token literal names: +null +'.' +':' +'OR' +'AND' +'NOT' +'(' +')' +'[' +']' +'{' +'}' +',' +'<' +'<=' +'>' +'>=' +'!=' +'=' +'!' 
+'-' +'+' +null +null +null +null +null +null +'\\' + +token symbolic names: +null +DOT +HAS +OR +AND +NOT +LPAREN +RPAREN +LBRACE +RBRACE +LBRACKET +RBRACKET +COMMA +LESS_THAN +LESS_EQUALS +GREATER_THAN +GREATER_EQUALS +NOT_EQUALS +EQUALS +EXCLAIM +MINUS +PLUS +STRING +WS +DIGIT +HEX_DIGIT +EXPONENT +TEXT +BACKSLASH + +rule names: +filter +expression +sequence +factor +term +restriction +comparable +comparator +value +primary +argList +composite +text +field +number +intVal +floatVal +keyword +notOp +andOp +orOp +sep + + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 30, 303, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 3, 2, 5, 2, 48, 10, 2, 3, 2, 7, 2, 51, 10, 2, 12, 2, 14, 2, 54, 11, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 62, 10, 3, 12, 3, 14, 3, 65, 11, 3, 3, 4, 3, 4, 6, 4, 69, 10, 4, 13, 4, 14, 4, 70, 3, 4, 7, 4, 74, 10, 4, 12, 4, 14, 4, 77, 11, 4, 3, 5, 3, 5, 3, 5, 3, 5, 7, 5, 83, 10, 5, 12, 5, 14, 5, 86, 11, 5, 3, 6, 5, 6, 89, 10, 6, 3, 6, 3, 6, 3, 7, 3, 7, 7, 7, 95, 10, 7, 12, 7, 14, 7, 98, 11, 7, 3, 7, 3, 7, 7, 7, 102, 10, 7, 12, 7, 14, 7, 105, 11, 7, 3, 7, 3, 7, 5, 7, 109, 10, 7, 3, 8, 3, 8, 5, 8, 113, 10, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 5, 10, 125, 10, 10, 3, 10, 5, 10, 128, 10, 10, 3, 10, 3, 10, 3, 10, 7, 10, 133, 10, 10, 12, 10, 14, 10, 136, 11, 10, 3, 10, 3, 10, 7, 10, 140, 10, 10, 12, 10, 14, 10, 143, 11, 10, 3, 10, 3, 10, 7, 10, 147, 10, 10, 12, 10, 14, 10, 150, 11, 10, 3, 11, 3, 11, 3, 11, 3, 11, 5, 11, 156, 10, 11, 3, 11, 5, 11, 159, 10, 11, 3, 11, 5, 11, 162, 10, 11, 3, 12, 7, 12, 165, 10, 12, 12, 12, 14, 12, 168, 11, 12, 3, 12, 3, 12, 3, 12, 3, 12, 7, 12, 174, 10, 12, 12, 12, 14, 12, 177, 11, 12, 3, 12, 7, 12, 180, 10, 12, 12, 12, 14, 12, 183, 11, 12, 3, 13, 3, 13, 7, 13, 187, 10, 13, 12, 13, 14, 13, 190, 11, 13, 3, 13, 3, 13, 7, 13, 194, 10, 13, 12, 13, 14, 13, 197, 11, 13, 3, 13, 3, 13, 3, 14, 3, 14, 7, 14, 203, 10, 14, 12, 14, 14, 14, 206, 11, 14, 3, 15, 3, 15, 3, 15, 5, 15, 211, 10, 15, 3, 16, 3, 16, 5, 16, 215, 10, 16, 3, 17, 5, 17, 218, 10, 17, 3, 17, 6, 17, 221, 10, 17, 13, 17, 14, 17, 222, 3, 17, 5, 17, 226, 10, 17, 3, 17, 5, 17, 229, 10, 17, 3, 18, 5, 18, 232, 10, 18, 3, 18, 6, 18, 235, 10, 18, 13, 18, 14, 18, 236, 3, 18, 3, 18, 7, 18, 241, 10, 18, 12, 18, 14, 18, 244, 11, 18, 3, 18, 3, 18, 6, 18, 248, 10, 18, 13, 18, 14, 18, 249, 5, 18, 252, 10, 18, 3, 18, 5, 18, 255, 10, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 20, 6, 20, 262, 10, 20, 13, 20, 14, 20, 263, 5, 20, 266, 10, 20, 3, 21, 6, 21, 269, 10, 21, 13, 21, 14, 21, 270, 3, 21, 3, 21, 6, 21, 275, 10, 21, 13, 21, 14, 21, 276, 3, 22, 6, 22, 280, 10, 22, 13, 22, 14, 22, 281, 3, 22, 3, 22, 6, 22, 286, 10, 22, 13, 22, 14, 22, 287, 3, 23, 7, 23, 291, 10, 23, 12, 23, 14, 23, 294, 11, 23, 3, 23, 3, 23, 7, 23, 298, 10, 23, 12, 23, 14, 23, 301, 11, 23, 3, 23, 2, 3, 18, 24, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 2, 6, 4, 2, 4, 4, 15, 20, 5, 2, 21, 21, 26, 26, 29, 29, 5, 2, 21, 22, 26, 26, 29, 29, 3, 2, 5, 7, 2, 328, 2, 47, 3, 2, 2, 2, 4, 57, 3, 2, 2, 2, 6, 66, 3, 2, 2, 2, 8, 78, 3, 2, 2, 2, 10, 88, 3, 2, 2, 2, 12, 92, 3, 2, 2, 2, 14, 112, 3, 2, 2, 2, 16, 114, 3, 2, 2, 2, 18, 116, 3, 2, 2, 2, 20, 161, 3, 2, 2, 2, 22, 166, 3, 2, 2, 2, 24, 184, 3, 2, 2, 2, 26, 200, 3, 2, 2, 
2, 28, 210, 3, 2, 2, 2, 30, 214, 3, 2, 2, 2, 32, 228, 3, 2, 2, 2, 34, 231, 3, 2, 2, 2, 36, 256, 3, 2, 2, 2, 38, 265, 3, 2, 2, 2, 40, 268, 3, 2, 2, 2, 42, 279, 3, 2, 2, 2, 44, 292, 3, 2, 2, 2, 46, 48, 5, 4, 3, 2, 47, 46, 3, 2, 2, 2, 47, 48, 3, 2, 2, 2, 48, 52, 3, 2, 2, 2, 49, 51, 7, 25, 2, 2, 50, 49, 3, 2, 2, 2, 51, 54, 3, 2, 2, 2, 52, 50, 3, 2, 2, 2, 52, 53, 3, 2, 2, 2, 53, 55, 3, 2, 2, 2, 54, 52, 3, 2, 2, 2, 55, 56, 7, 2, 2, 3, 56, 3, 3, 2, 2, 2, 57, 63, 5, 6, 4, 2, 58, 59, 5, 40, 21, 2, 59, 60, 5, 6, 4, 2, 60, 62, 3, 2, 2, 2, 61, 58, 3, 2, 2, 2, 62, 65, 3, 2, 2, 2, 63, 61, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, 5, 3, 2, 2, 2, 65, 63, 3, 2, 2, 2, 66, 75, 5, 8, 5, 2, 67, 69, 7, 25, 2, 2, 68, 67, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 68, 3, 2, 2, 2, 70, 71, 3, 2, 2, 2, 71, 72, 3, 2, 2, 2, 72, 74, 5, 8, 5, 2, 73, 68, 3, 2, 2, 2, 74, 77, 3, 2, 2, 2, 75, 73, 3, 2, 2, 2, 75, 76, 3, 2, 2, 2, 76, 7, 3, 2, 2, 2, 77, 75, 3, 2, 2, 2, 78, 84, 5, 10, 6, 2, 79, 80, 5, 42, 22, 2, 80, 81, 5, 10, 6, 2, 81, 83, 3, 2, 2, 2, 82, 79, 3, 2, 2, 2, 83, 86, 3, 2, 2, 2, 84, 82, 3, 2, 2, 2, 84, 85, 3, 2, 2, 2, 85, 9, 3, 2, 2, 2, 86, 84, 3, 2, 2, 2, 87, 89, 5, 38, 20, 2, 88, 87, 3, 2, 2, 2, 88, 89, 3, 2, 2, 2, 89, 90, 3, 2, 2, 2, 90, 91, 5, 12, 7, 2, 91, 11, 3, 2, 2, 2, 92, 108, 5, 14, 8, 2, 93, 95, 7, 25, 2, 2, 94, 93, 3, 2, 2, 2, 95, 98, 3, 2, 2, 2, 96, 94, 3, 2, 2, 2, 96, 97, 3, 2, 2, 2, 97, 99, 3, 2, 2, 2, 98, 96, 3, 2, 2, 2, 99, 103, 5, 16, 9, 2, 100, 102, 7, 25, 2, 2, 101, 100, 3, 2, 2, 2, 102, 105, 3, 2, 2, 2, 103, 101, 3, 2, 2, 2, 103, 104, 3, 2, 2, 2, 104, 106, 3, 2, 2, 2, 105, 103, 3, 2, 2, 2, 106, 107, 5, 14, 8, 2, 107, 109, 3, 2, 2, 2, 108, 96, 3, 2, 2, 2, 108, 109, 3, 2, 2, 2, 109, 13, 3, 2, 2, 2, 110, 113, 5, 30, 16, 2, 111, 113, 5, 18, 10, 2, 112, 110, 3, 2, 2, 2, 112, 111, 3, 2, 2, 2, 113, 15, 3, 2, 2, 2, 114, 115, 9, 2, 2, 2, 115, 17, 3, 2, 2, 2, 116, 117, 8, 10, 1, 2, 117, 118, 5, 20, 11, 2, 118, 148, 3, 2, 2, 2, 119, 120, 12, 4, 2, 2, 120, 121, 7, 3, 2, 2, 121, 127, 5, 28, 15, 2, 122, 124, 7, 8, 2, 2, 123, 125, 5, 22, 12, 2, 124, 123, 3, 2, 2, 2, 124, 125, 3, 2, 2, 2, 125, 126, 3, 2, 2, 2, 126, 128, 7, 9, 2, 2, 127, 122, 3, 2, 2, 2, 127, 128, 3, 2, 2, 2, 128, 147, 3, 2, 2, 2, 129, 130, 12, 3, 2, 2, 130, 134, 7, 10, 2, 2, 131, 133, 7, 25, 2, 2, 132, 131, 3, 2, 2, 2, 133, 136, 3, 2, 2, 2, 134, 132, 3, 2, 2, 2, 134, 135, 3, 2, 2, 2, 135, 137, 3, 2, 2, 2, 136, 134, 3, 2, 2, 2, 137, 141, 5, 14, 8, 2, 138, 140, 7, 25, 2, 2, 139, 138, 3, 2, 2, 2, 140, 143, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, 142, 3, 2, 2, 2, 142, 144, 3, 2, 2, 2, 143, 141, 3, 2, 2, 2, 144, 145, 7, 11, 2, 2, 145, 147, 3, 2, 2, 2, 146, 119, 3, 2, 2, 2, 146, 129, 3, 2, 2, 2, 147, 150, 3, 2, 2, 2, 148, 146, 3, 2, 2, 2, 148, 149, 3, 2, 2, 2, 149, 19, 3, 2, 2, 2, 150, 148, 3, 2, 2, 2, 151, 162, 5, 24, 13, 2, 152, 158, 5, 26, 14, 2, 153, 155, 7, 8, 2, 2, 154, 156, 5, 22, 12, 2, 155, 154, 3, 2, 2, 2, 155, 156, 3, 2, 2, 2, 156, 157, 3, 2, 2, 2, 157, 159, 7, 9, 2, 2, 158, 153, 3, 2, 2, 2, 158, 159, 3, 2, 2, 2, 159, 162, 3, 2, 2, 2, 160, 162, 7, 24, 2, 2, 161, 151, 3, 2, 2, 2, 161, 152, 3, 2, 2, 2, 161, 160, 3, 2, 2, 2, 162, 21, 3, 2, 2, 2, 163, 165, 7, 25, 2, 2, 164, 163, 3, 2, 2, 2, 165, 168, 3, 2, 2, 2, 166, 164, 3, 2, 2, 2, 166, 167, 3, 2, 2, 2, 167, 169, 3, 2, 2, 2, 168, 166, 3, 2, 2, 2, 169, 175, 5, 14, 8, 2, 170, 171, 5, 44, 23, 2, 171, 172, 5, 14, 8, 2, 172, 174, 3, 2, 2, 2, 173, 170, 3, 2, 2, 2, 174, 177, 3, 2, 2, 2, 175, 173, 3, 2, 2, 2, 175, 176, 3, 2, 2, 2, 176, 181, 3, 2, 2, 2, 177, 175, 3, 2, 2, 2, 178, 180, 7, 25, 2, 2, 179, 178, 3, 2, 
2, 2, 180, 183, 3, 2, 2, 2, 181, 179, 3, 2, 2, 2, 181, 182, 3, 2, 2, 2, 182, 23, 3, 2, 2, 2, 183, 181, 3, 2, 2, 2, 184, 188, 7, 8, 2, 2, 185, 187, 7, 25, 2, 2, 186, 185, 3, 2, 2, 2, 187, 190, 3, 2, 2, 2, 188, 186, 3, 2, 2, 2, 188, 189, 3, 2, 2, 2, 189, 191, 3, 2, 2, 2, 190, 188, 3, 2, 2, 2, 191, 195, 5, 4, 3, 2, 192, 194, 7, 25, 2, 2, 193, 192, 3, 2, 2, 2, 194, 197, 3, 2, 2, 2, 195, 193, 3, 2, 2, 2, 195, 196, 3, 2, 2, 2, 196, 198, 3, 2, 2, 2, 197, 195, 3, 2, 2, 2, 198, 199, 7, 9, 2, 2, 199, 25, 3, 2, 2, 2, 200, 204, 9, 3, 2, 2, 201, 203, 9, 4, 2, 2, 202, 201, 3, 2, 2, 2, 203, 206, 3, 2, 2, 2, 204, 202, 3, 2, 2, 2, 204, 205, 3, 2, 2, 2, 205, 27, 3, 2, 2, 2, 206, 204, 3, 2, 2, 2, 207, 211, 5, 26, 14, 2, 208, 211, 7, 24, 2, 2, 209, 211, 5, 36, 19, 2, 210, 207, 3, 2, 2, 2, 210, 208, 3, 2, 2, 2, 210, 209, 3, 2, 2, 2, 211, 29, 3, 2, 2, 2, 212, 215, 5, 34, 18, 2, 213, 215, 5, 32, 17, 2, 214, 212, 3, 2, 2, 2, 214, 213, 3, 2, 2, 2, 215, 31, 3, 2, 2, 2, 216, 218, 7, 22, 2, 2, 217, 216, 3, 2, 2, 2, 217, 218, 3, 2, 2, 2, 218, 220, 3, 2, 2, 2, 219, 221, 7, 26, 2, 2, 220, 219, 3, 2, 2, 2, 221, 222, 3, 2, 2, 2, 222, 220, 3, 2, 2, 2, 222, 223, 3, 2, 2, 2, 223, 229, 3, 2, 2, 2, 224, 226, 7, 22, 2, 2, 225, 224, 3, 2, 2, 2, 225, 226, 3, 2, 2, 2, 226, 227, 3, 2, 2, 2, 227, 229, 7, 27, 2, 2, 228, 217, 3, 2, 2, 2, 228, 225, 3, 2, 2, 2, 229, 33, 3, 2, 2, 2, 230, 232, 7, 22, 2, 2, 231, 230, 3, 2, 2, 2, 231, 232, 3, 2, 2, 2, 232, 251, 3, 2, 2, 2, 233, 235, 7, 26, 2, 2, 234, 233, 3, 2, 2, 2, 235, 236, 3, 2, 2, 2, 236, 234, 3, 2, 2, 2, 236, 237, 3, 2, 2, 2, 237, 238, 3, 2, 2, 2, 238, 242, 7, 3, 2, 2, 239, 241, 7, 26, 2, 2, 240, 239, 3, 2, 2, 2, 241, 244, 3, 2, 2, 2, 242, 240, 3, 2, 2, 2, 242, 243, 3, 2, 2, 2, 243, 252, 3, 2, 2, 2, 244, 242, 3, 2, 2, 2, 245, 247, 7, 3, 2, 2, 246, 248, 7, 26, 2, 2, 247, 246, 3, 2, 2, 2, 248, 249, 3, 2, 2, 2, 249, 247, 3, 2, 2, 2, 249, 250, 3, 2, 2, 2, 250, 252, 3, 2, 2, 2, 251, 234, 3, 2, 2, 2, 251, 245, 3, 2, 2, 2, 252, 254, 3, 2, 2, 2, 253, 255, 7, 28, 2, 2, 254, 253, 3, 2, 2, 2, 254, 255, 3, 2, 2, 2, 255, 35, 3, 2, 2, 2, 256, 257, 9, 5, 2, 2, 257, 37, 3, 2, 2, 2, 258, 266, 7, 22, 2, 2, 259, 261, 7, 7, 2, 2, 260, 262, 7, 25, 2, 2, 261, 260, 3, 2, 2, 2, 262, 263, 3, 2, 2, 2, 263, 261, 3, 2, 2, 2, 263, 264, 3, 2, 2, 2, 264, 266, 3, 2, 2, 2, 265, 258, 3, 2, 2, 2, 265, 259, 3, 2, 2, 2, 266, 39, 3, 2, 2, 2, 267, 269, 7, 25, 2, 2, 268, 267, 3, 2, 2, 2, 269, 270, 3, 2, 2, 2, 270, 268, 3, 2, 2, 2, 270, 271, 3, 2, 2, 2, 271, 272, 3, 2, 2, 2, 272, 274, 7, 6, 2, 2, 273, 275, 7, 25, 2, 2, 274, 273, 3, 2, 2, 2, 275, 276, 3, 2, 2, 2, 276, 274, 3, 2, 2, 2, 276, 277, 3, 2, 2, 2, 277, 41, 3, 2, 2, 2, 278, 280, 7, 25, 2, 2, 279, 278, 3, 2, 2, 2, 280, 281, 3, 2, 2, 2, 281, 279, 3, 2, 2, 2, 281, 282, 3, 2, 2, 2, 282, 283, 3, 2, 2, 2, 283, 285, 7, 5, 2, 2, 284, 286, 7, 25, 2, 2, 285, 284, 3, 2, 2, 2, 286, 287, 3, 2, 2, 2, 287, 285, 3, 2, 2, 2, 287, 288, 3, 2, 2, 2, 288, 43, 3, 2, 2, 2, 289, 291, 7, 25, 2, 2, 290, 289, 3, 2, 2, 2, 291, 294, 3, 2, 2, 2, 292, 290, 3, 2, 2, 2, 292, 293, 3, 2, 2, 2, 293, 295, 3, 2, 2, 2, 294, 292, 3, 2, 2, 2, 295, 299, 7, 14, 2, 2, 296, 298, 7, 25, 2, 2, 297, 296, 3, 2, 2, 2, 298, 301, 3, 2, 2, 2, 299, 297, 3, 2, 2, 2, 299, 300, 3, 2, 2, 2, 300, 45, 3, 2, 2, 2, 301, 299, 3, 2, 2, 2, 48, 47, 52, 63, 70, 75, 84, 88, 96, 103, 108, 112, 124, 127, 134, 141, 146, 148, 155, 158, 161, 166, 175, 181, 188, 195, 204, 210, 214, 217, 222, 225, 228, 231, 236, 242, 249, 251, 254, 263, 265, 270, 276, 281, 287, 292, 299] \ No newline at end of file diff --git 
a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.tokens b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.tokens new file mode 100644 index 00000000..990955c0 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpression.tokens @@ -0,0 +1,50 @@ +DOT=1 +HAS=2 +OR=3 +AND=4 +NOT=5 +LPAREN=6 +RPAREN=7 +LBRACE=8 +RBRACE=9 +LBRACKET=10 +RBRACKET=11 +COMMA=12 +LESS_THAN=13 +LESS_EQUALS=14 +GREATER_THAN=15 +GREATER_EQUALS=16 +NOT_EQUALS=17 +EQUALS=18 +EXCLAIM=19 +MINUS=20 +PLUS=21 +STRING=22 +WS=23 +DIGIT=24 +HEX_DIGIT=25 +EXPONENT=26 +TEXT=27 +BACKSLASH=28 +'.'=1 +':'=2 +'OR'=3 +'AND'=4 +'NOT'=5 +'('=6 +')'=7 +'['=8 +']'=9 +'{'=10 +'}'=11 +','=12 +'<'=13 +'<='=14 +'>'=15 +'>='=16 +'!='=17 +'='=18 +'!'=19 +'-'=20 +'+'=21 +'\\'=28 diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.g4 b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.g4 new file mode 100644 index 00000000..ae3130dd --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.g4 @@ -0,0 +1,123 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +lexer grammar FilterExpressionLexer; + +// Lexer Rules +// =========== + +DOT : '.'; +HAS : ':'; +OR : 'OR'; +AND : 'AND'; +NOT : 'NOT'; +LPAREN : '('; +RPAREN : ')'; +LBRACE : '['; +RBRACE : ']'; +LBRACKET : '{'; +RBRACKET : '}'; +COMMA : ','; +LESS_THAN : '<'; +LESS_EQUALS : '<='; +GREATER_THAN : '>'; +GREATER_EQUALS : '>='; +NOT_EQUALS : '!='; +EQUALS : '='; +EXCLAIM : '!'; +MINUS : '-'; +PLUS : '+'; +STRING : '"' Character* '"'; +WS : Whitespace; +DIGIT : Digit; +HEX_DIGIT : '0x' HexDigit+; +EXPONENT : Exponent; +TEXT : (StartChar | TextEsc) (MidChar | TextEsc)*; +BACKSLASH : '\\'; + +fragment Character + : ' ' | '!' | '#' .. '[' | ']' .. '~' + | CharactersFromU00A1 + | TextEsc + | '\\' ('a' | 'b' | 'f' | 'n' | 'r' | 't' | 'v')? + | Whitespace + ; + +fragment TextEsc + : EscapedChar + | UnicodeEsc + | OctalEsc + | HexEsc + ; + +fragment UnicodeEsc + : '\\' 'u' HexDigit HexDigit HexDigit HexDigit + ; + +fragment OctalEsc + : '\\' [0-3]? OctalDigit? OctalDigit + ; + +fragment HexEsc + : '\\x' HexDigit HexDigit + ; + +fragment Digit + : [0-9] + ; + +fragment Exponent + : [eE] (PLUS|MINUS)? Digit+ + ; + +fragment HexDigit + : Digit | [a-fA-F] + ; + +fragment OctalDigit + : [0-7] + ; + +fragment StartChar + : '#' .. '\'' + | '*' + | '/' + | ';' + | '?' + | '@' + | [A-Z] + | '^' .. 'z' + | '|' + | CharactersFromU00A1 + ; + +fragment MidChar + : StartChar + | Digit + | PLUS + | MINUS + ; + +fragment EscapedChar + : '\\' [:=<>+~"\\.*] + ; + +fragment Whitespace + : (' '|'\r'|'\t'|'\u000C'|'\n') + ; + +fragment CharactersFromU00A1 + : '\u00A1' .. 
'\ufffe' + ; + diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.interp b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.interp new file mode 100644 index 00000000..2899db0b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.interp @@ -0,0 +1,115 @@ +token literal names: +null +'.' +':' +'OR' +'AND' +'NOT' +'(' +')' +'[' +']' +'{' +'}' +',' +'<' +'<=' +'>' +'>=' +'!=' +'=' +'!' +'-' +'+' +null +null +null +null +null +null +'\\' + +token symbolic names: +null +DOT +HAS +OR +AND +NOT +LPAREN +RPAREN +LBRACE +RBRACE +LBRACKET +RBRACKET +COMMA +LESS_THAN +LESS_EQUALS +GREATER_THAN +GREATER_EQUALS +NOT_EQUALS +EQUALS +EXCLAIM +MINUS +PLUS +STRING +WS +DIGIT +HEX_DIGIT +EXPONENT +TEXT +BACKSLASH + +rule names: +DOT +HAS +OR +AND +NOT +LPAREN +RPAREN +LBRACE +RBRACE +LBRACKET +RBRACKET +COMMA +LESS_THAN +LESS_EQUALS +GREATER_THAN +GREATER_EQUALS +NOT_EQUALS +EQUALS +EXCLAIM +MINUS +PLUS +STRING +WS +DIGIT +HEX_DIGIT +EXPONENT +TEXT +BACKSLASH +Character +TextEsc +UnicodeEsc +OctalEsc +HexEsc +Digit +Exponent +HexDigit +OctalDigit +StartChar +MidChar +EscapedChar +Whitespace +CharactersFromU00A1 + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 30, 246, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33, 4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4, 39, 9, 39, 4, 40, 9, 40, 4, 41, 9, 41, 4, 42, 9, 42, 4, 43, 9, 43, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, 15, 3, 15, 3, 16, 3, 16, 3, 17, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 23, 3, 23, 7, 23, 140, 10, 23, 12, 23, 14, 23, 143, 11, 23, 3, 23, 3, 23, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3, 26, 3, 26, 3, 26, 6, 26, 155, 10, 26, 13, 26, 14, 26, 156, 3, 27, 3, 27, 3, 28, 3, 28, 5, 28, 163, 10, 28, 3, 28, 3, 28, 7, 28, 167, 10, 28, 12, 28, 14, 28, 170, 11, 28, 3, 29, 3, 29, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 5, 30, 179, 10, 30, 3, 30, 5, 30, 182, 10, 30, 3, 31, 3, 31, 3, 31, 3, 31, 5, 31, 188, 10, 31, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, 3, 33, 3, 33, 5, 33, 199, 10, 33, 3, 33, 5, 33, 202, 10, 33, 3, 33, 3, 33, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 35, 3, 35, 3, 36, 3, 36, 3, 36, 5, 36, 217, 10, 36, 3, 36, 6, 36, 220, 10, 36, 13, 36, 14, 36, 221, 3, 37, 3, 37, 5, 37, 226, 10, 37, 3, 38, 3, 38, 3, 39, 3, 39, 5, 39, 232, 10, 39, 3, 40, 3, 40, 3, 40, 3, 40, 5, 40, 238, 10, 40, 3, 41, 3, 41, 3, 41, 3, 42, 3, 42, 3, 43, 3, 43, 2, 2, 44, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, 29, 57, 30, 59, 2, 61, 2, 63, 2, 65, 2, 67, 2, 69, 2, 71, 2, 73, 2, 75, 2, 77, 2, 79, 2, 81, 2, 83, 2, 85, 2, 3, 2, 12, 5, 2, 34, 35, 37, 93, 95, 
128, 8, 2, 99, 100, 104, 104, 112, 112, 116, 116, 118, 118, 120, 120, 3, 2, 50, 53, 3, 2, 50, 59, 4, 2, 71, 71, 103, 103, 4, 2, 67, 72, 99, 104, 3, 2, 50, 57, 9, 2, 37, 41, 44, 44, 49, 49, 61, 61, 65, 92, 96, 124, 126, 126, 9, 2, 36, 36, 44, 45, 48, 48, 60, 60, 62, 64, 94, 94, 128, 128, 5, 2, 11, 12, 14, 15, 34, 34, 2, 254, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 3, 87, 3, 2, 2, 2, 5, 89, 3, 2, 2, 2, 7, 91, 3, 2, 2, 2, 9, 94, 3, 2, 2, 2, 11, 98, 3, 2, 2, 2, 13, 102, 3, 2, 2, 2, 15, 104, 3, 2, 2, 2, 17, 106, 3, 2, 2, 2, 19, 108, 3, 2, 2, 2, 21, 110, 3, 2, 2, 2, 23, 112, 3, 2, 2, 2, 25, 114, 3, 2, 2, 2, 27, 116, 3, 2, 2, 2, 29, 118, 3, 2, 2, 2, 31, 121, 3, 2, 2, 2, 33, 123, 3, 2, 2, 2, 35, 126, 3, 2, 2, 2, 37, 129, 3, 2, 2, 2, 39, 131, 3, 2, 2, 2, 41, 133, 3, 2, 2, 2, 43, 135, 3, 2, 2, 2, 45, 137, 3, 2, 2, 2, 47, 146, 3, 2, 2, 2, 49, 148, 3, 2, 2, 2, 51, 150, 3, 2, 2, 2, 53, 158, 3, 2, 2, 2, 55, 162, 3, 2, 2, 2, 57, 171, 3, 2, 2, 2, 59, 181, 3, 2, 2, 2, 61, 187, 3, 2, 2, 2, 63, 189, 3, 2, 2, 2, 65, 196, 3, 2, 2, 2, 67, 205, 3, 2, 2, 2, 69, 211, 3, 2, 2, 2, 71, 213, 3, 2, 2, 2, 73, 225, 3, 2, 2, 2, 75, 227, 3, 2, 2, 2, 77, 231, 3, 2, 2, 2, 79, 237, 3, 2, 2, 2, 81, 239, 3, 2, 2, 2, 83, 242, 3, 2, 2, 2, 85, 244, 3, 2, 2, 2, 87, 88, 7, 48, 2, 2, 88, 4, 3, 2, 2, 2, 89, 90, 7, 60, 2, 2, 90, 6, 3, 2, 2, 2, 91, 92, 7, 81, 2, 2, 92, 93, 7, 84, 2, 2, 93, 8, 3, 2, 2, 2, 94, 95, 7, 67, 2, 2, 95, 96, 7, 80, 2, 2, 96, 97, 7, 70, 2, 2, 97, 10, 3, 2, 2, 2, 98, 99, 7, 80, 2, 2, 99, 100, 7, 81, 2, 2, 100, 101, 7, 86, 2, 2, 101, 12, 3, 2, 2, 2, 102, 103, 7, 42, 2, 2, 103, 14, 3, 2, 2, 2, 104, 105, 7, 43, 2, 2, 105, 16, 3, 2, 2, 2, 106, 107, 7, 93, 2, 2, 107, 18, 3, 2, 2, 2, 108, 109, 7, 95, 2, 2, 109, 20, 3, 2, 2, 2, 110, 111, 7, 125, 2, 2, 111, 22, 3, 2, 2, 2, 112, 113, 7, 127, 2, 2, 113, 24, 3, 2, 2, 2, 114, 115, 7, 46, 2, 2, 115, 26, 3, 2, 2, 2, 116, 117, 7, 62, 2, 2, 117, 28, 3, 2, 2, 2, 118, 119, 7, 62, 2, 2, 119, 120, 7, 63, 2, 2, 120, 30, 3, 2, 2, 2, 121, 122, 7, 64, 2, 2, 122, 32, 3, 2, 2, 2, 123, 124, 7, 64, 2, 2, 124, 125, 7, 63, 2, 2, 125, 34, 3, 2, 2, 2, 126, 127, 7, 35, 2, 2, 127, 128, 7, 63, 2, 2, 128, 36, 3, 2, 2, 2, 129, 130, 7, 63, 2, 2, 130, 38, 3, 2, 2, 2, 131, 132, 7, 35, 2, 2, 132, 40, 3, 2, 2, 2, 133, 134, 7, 47, 2, 2, 134, 42, 3, 2, 2, 2, 135, 136, 7, 45, 2, 2, 136, 44, 3, 2, 2, 2, 137, 141, 7, 36, 2, 2, 138, 140, 5, 59, 30, 2, 139, 138, 3, 2, 2, 2, 140, 143, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, 142, 3, 2, 2, 2, 142, 144, 3, 2, 2, 2, 143, 141, 3, 2, 2, 2, 144, 145, 7, 36, 2, 2, 145, 46, 3, 2, 2, 2, 146, 147, 5, 83, 42, 2, 147, 48, 3, 2, 2, 2, 148, 149, 5, 69, 35, 2, 149, 50, 3, 2, 2, 2, 150, 151, 7, 50, 2, 2, 151, 152, 7, 122, 2, 2, 152, 154, 3, 2, 2, 2, 153, 155, 5, 73, 37, 2, 154, 153, 3, 2, 2, 2, 155, 156, 3, 2, 2, 2, 156, 154, 3, 2, 2, 2, 156, 157, 3, 2, 2, 2, 157, 52, 3, 2, 2, 2, 158, 159, 5, 71, 36, 2, 159, 54, 3, 2, 2, 2, 160, 163, 5, 77, 39, 2, 161, 163, 5, 61, 31, 2, 162, 160, 3, 2, 2, 2, 162, 161, 3, 2, 2, 2, 163, 168, 3, 2, 2, 2, 164, 167, 5, 79, 40, 2, 165, 167, 5, 61, 31, 2, 166, 164, 3, 2, 2, 
2, 166, 165, 3, 2, 2, 2, 167, 170, 3, 2, 2, 2, 168, 166, 3, 2, 2, 2, 168, 169, 3, 2, 2, 2, 169, 56, 3, 2, 2, 2, 170, 168, 3, 2, 2, 2, 171, 172, 7, 94, 2, 2, 172, 58, 3, 2, 2, 2, 173, 182, 9, 2, 2, 2, 174, 182, 5, 85, 43, 2, 175, 182, 5, 61, 31, 2, 176, 178, 7, 94, 2, 2, 177, 179, 9, 3, 2, 2, 178, 177, 3, 2, 2, 2, 178, 179, 3, 2, 2, 2, 179, 182, 3, 2, 2, 2, 180, 182, 5, 83, 42, 2, 181, 173, 3, 2, 2, 2, 181, 174, 3, 2, 2, 2, 181, 175, 3, 2, 2, 2, 181, 176, 3, 2, 2, 2, 181, 180, 3, 2, 2, 2, 182, 60, 3, 2, 2, 2, 183, 188, 5, 81, 41, 2, 184, 188, 5, 63, 32, 2, 185, 188, 5, 65, 33, 2, 186, 188, 5, 67, 34, 2, 187, 183, 3, 2, 2, 2, 187, 184, 3, 2, 2, 2, 187, 185, 3, 2, 2, 2, 187, 186, 3, 2, 2, 2, 188, 62, 3, 2, 2, 2, 189, 190, 7, 94, 2, 2, 190, 191, 7, 119, 2, 2, 191, 192, 5, 73, 37, 2, 192, 193, 5, 73, 37, 2, 193, 194, 5, 73, 37, 2, 194, 195, 5, 73, 37, 2, 195, 64, 3, 2, 2, 2, 196, 198, 7, 94, 2, 2, 197, 199, 9, 4, 2, 2, 198, 197, 3, 2, 2, 2, 198, 199, 3, 2, 2, 2, 199, 201, 3, 2, 2, 2, 200, 202, 5, 75, 38, 2, 201, 200, 3, 2, 2, 2, 201, 202, 3, 2, 2, 2, 202, 203, 3, 2, 2, 2, 203, 204, 5, 75, 38, 2, 204, 66, 3, 2, 2, 2, 205, 206, 7, 94, 2, 2, 206, 207, 7, 122, 2, 2, 207, 208, 3, 2, 2, 2, 208, 209, 5, 73, 37, 2, 209, 210, 5, 73, 37, 2, 210, 68, 3, 2, 2, 2, 211, 212, 9, 5, 2, 2, 212, 70, 3, 2, 2, 2, 213, 216, 9, 6, 2, 2, 214, 217, 5, 43, 22, 2, 215, 217, 5, 41, 21, 2, 216, 214, 3, 2, 2, 2, 216, 215, 3, 2, 2, 2, 216, 217, 3, 2, 2, 2, 217, 219, 3, 2, 2, 2, 218, 220, 5, 69, 35, 2, 219, 218, 3, 2, 2, 2, 220, 221, 3, 2, 2, 2, 221, 219, 3, 2, 2, 2, 221, 222, 3, 2, 2, 2, 222, 72, 3, 2, 2, 2, 223, 226, 5, 69, 35, 2, 224, 226, 9, 7, 2, 2, 225, 223, 3, 2, 2, 2, 225, 224, 3, 2, 2, 2, 226, 74, 3, 2, 2, 2, 227, 228, 9, 8, 2, 2, 228, 76, 3, 2, 2, 2, 229, 232, 9, 9, 2, 2, 230, 232, 5, 85, 43, 2, 231, 229, 3, 2, 2, 2, 231, 230, 3, 2, 2, 2, 232, 78, 3, 2, 2, 2, 233, 238, 5, 77, 39, 2, 234, 238, 5, 69, 35, 2, 235, 238, 5, 43, 22, 2, 236, 238, 5, 41, 21, 2, 237, 233, 3, 2, 2, 2, 237, 234, 3, 2, 2, 2, 237, 235, 3, 2, 2, 2, 237, 236, 3, 2, 2, 2, 238, 80, 3, 2, 2, 2, 239, 240, 7, 94, 2, 2, 240, 241, 9, 10, 2, 2, 241, 82, 3, 2, 2, 2, 242, 243, 9, 11, 2, 2, 243, 84, 3, 2, 2, 2, 244, 245, 4, 163, 0, 2, 245, 86, 3, 2, 2, 2, 18, 2, 141, 156, 162, 166, 168, 178, 181, 187, 198, 201, 216, 221, 225, 231, 237, 2] \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.tokens b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.tokens new file mode 100644 index 00000000..990955c0 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/FilterExpressionLexer.tokens @@ -0,0 +1,50 @@ +DOT=1 +HAS=2 +OR=3 +AND=4 +NOT=5 +LPAREN=6 +RPAREN=7 +LBRACE=8 +RBRACE=9 +LBRACKET=10 +RBRACKET=11 +COMMA=12 +LESS_THAN=13 +LESS_EQUALS=14 +GREATER_THAN=15 +GREATER_EQUALS=16 +NOT_EQUALS=17 +EQUALS=18 +EXCLAIM=19 +MINUS=20 +PLUS=21 +STRING=22 +WS=23 +DIGIT=24 +HEX_DIGIT=25 +EXPONENT=26 +TEXT=27 +BACKSLASH=28 +'.'=1 +':'=2 +'OR'=3 +'AND'=4 +'NOT'=5 +'('=6 +')'=7 +'['=8 +']'=9 +'{'=10 +'}'=11 +','=12 +'<'=13 +'<='=14 +'>'=15 +'>='=16 +'!='=17 +'='=18 +'!'=19 +'-'=20 +'+'=21 +'\\'=28 diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/README.md b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/README.md new file mode 100644 index 00000000..41939fe2 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/README.md @@ -0,0 +1,12 @@ +# Filter Grammar + +The 
filter grammar is based on the list filtering syntax supported by Google +Cloud Logging's [Advanced Filters](https://cloud.google.com/logging/docs/view/advanced-filters). + +The Antlr4 toolchain was used to model the grammar and generate go sources for +walking the parse tree (see the [installation instructions](https://github.com/antlr/antlr4/blob/master/doc/faq/installation.md)). +When updating a grammar file (*.g4), be sure to regenerate the source: + +``` +.../gen>antlr4 -no-listener -visitor -Dlanguage=Go -package gen FilterExpressionLexer.g4 FilterExpression.g4 +``` diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_base_visitor.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_base_visitor.go new file mode 100644 index 00000000..9efaed4c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_base_visitor.go @@ -0,0 +1,112 @@ +// Code generated from FilterExpression.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package gen // FilterExpression +import "github.com/antlr/antlr4/runtime/Go/antlr" + +type BaseFilterExpressionVisitor struct { + *antlr.BaseParseTreeVisitor +} + +func (v *BaseFilterExpressionVisitor) VisitFilter(ctx *FilterContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitExpression(ctx *ExpressionContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitSequence(ctx *SequenceContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitFactor(ctx *FactorContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitTerm(ctx *TermContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitRestriction(ctx *RestrictionContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitComparable(ctx *ComparableContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitComparator(ctx *ComparatorContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitSelectOrCall(ctx *SelectOrCallContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitDynamicIndex(ctx *DynamicIndexContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitPrimaryExpr(ctx *PrimaryExprContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitNestedExpr(ctx *NestedExprContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitIdentOrGlobalCall(ctx *IdentOrGlobalCallContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitStringVal(ctx *StringValContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitArgList(ctx *ArgListContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitComposite(ctx *CompositeContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitText(ctx *TextContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitField(ctx *FieldContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitNumber(ctx *NumberContext) 
interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitIntVal(ctx *IntValContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitFloatVal(ctx *FloatValContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitKeyword(ctx *KeywordContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitNotOp(ctx *NotOpContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitAndOp(ctx *AndOpContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitOrOp(ctx *OrOpContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseFilterExpressionVisitor) VisitSep(ctx *SepContext) interface{} { + return v.VisitChildren(ctx) +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_lexer.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_lexer.go new file mode 100644 index 00000000..873ecad0 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_lexer.go @@ -0,0 +1,227 @@ +// Code generated from FilterExpressionLexer.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package gen + +import ( + "fmt" + "unicode" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import error +var _ = fmt.Printf +var _ = unicode.IsLetter + +var serializedLexerAtn = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 30, 246, + 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, + 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, + 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, + 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, + 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, + 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33, + 4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4, + 39, 9, 39, 4, 40, 9, 40, 4, 41, 9, 41, 4, 42, 9, 42, 4, 43, 9, 43, 3, 2, + 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, + 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, + 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, 15, 3, 15, 3, 16, + 3, 16, 3, 17, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, + 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 23, 3, 23, 7, 23, 140, 10, 23, 12, 23, + 14, 23, 143, 11, 23, 3, 23, 3, 23, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3, + 26, 3, 26, 3, 26, 6, 26, 155, 10, 26, 13, 26, 14, 26, 156, 3, 27, 3, 27, + 3, 28, 3, 28, 5, 28, 163, 10, 28, 3, 28, 3, 28, 7, 28, 167, 10, 28, 12, + 28, 14, 28, 170, 11, 28, 3, 29, 3, 29, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, + 5, 30, 179, 10, 30, 3, 30, 5, 30, 182, 10, 30, 3, 31, 3, 31, 3, 31, 3, + 31, 5, 31, 188, 10, 31, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, 3, 32, + 3, 33, 3, 33, 5, 33, 199, 10, 33, 3, 33, 5, 33, 202, 10, 33, 3, 33, 3, + 33, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 35, 3, 35, 3, 36, 3, 36, + 3, 36, 5, 36, 217, 10, 36, 3, 36, 6, 36, 220, 10, 36, 13, 36, 14, 36, 221, + 3, 37, 3, 37, 5, 37, 226, 10, 37, 3, 38, 3, 38, 3, 39, 3, 39, 5, 39, 232, + 10, 39, 3, 40, 3, 40, 3, 40, 3, 40, 5, 40, 238, 10, 40, 3, 41, 3, 41, 3, + 41, 3, 42, 3, 42, 3, 43, 3, 43, 2, 2, 44, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, + 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 
15, 29, 16, 31, + 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, + 26, 51, 27, 53, 28, 55, 29, 57, 30, 59, 2, 61, 2, 63, 2, 65, 2, 67, 2, + 69, 2, 71, 2, 73, 2, 75, 2, 77, 2, 79, 2, 81, 2, 83, 2, 85, 2, 3, 2, 12, + 5, 2, 34, 35, 37, 93, 95, 128, 8, 2, 99, 100, 104, 104, 112, 112, 116, + 116, 118, 118, 120, 120, 3, 2, 50, 53, 3, 2, 50, 59, 4, 2, 71, 71, 103, + 103, 4, 2, 67, 72, 99, 104, 3, 2, 50, 57, 9, 2, 37, 41, 44, 44, 49, 49, + 61, 61, 65, 92, 96, 124, 126, 126, 9, 2, 36, 36, 44, 45, 48, 48, 60, 60, + 62, 64, 94, 94, 128, 128, 5, 2, 11, 12, 14, 15, 34, 34, 2, 254, 2, 3, 3, + 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, + 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, + 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, + 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, + 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, + 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, + 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, + 2, 2, 2, 3, 87, 3, 2, 2, 2, 5, 89, 3, 2, 2, 2, 7, 91, 3, 2, 2, 2, 9, 94, + 3, 2, 2, 2, 11, 98, 3, 2, 2, 2, 13, 102, 3, 2, 2, 2, 15, 104, 3, 2, 2, + 2, 17, 106, 3, 2, 2, 2, 19, 108, 3, 2, 2, 2, 21, 110, 3, 2, 2, 2, 23, 112, + 3, 2, 2, 2, 25, 114, 3, 2, 2, 2, 27, 116, 3, 2, 2, 2, 29, 118, 3, 2, 2, + 2, 31, 121, 3, 2, 2, 2, 33, 123, 3, 2, 2, 2, 35, 126, 3, 2, 2, 2, 37, 129, + 3, 2, 2, 2, 39, 131, 3, 2, 2, 2, 41, 133, 3, 2, 2, 2, 43, 135, 3, 2, 2, + 2, 45, 137, 3, 2, 2, 2, 47, 146, 3, 2, 2, 2, 49, 148, 3, 2, 2, 2, 51, 150, + 3, 2, 2, 2, 53, 158, 3, 2, 2, 2, 55, 162, 3, 2, 2, 2, 57, 171, 3, 2, 2, + 2, 59, 181, 3, 2, 2, 2, 61, 187, 3, 2, 2, 2, 63, 189, 3, 2, 2, 2, 65, 196, + 3, 2, 2, 2, 67, 205, 3, 2, 2, 2, 69, 211, 3, 2, 2, 2, 71, 213, 3, 2, 2, + 2, 73, 225, 3, 2, 2, 2, 75, 227, 3, 2, 2, 2, 77, 231, 3, 2, 2, 2, 79, 237, + 3, 2, 2, 2, 81, 239, 3, 2, 2, 2, 83, 242, 3, 2, 2, 2, 85, 244, 3, 2, 2, + 2, 87, 88, 7, 48, 2, 2, 88, 4, 3, 2, 2, 2, 89, 90, 7, 60, 2, 2, 90, 6, + 3, 2, 2, 2, 91, 92, 7, 81, 2, 2, 92, 93, 7, 84, 2, 2, 93, 8, 3, 2, 2, 2, + 94, 95, 7, 67, 2, 2, 95, 96, 7, 80, 2, 2, 96, 97, 7, 70, 2, 2, 97, 10, + 3, 2, 2, 2, 98, 99, 7, 80, 2, 2, 99, 100, 7, 81, 2, 2, 100, 101, 7, 86, + 2, 2, 101, 12, 3, 2, 2, 2, 102, 103, 7, 42, 2, 2, 103, 14, 3, 2, 2, 2, + 104, 105, 7, 43, 2, 2, 105, 16, 3, 2, 2, 2, 106, 107, 7, 93, 2, 2, 107, + 18, 3, 2, 2, 2, 108, 109, 7, 95, 2, 2, 109, 20, 3, 2, 2, 2, 110, 111, 7, + 125, 2, 2, 111, 22, 3, 2, 2, 2, 112, 113, 7, 127, 2, 2, 113, 24, 3, 2, + 2, 2, 114, 115, 7, 46, 2, 2, 115, 26, 3, 2, 2, 2, 116, 117, 7, 62, 2, 2, + 117, 28, 3, 2, 2, 2, 118, 119, 7, 62, 2, 2, 119, 120, 7, 63, 2, 2, 120, + 30, 3, 2, 2, 2, 121, 122, 7, 64, 2, 2, 122, 32, 3, 2, 2, 2, 123, 124, 7, + 64, 2, 2, 124, 125, 7, 63, 2, 2, 125, 34, 3, 2, 2, 2, 126, 127, 7, 35, + 2, 2, 127, 128, 7, 63, 2, 2, 128, 36, 3, 2, 2, 2, 129, 130, 7, 63, 2, 2, + 130, 38, 3, 2, 2, 2, 131, 132, 7, 35, 2, 2, 132, 40, 3, 2, 2, 2, 133, 134, + 7, 47, 2, 2, 134, 42, 3, 2, 2, 2, 135, 136, 7, 45, 2, 2, 136, 44, 3, 2, + 2, 2, 137, 141, 7, 36, 2, 2, 138, 140, 5, 59, 30, 2, 139, 138, 3, 2, 2, + 2, 140, 143, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, 142, 3, 2, 2, 2, 142, + 144, 3, 2, 2, 2, 143, 141, 3, 2, 2, 2, 144, 145, 7, 36, 2, 2, 145, 46, + 3, 2, 2, 2, 146, 147, 5, 83, 42, 2, 147, 48, 3, 2, 2, 2, 148, 149, 5, 69, + 35, 2, 149, 50, 3, 2, 2, 2, 150, 151, 7, 50, 2, 2, 151, 152, 7, 122, 2, + 2, 152, 154, 3, 2, 2, 2, 153, 
155, 5, 73, 37, 2, 154, 153, 3, 2, 2, 2, + 155, 156, 3, 2, 2, 2, 156, 154, 3, 2, 2, 2, 156, 157, 3, 2, 2, 2, 157, + 52, 3, 2, 2, 2, 158, 159, 5, 71, 36, 2, 159, 54, 3, 2, 2, 2, 160, 163, + 5, 77, 39, 2, 161, 163, 5, 61, 31, 2, 162, 160, 3, 2, 2, 2, 162, 161, 3, + 2, 2, 2, 163, 168, 3, 2, 2, 2, 164, 167, 5, 79, 40, 2, 165, 167, 5, 61, + 31, 2, 166, 164, 3, 2, 2, 2, 166, 165, 3, 2, 2, 2, 167, 170, 3, 2, 2, 2, + 168, 166, 3, 2, 2, 2, 168, 169, 3, 2, 2, 2, 169, 56, 3, 2, 2, 2, 170, 168, + 3, 2, 2, 2, 171, 172, 7, 94, 2, 2, 172, 58, 3, 2, 2, 2, 173, 182, 9, 2, + 2, 2, 174, 182, 5, 85, 43, 2, 175, 182, 5, 61, 31, 2, 176, 178, 7, 94, + 2, 2, 177, 179, 9, 3, 2, 2, 178, 177, 3, 2, 2, 2, 178, 179, 3, 2, 2, 2, + 179, 182, 3, 2, 2, 2, 180, 182, 5, 83, 42, 2, 181, 173, 3, 2, 2, 2, 181, + 174, 3, 2, 2, 2, 181, 175, 3, 2, 2, 2, 181, 176, 3, 2, 2, 2, 181, 180, + 3, 2, 2, 2, 182, 60, 3, 2, 2, 2, 183, 188, 5, 81, 41, 2, 184, 188, 5, 63, + 32, 2, 185, 188, 5, 65, 33, 2, 186, 188, 5, 67, 34, 2, 187, 183, 3, 2, + 2, 2, 187, 184, 3, 2, 2, 2, 187, 185, 3, 2, 2, 2, 187, 186, 3, 2, 2, 2, + 188, 62, 3, 2, 2, 2, 189, 190, 7, 94, 2, 2, 190, 191, 7, 119, 2, 2, 191, + 192, 5, 73, 37, 2, 192, 193, 5, 73, 37, 2, 193, 194, 5, 73, 37, 2, 194, + 195, 5, 73, 37, 2, 195, 64, 3, 2, 2, 2, 196, 198, 7, 94, 2, 2, 197, 199, + 9, 4, 2, 2, 198, 197, 3, 2, 2, 2, 198, 199, 3, 2, 2, 2, 199, 201, 3, 2, + 2, 2, 200, 202, 5, 75, 38, 2, 201, 200, 3, 2, 2, 2, 201, 202, 3, 2, 2, + 2, 202, 203, 3, 2, 2, 2, 203, 204, 5, 75, 38, 2, 204, 66, 3, 2, 2, 2, 205, + 206, 7, 94, 2, 2, 206, 207, 7, 122, 2, 2, 207, 208, 3, 2, 2, 2, 208, 209, + 5, 73, 37, 2, 209, 210, 5, 73, 37, 2, 210, 68, 3, 2, 2, 2, 211, 212, 9, + 5, 2, 2, 212, 70, 3, 2, 2, 2, 213, 216, 9, 6, 2, 2, 214, 217, 5, 43, 22, + 2, 215, 217, 5, 41, 21, 2, 216, 214, 3, 2, 2, 2, 216, 215, 3, 2, 2, 2, + 216, 217, 3, 2, 2, 2, 217, 219, 3, 2, 2, 2, 218, 220, 5, 69, 35, 2, 219, + 218, 3, 2, 2, 2, 220, 221, 3, 2, 2, 2, 221, 219, 3, 2, 2, 2, 221, 222, + 3, 2, 2, 2, 222, 72, 3, 2, 2, 2, 223, 226, 5, 69, 35, 2, 224, 226, 9, 7, + 2, 2, 225, 223, 3, 2, 2, 2, 225, 224, 3, 2, 2, 2, 226, 74, 3, 2, 2, 2, + 227, 228, 9, 8, 2, 2, 228, 76, 3, 2, 2, 2, 229, 232, 9, 9, 2, 2, 230, 232, + 5, 85, 43, 2, 231, 229, 3, 2, 2, 2, 231, 230, 3, 2, 2, 2, 232, 78, 3, 2, + 2, 2, 233, 238, 5, 77, 39, 2, 234, 238, 5, 69, 35, 2, 235, 238, 5, 43, + 22, 2, 236, 238, 5, 41, 21, 2, 237, 233, 3, 2, 2, 2, 237, 234, 3, 2, 2, + 2, 237, 235, 3, 2, 2, 2, 237, 236, 3, 2, 2, 2, 238, 80, 3, 2, 2, 2, 239, + 240, 7, 94, 2, 2, 240, 241, 9, 10, 2, 2, 241, 82, 3, 2, 2, 2, 242, 243, + 9, 11, 2, 2, 243, 84, 3, 2, 2, 2, 244, 245, 4, 163, 0, 2, 245, 86, 3, 2, + 2, 2, 18, 2, 141, 156, 162, 166, 168, 178, 181, 187, 198, 201, 216, 221, + 225, 231, 237, 2, +} + +var lexerDeserializer = antlr.NewATNDeserializer(nil) +var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn) + +var lexerChannelNames = []string{ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN", +} + +var lexerModeNames = []string{ + "DEFAULT_MODE", +} + +var lexerLiteralNames = []string{ + "", "'.'", "':'", "'OR'", "'AND'", "'NOT'", "'('", "')'", "'['", "']'", + "'{'", "'}'", "','", "'<'", "'<='", "'>'", "'>='", "'!='", "'='", "'!'", + "'-'", "'+'", "", "", "", "", "", "", "'\\'", +} + +var lexerSymbolicNames = []string{ + "", "DOT", "HAS", "OR", "AND", "NOT", "LPAREN", "RPAREN", "LBRACE", "RBRACE", + "LBRACKET", "RBRACKET", "COMMA", "LESS_THAN", "LESS_EQUALS", "GREATER_THAN", + "GREATER_EQUALS", "NOT_EQUALS", "EQUALS", "EXCLAIM", "MINUS", "PLUS", "STRING", + "WS", "DIGIT", "HEX_DIGIT", 
"EXPONENT", "TEXT", "BACKSLASH", +} + +var lexerRuleNames = []string{ + "DOT", "HAS", "OR", "AND", "NOT", "LPAREN", "RPAREN", "LBRACE", "RBRACE", + "LBRACKET", "RBRACKET", "COMMA", "LESS_THAN", "LESS_EQUALS", "GREATER_THAN", + "GREATER_EQUALS", "NOT_EQUALS", "EQUALS", "EXCLAIM", "MINUS", "PLUS", "STRING", + "WS", "DIGIT", "HEX_DIGIT", "EXPONENT", "TEXT", "BACKSLASH", "Character", + "TextEsc", "UnicodeEsc", "OctalEsc", "HexEsc", "Digit", "Exponent", "HexDigit", + "OctalDigit", "StartChar", "MidChar", "EscapedChar", "Whitespace", "CharactersFromU00A1", +} + +type FilterExpressionLexer struct { + *antlr.BaseLexer + channelNames []string + modeNames []string + // TODO: EOF string +} + +var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState)) + +func init() { + for index, ds := range lexerAtn.DecisionToState { + lexerDecisionToDFA[index] = antlr.NewDFA(ds, index) + } +} + +func NewFilterExpressionLexer(input antlr.CharStream) *FilterExpressionLexer { + + l := new(FilterExpressionLexer) + + l.BaseLexer = antlr.NewBaseLexer(input) + l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache()) + + l.channelNames = lexerChannelNames + l.modeNames = lexerModeNames + l.RuleNames = lexerRuleNames + l.LiteralNames = lexerLiteralNames + l.SymbolicNames = lexerSymbolicNames + l.GrammarFileName = "FilterExpressionLexer.g4" + // TODO: l.EOF = antlr.TokenEOF + + return l +} + +// FilterExpressionLexer tokens. +const ( + FilterExpressionLexerDOT = 1 + FilterExpressionLexerHAS = 2 + FilterExpressionLexerOR = 3 + FilterExpressionLexerAND = 4 + FilterExpressionLexerNOT = 5 + FilterExpressionLexerLPAREN = 6 + FilterExpressionLexerRPAREN = 7 + FilterExpressionLexerLBRACE = 8 + FilterExpressionLexerRBRACE = 9 + FilterExpressionLexerLBRACKET = 10 + FilterExpressionLexerRBRACKET = 11 + FilterExpressionLexerCOMMA = 12 + FilterExpressionLexerLESS_THAN = 13 + FilterExpressionLexerLESS_EQUALS = 14 + FilterExpressionLexerGREATER_THAN = 15 + FilterExpressionLexerGREATER_EQUALS = 16 + FilterExpressionLexerNOT_EQUALS = 17 + FilterExpressionLexerEQUALS = 18 + FilterExpressionLexerEXCLAIM = 19 + FilterExpressionLexerMINUS = 20 + FilterExpressionLexerPLUS = 21 + FilterExpressionLexerSTRING = 22 + FilterExpressionLexerWS = 23 + FilterExpressionLexerDIGIT = 24 + FilterExpressionLexerHEX_DIGIT = 25 + FilterExpressionLexerEXPONENT = 26 + FilterExpressionLexerTEXT = 27 + FilterExpressionLexerBACKSLASH = 28 +) diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_parser.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_parser.go new file mode 100644 index 00000000..a36c081a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/gen/filterexpression_parser.go @@ -0,0 +1,4077 @@ +// Code generated from FilterExpression.g4 by ANTLR 4.7.1. DO NOT EDIT. 
+ +package gen // FilterExpression +import ( + "fmt" + "reflect" + "strconv" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import errors +var _ = fmt.Printf +var _ = reflect.Copy +var _ = strconv.Itoa + +var parserATN = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 30, 303, + 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, + 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, + 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, + 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, + 3, 2, 5, 2, 48, 10, 2, 3, 2, 7, 2, 51, 10, 2, 12, 2, 14, 2, 54, 11, 2, + 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 62, 10, 3, 12, 3, 14, 3, 65, + 11, 3, 3, 4, 3, 4, 6, 4, 69, 10, 4, 13, 4, 14, 4, 70, 3, 4, 7, 4, 74, 10, + 4, 12, 4, 14, 4, 77, 11, 4, 3, 5, 3, 5, 3, 5, 3, 5, 7, 5, 83, 10, 5, 12, + 5, 14, 5, 86, 11, 5, 3, 6, 5, 6, 89, 10, 6, 3, 6, 3, 6, 3, 7, 3, 7, 7, + 7, 95, 10, 7, 12, 7, 14, 7, 98, 11, 7, 3, 7, 3, 7, 7, 7, 102, 10, 7, 12, + 7, 14, 7, 105, 11, 7, 3, 7, 3, 7, 5, 7, 109, 10, 7, 3, 8, 3, 8, 5, 8, 113, + 10, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, + 10, 5, 10, 125, 10, 10, 3, 10, 5, 10, 128, 10, 10, 3, 10, 3, 10, 3, 10, + 7, 10, 133, 10, 10, 12, 10, 14, 10, 136, 11, 10, 3, 10, 3, 10, 7, 10, 140, + 10, 10, 12, 10, 14, 10, 143, 11, 10, 3, 10, 3, 10, 7, 10, 147, 10, 10, + 12, 10, 14, 10, 150, 11, 10, 3, 11, 3, 11, 3, 11, 3, 11, 5, 11, 156, 10, + 11, 3, 11, 5, 11, 159, 10, 11, 3, 11, 5, 11, 162, 10, 11, 3, 12, 7, 12, + 165, 10, 12, 12, 12, 14, 12, 168, 11, 12, 3, 12, 3, 12, 3, 12, 3, 12, 7, + 12, 174, 10, 12, 12, 12, 14, 12, 177, 11, 12, 3, 12, 7, 12, 180, 10, 12, + 12, 12, 14, 12, 183, 11, 12, 3, 13, 3, 13, 7, 13, 187, 10, 13, 12, 13, + 14, 13, 190, 11, 13, 3, 13, 3, 13, 7, 13, 194, 10, 13, 12, 13, 14, 13, + 197, 11, 13, 3, 13, 3, 13, 3, 14, 3, 14, 7, 14, 203, 10, 14, 12, 14, 14, + 14, 206, 11, 14, 3, 15, 3, 15, 3, 15, 5, 15, 211, 10, 15, 3, 16, 3, 16, + 5, 16, 215, 10, 16, 3, 17, 5, 17, 218, 10, 17, 3, 17, 6, 17, 221, 10, 17, + 13, 17, 14, 17, 222, 3, 17, 5, 17, 226, 10, 17, 3, 17, 5, 17, 229, 10, + 17, 3, 18, 5, 18, 232, 10, 18, 3, 18, 6, 18, 235, 10, 18, 13, 18, 14, 18, + 236, 3, 18, 3, 18, 7, 18, 241, 10, 18, 12, 18, 14, 18, 244, 11, 18, 3, + 18, 3, 18, 6, 18, 248, 10, 18, 13, 18, 14, 18, 249, 5, 18, 252, 10, 18, + 3, 18, 5, 18, 255, 10, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 20, 6, 20, 262, + 10, 20, 13, 20, 14, 20, 263, 5, 20, 266, 10, 20, 3, 21, 6, 21, 269, 10, + 21, 13, 21, 14, 21, 270, 3, 21, 3, 21, 6, 21, 275, 10, 21, 13, 21, 14, + 21, 276, 3, 22, 6, 22, 280, 10, 22, 13, 22, 14, 22, 281, 3, 22, 3, 22, + 6, 22, 286, 10, 22, 13, 22, 14, 22, 287, 3, 23, 7, 23, 291, 10, 23, 12, + 23, 14, 23, 294, 11, 23, 3, 23, 3, 23, 7, 23, 298, 10, 23, 12, 23, 14, + 23, 301, 11, 23, 3, 23, 2, 3, 18, 24, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 2, 6, 4, 2, 4, 4, 15, 20, + 5, 2, 21, 21, 26, 26, 29, 29, 5, 2, 21, 22, 26, 26, 29, 29, 3, 2, 5, 7, + 2, 328, 2, 47, 3, 2, 2, 2, 4, 57, 3, 2, 2, 2, 6, 66, 3, 2, 2, 2, 8, 78, + 3, 2, 2, 2, 10, 88, 3, 2, 2, 2, 12, 92, 3, 2, 2, 2, 14, 112, 3, 2, 2, 2, + 16, 114, 3, 2, 2, 2, 18, 116, 3, 2, 2, 2, 20, 161, 3, 2, 2, 2, 22, 166, + 3, 2, 2, 2, 24, 184, 3, 2, 2, 2, 26, 200, 3, 2, 2, 2, 28, 210, 3, 2, 2, + 2, 30, 214, 3, 2, 2, 2, 32, 228, 3, 2, 2, 2, 34, 231, 3, 2, 2, 2, 36, 256, + 3, 2, 2, 2, 38, 265, 3, 2, 2, 2, 40, 268, 3, 2, 2, 2, 42, 279, 3, 2, 2, + 2, 44, 292, 3, 2, 2, 
2, 46, 48, 5, 4, 3, 2, 47, 46, 3, 2, 2, 2, 47, 48, + 3, 2, 2, 2, 48, 52, 3, 2, 2, 2, 49, 51, 7, 25, 2, 2, 50, 49, 3, 2, 2, 2, + 51, 54, 3, 2, 2, 2, 52, 50, 3, 2, 2, 2, 52, 53, 3, 2, 2, 2, 53, 55, 3, + 2, 2, 2, 54, 52, 3, 2, 2, 2, 55, 56, 7, 2, 2, 3, 56, 3, 3, 2, 2, 2, 57, + 63, 5, 6, 4, 2, 58, 59, 5, 40, 21, 2, 59, 60, 5, 6, 4, 2, 60, 62, 3, 2, + 2, 2, 61, 58, 3, 2, 2, 2, 62, 65, 3, 2, 2, 2, 63, 61, 3, 2, 2, 2, 63, 64, + 3, 2, 2, 2, 64, 5, 3, 2, 2, 2, 65, 63, 3, 2, 2, 2, 66, 75, 5, 8, 5, 2, + 67, 69, 7, 25, 2, 2, 68, 67, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 68, 3, + 2, 2, 2, 70, 71, 3, 2, 2, 2, 71, 72, 3, 2, 2, 2, 72, 74, 5, 8, 5, 2, 73, + 68, 3, 2, 2, 2, 74, 77, 3, 2, 2, 2, 75, 73, 3, 2, 2, 2, 75, 76, 3, 2, 2, + 2, 76, 7, 3, 2, 2, 2, 77, 75, 3, 2, 2, 2, 78, 84, 5, 10, 6, 2, 79, 80, + 5, 42, 22, 2, 80, 81, 5, 10, 6, 2, 81, 83, 3, 2, 2, 2, 82, 79, 3, 2, 2, + 2, 83, 86, 3, 2, 2, 2, 84, 82, 3, 2, 2, 2, 84, 85, 3, 2, 2, 2, 85, 9, 3, + 2, 2, 2, 86, 84, 3, 2, 2, 2, 87, 89, 5, 38, 20, 2, 88, 87, 3, 2, 2, 2, + 88, 89, 3, 2, 2, 2, 89, 90, 3, 2, 2, 2, 90, 91, 5, 12, 7, 2, 91, 11, 3, + 2, 2, 2, 92, 108, 5, 14, 8, 2, 93, 95, 7, 25, 2, 2, 94, 93, 3, 2, 2, 2, + 95, 98, 3, 2, 2, 2, 96, 94, 3, 2, 2, 2, 96, 97, 3, 2, 2, 2, 97, 99, 3, + 2, 2, 2, 98, 96, 3, 2, 2, 2, 99, 103, 5, 16, 9, 2, 100, 102, 7, 25, 2, + 2, 101, 100, 3, 2, 2, 2, 102, 105, 3, 2, 2, 2, 103, 101, 3, 2, 2, 2, 103, + 104, 3, 2, 2, 2, 104, 106, 3, 2, 2, 2, 105, 103, 3, 2, 2, 2, 106, 107, + 5, 14, 8, 2, 107, 109, 3, 2, 2, 2, 108, 96, 3, 2, 2, 2, 108, 109, 3, 2, + 2, 2, 109, 13, 3, 2, 2, 2, 110, 113, 5, 30, 16, 2, 111, 113, 5, 18, 10, + 2, 112, 110, 3, 2, 2, 2, 112, 111, 3, 2, 2, 2, 113, 15, 3, 2, 2, 2, 114, + 115, 9, 2, 2, 2, 115, 17, 3, 2, 2, 2, 116, 117, 8, 10, 1, 2, 117, 118, + 5, 20, 11, 2, 118, 148, 3, 2, 2, 2, 119, 120, 12, 4, 2, 2, 120, 121, 7, + 3, 2, 2, 121, 127, 5, 28, 15, 2, 122, 124, 7, 8, 2, 2, 123, 125, 5, 22, + 12, 2, 124, 123, 3, 2, 2, 2, 124, 125, 3, 2, 2, 2, 125, 126, 3, 2, 2, 2, + 126, 128, 7, 9, 2, 2, 127, 122, 3, 2, 2, 2, 127, 128, 3, 2, 2, 2, 128, + 147, 3, 2, 2, 2, 129, 130, 12, 3, 2, 2, 130, 134, 7, 10, 2, 2, 131, 133, + 7, 25, 2, 2, 132, 131, 3, 2, 2, 2, 133, 136, 3, 2, 2, 2, 134, 132, 3, 2, + 2, 2, 134, 135, 3, 2, 2, 2, 135, 137, 3, 2, 2, 2, 136, 134, 3, 2, 2, 2, + 137, 141, 5, 14, 8, 2, 138, 140, 7, 25, 2, 2, 139, 138, 3, 2, 2, 2, 140, + 143, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, 142, 3, 2, 2, 2, 142, 144, + 3, 2, 2, 2, 143, 141, 3, 2, 2, 2, 144, 145, 7, 11, 2, 2, 145, 147, 3, 2, + 2, 2, 146, 119, 3, 2, 2, 2, 146, 129, 3, 2, 2, 2, 147, 150, 3, 2, 2, 2, + 148, 146, 3, 2, 2, 2, 148, 149, 3, 2, 2, 2, 149, 19, 3, 2, 2, 2, 150, 148, + 3, 2, 2, 2, 151, 162, 5, 24, 13, 2, 152, 158, 5, 26, 14, 2, 153, 155, 7, + 8, 2, 2, 154, 156, 5, 22, 12, 2, 155, 154, 3, 2, 2, 2, 155, 156, 3, 2, + 2, 2, 156, 157, 3, 2, 2, 2, 157, 159, 7, 9, 2, 2, 158, 153, 3, 2, 2, 2, + 158, 159, 3, 2, 2, 2, 159, 162, 3, 2, 2, 2, 160, 162, 7, 24, 2, 2, 161, + 151, 3, 2, 2, 2, 161, 152, 3, 2, 2, 2, 161, 160, 3, 2, 2, 2, 162, 21, 3, + 2, 2, 2, 163, 165, 7, 25, 2, 2, 164, 163, 3, 2, 2, 2, 165, 168, 3, 2, 2, + 2, 166, 164, 3, 2, 2, 2, 166, 167, 3, 2, 2, 2, 167, 169, 3, 2, 2, 2, 168, + 166, 3, 2, 2, 2, 169, 175, 5, 14, 8, 2, 170, 171, 5, 44, 23, 2, 171, 172, + 5, 14, 8, 2, 172, 174, 3, 2, 2, 2, 173, 170, 3, 2, 2, 2, 174, 177, 3, 2, + 2, 2, 175, 173, 3, 2, 2, 2, 175, 176, 3, 2, 2, 2, 176, 181, 3, 2, 2, 2, + 177, 175, 3, 2, 2, 2, 178, 180, 7, 25, 2, 2, 179, 178, 3, 2, 2, 2, 180, + 183, 3, 2, 2, 2, 181, 179, 3, 2, 2, 2, 181, 182, 3, 2, 2, 2, 182, 23, 3, + 2, 2, 2, 
183, 181, 3, 2, 2, 2, 184, 188, 7, 8, 2, 2, 185, 187, 7, 25, 2, + 2, 186, 185, 3, 2, 2, 2, 187, 190, 3, 2, 2, 2, 188, 186, 3, 2, 2, 2, 188, + 189, 3, 2, 2, 2, 189, 191, 3, 2, 2, 2, 190, 188, 3, 2, 2, 2, 191, 195, + 5, 4, 3, 2, 192, 194, 7, 25, 2, 2, 193, 192, 3, 2, 2, 2, 194, 197, 3, 2, + 2, 2, 195, 193, 3, 2, 2, 2, 195, 196, 3, 2, 2, 2, 196, 198, 3, 2, 2, 2, + 197, 195, 3, 2, 2, 2, 198, 199, 7, 9, 2, 2, 199, 25, 3, 2, 2, 2, 200, 204, + 9, 3, 2, 2, 201, 203, 9, 4, 2, 2, 202, 201, 3, 2, 2, 2, 203, 206, 3, 2, + 2, 2, 204, 202, 3, 2, 2, 2, 204, 205, 3, 2, 2, 2, 205, 27, 3, 2, 2, 2, + 206, 204, 3, 2, 2, 2, 207, 211, 5, 26, 14, 2, 208, 211, 7, 24, 2, 2, 209, + 211, 5, 36, 19, 2, 210, 207, 3, 2, 2, 2, 210, 208, 3, 2, 2, 2, 210, 209, + 3, 2, 2, 2, 211, 29, 3, 2, 2, 2, 212, 215, 5, 34, 18, 2, 213, 215, 5, 32, + 17, 2, 214, 212, 3, 2, 2, 2, 214, 213, 3, 2, 2, 2, 215, 31, 3, 2, 2, 2, + 216, 218, 7, 22, 2, 2, 217, 216, 3, 2, 2, 2, 217, 218, 3, 2, 2, 2, 218, + 220, 3, 2, 2, 2, 219, 221, 7, 26, 2, 2, 220, 219, 3, 2, 2, 2, 221, 222, + 3, 2, 2, 2, 222, 220, 3, 2, 2, 2, 222, 223, 3, 2, 2, 2, 223, 229, 3, 2, + 2, 2, 224, 226, 7, 22, 2, 2, 225, 224, 3, 2, 2, 2, 225, 226, 3, 2, 2, 2, + 226, 227, 3, 2, 2, 2, 227, 229, 7, 27, 2, 2, 228, 217, 3, 2, 2, 2, 228, + 225, 3, 2, 2, 2, 229, 33, 3, 2, 2, 2, 230, 232, 7, 22, 2, 2, 231, 230, + 3, 2, 2, 2, 231, 232, 3, 2, 2, 2, 232, 251, 3, 2, 2, 2, 233, 235, 7, 26, + 2, 2, 234, 233, 3, 2, 2, 2, 235, 236, 3, 2, 2, 2, 236, 234, 3, 2, 2, 2, + 236, 237, 3, 2, 2, 2, 237, 238, 3, 2, 2, 2, 238, 242, 7, 3, 2, 2, 239, + 241, 7, 26, 2, 2, 240, 239, 3, 2, 2, 2, 241, 244, 3, 2, 2, 2, 242, 240, + 3, 2, 2, 2, 242, 243, 3, 2, 2, 2, 243, 252, 3, 2, 2, 2, 244, 242, 3, 2, + 2, 2, 245, 247, 7, 3, 2, 2, 246, 248, 7, 26, 2, 2, 247, 246, 3, 2, 2, 2, + 248, 249, 3, 2, 2, 2, 249, 247, 3, 2, 2, 2, 249, 250, 3, 2, 2, 2, 250, + 252, 3, 2, 2, 2, 251, 234, 3, 2, 2, 2, 251, 245, 3, 2, 2, 2, 252, 254, + 3, 2, 2, 2, 253, 255, 7, 28, 2, 2, 254, 253, 3, 2, 2, 2, 254, 255, 3, 2, + 2, 2, 255, 35, 3, 2, 2, 2, 256, 257, 9, 5, 2, 2, 257, 37, 3, 2, 2, 2, 258, + 266, 7, 22, 2, 2, 259, 261, 7, 7, 2, 2, 260, 262, 7, 25, 2, 2, 261, 260, + 3, 2, 2, 2, 262, 263, 3, 2, 2, 2, 263, 261, 3, 2, 2, 2, 263, 264, 3, 2, + 2, 2, 264, 266, 3, 2, 2, 2, 265, 258, 3, 2, 2, 2, 265, 259, 3, 2, 2, 2, + 266, 39, 3, 2, 2, 2, 267, 269, 7, 25, 2, 2, 268, 267, 3, 2, 2, 2, 269, + 270, 3, 2, 2, 2, 270, 268, 3, 2, 2, 2, 270, 271, 3, 2, 2, 2, 271, 272, + 3, 2, 2, 2, 272, 274, 7, 6, 2, 2, 273, 275, 7, 25, 2, 2, 274, 273, 3, 2, + 2, 2, 275, 276, 3, 2, 2, 2, 276, 274, 3, 2, 2, 2, 276, 277, 3, 2, 2, 2, + 277, 41, 3, 2, 2, 2, 278, 280, 7, 25, 2, 2, 279, 278, 3, 2, 2, 2, 280, + 281, 3, 2, 2, 2, 281, 279, 3, 2, 2, 2, 281, 282, 3, 2, 2, 2, 282, 283, + 3, 2, 2, 2, 283, 285, 7, 5, 2, 2, 284, 286, 7, 25, 2, 2, 285, 284, 3, 2, + 2, 2, 286, 287, 3, 2, 2, 2, 287, 285, 3, 2, 2, 2, 287, 288, 3, 2, 2, 2, + 288, 43, 3, 2, 2, 2, 289, 291, 7, 25, 2, 2, 290, 289, 3, 2, 2, 2, 291, + 294, 3, 2, 2, 2, 292, 290, 3, 2, 2, 2, 292, 293, 3, 2, 2, 2, 293, 295, + 3, 2, 2, 2, 294, 292, 3, 2, 2, 2, 295, 299, 7, 14, 2, 2, 296, 298, 7, 25, + 2, 2, 297, 296, 3, 2, 2, 2, 298, 301, 3, 2, 2, 2, 299, 297, 3, 2, 2, 2, + 299, 300, 3, 2, 2, 2, 300, 45, 3, 2, 2, 2, 301, 299, 3, 2, 2, 2, 48, 47, + 52, 63, 70, 75, 84, 88, 96, 103, 108, 112, 124, 127, 134, 141, 146, 148, + 155, 158, 161, 166, 175, 181, 188, 195, 204, 210, 214, 217, 222, 225, 228, + 231, 236, 242, 249, 251, 254, 263, 265, 270, 276, 281, 287, 292, 299, +} +var deserializer = antlr.NewATNDeserializer(nil) +var 
deserializedATN = deserializer.DeserializeFromUInt16(parserATN) + +var literalNames = []string{ + "", "'.'", "':'", "'OR'", "'AND'", "'NOT'", "'('", "')'", "'['", "']'", + "'{'", "'}'", "','", "'<'", "'<='", "'>'", "'>='", "'!='", "'='", "'!'", + "'-'", "'+'", "", "", "", "", "", "", "'\\'", +} +var symbolicNames = []string{ + "", "DOT", "HAS", "OR", "AND", "NOT", "LPAREN", "RPAREN", "LBRACE", "RBRACE", + "LBRACKET", "RBRACKET", "COMMA", "LESS_THAN", "LESS_EQUALS", "GREATER_THAN", + "GREATER_EQUALS", "NOT_EQUALS", "EQUALS", "EXCLAIM", "MINUS", "PLUS", "STRING", + "WS", "DIGIT", "HEX_DIGIT", "EXPONENT", "TEXT", "BACKSLASH", +} + +var ruleNames = []string{ + "filter", "expression", "sequence", "factor", "term", "restriction", "comparable", + "comparator", "value", "primary", "argList", "composite", "text", "field", + "number", "intVal", "floatVal", "keyword", "notOp", "andOp", "orOp", "sep", +} +var decisionToDFA = make([]*antlr.DFA, len(deserializedATN.DecisionToState)) + +func init() { + for index, ds := range deserializedATN.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(ds, index) + } +} + +type FilterExpression struct { + *antlr.BaseParser +} + +func NewFilterExpression(input antlr.TokenStream) *FilterExpression { + this := new(FilterExpression) + + this.BaseParser = antlr.NewBaseParser(input) + + this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache()) + this.RuleNames = ruleNames + this.LiteralNames = literalNames + this.SymbolicNames = symbolicNames + this.GrammarFileName = "FilterExpression.g4" + + return this +} + +// FilterExpression tokens. +const ( + FilterExpressionEOF = antlr.TokenEOF + FilterExpressionDOT = 1 + FilterExpressionHAS = 2 + FilterExpressionOR = 3 + FilterExpressionAND = 4 + FilterExpressionNOT = 5 + FilterExpressionLPAREN = 6 + FilterExpressionRPAREN = 7 + FilterExpressionLBRACE = 8 + FilterExpressionRBRACE = 9 + FilterExpressionLBRACKET = 10 + FilterExpressionRBRACKET = 11 + FilterExpressionCOMMA = 12 + FilterExpressionLESS_THAN = 13 + FilterExpressionLESS_EQUALS = 14 + FilterExpressionGREATER_THAN = 15 + FilterExpressionGREATER_EQUALS = 16 + FilterExpressionNOT_EQUALS = 17 + FilterExpressionEQUALS = 18 + FilterExpressionEXCLAIM = 19 + FilterExpressionMINUS = 20 + FilterExpressionPLUS = 21 + FilterExpressionSTRING = 22 + FilterExpressionWS = 23 + FilterExpressionDIGIT = 24 + FilterExpressionHEX_DIGIT = 25 + FilterExpressionEXPONENT = 26 + FilterExpressionTEXT = 27 + FilterExpressionBACKSLASH = 28 +) + +// FilterExpression rules. +const ( + FilterExpressionRULE_filter = 0 + FilterExpressionRULE_expression = 1 + FilterExpressionRULE_sequence = 2 + FilterExpressionRULE_factor = 3 + FilterExpressionRULE_term = 4 + FilterExpressionRULE_restriction = 5 + FilterExpressionRULE_comparable = 6 + FilterExpressionRULE_comparator = 7 + FilterExpressionRULE_value = 8 + FilterExpressionRULE_primary = 9 + FilterExpressionRULE_argList = 10 + FilterExpressionRULE_composite = 11 + FilterExpressionRULE_text = 12 + FilterExpressionRULE_field = 13 + FilterExpressionRULE_number = 14 + FilterExpressionRULE_intVal = 15 + FilterExpressionRULE_floatVal = 16 + FilterExpressionRULE_keyword = 17 + FilterExpressionRULE_notOp = 18 + FilterExpressionRULE_andOp = 19 + FilterExpressionRULE_orOp = 20 + FilterExpressionRULE_sep = 21 +) + +// IFilterContext is an interface to support dynamic dispatch. +type IFilterContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. 
+ GetParser() antlr.Parser + + // IsFilterContext differentiates from other interfaces. + IsFilterContext() +} + +type FilterContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyFilterContext() *FilterContext { + var p = new(FilterContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = FilterExpressionRULE_filter + return p +} + +func (*FilterContext) IsFilterContext() {} + +func NewFilterContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FilterContext { + var p = new(FilterContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = FilterExpressionRULE_filter + + return p +} + +func (s *FilterContext) GetParser() antlr.Parser { return s.parser } + +func (s *FilterContext) EOF() antlr.TerminalNode { + return s.GetToken(FilterExpressionEOF, 0) +} + +func (s *FilterContext) Expression() IExpressionContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IExpressionContext) +} + +func (s *FilterContext) AllWS() []antlr.TerminalNode { + return s.GetTokens(FilterExpressionWS) +} + +func (s *FilterContext) WS(i int) antlr.TerminalNode { + return s.GetToken(FilterExpressionWS, i) +} + +func (s *FilterContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *FilterContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *FilterContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case FilterExpressionVisitor: + return t.VisitFilter(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *FilterExpression) Filter() (localctx IFilterContext) { + localctx = NewFilterContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, FilterExpressionRULE_filter) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + p.SetState(45) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if ((_la)&-(0x1f+1)) == 0 && ((1<>") +} + +// Return the filter expression value. +func (p *parser) VisitFilter(ctx *gen.FilterContext) interface{} { + if ctx.Expression() == nil { + return &expr.Expr{Id: p.id(ctx)} + } + return p.Visit(ctx.Expression()) +} + +// Return a logically ANDed set of sequences. +func (p *parser) VisitExpression(ctx *gen.ExpressionContext) interface{} { + var result = p.Visit(ctx.GetExpr()).(*expr.Expr) + ops := ctx.GetOp() + if ops == nil { + return result + } + for i, sequence := range ctx.GetRest() { + next := p.Visit(sequence).(*expr.Expr) + op := ops[i] + result = p.newCall(op, operators.LogicalAnd, []*expr.Expr{result, next}) + } + return result +} + +// Return a sequence of factors. 
+func (p *parser) VisitSequence(ctx *gen.SequenceContext) interface{} { + result := p.Visit(ctx.GetExpr()).(*expr.Expr) + if ctx.GetRest() == nil || len(ctx.GetRest()) == 0 { + return result + } + args := make([]*expr.Expr, len(ctx.GetRest())+1) + args[0] = result + for i, factor := range ctx.GetRest() { + index := i + 1 + args[index] = p.Visit(factor).(*expr.Expr) + } + return p.newCall(ctx, operators.Sequence, args) +} + +// Return a logically ORed set of terms. +func (p *parser) VisitFactor(ctx *gen.FactorContext) interface{} { + var result = p.Visit(ctx.GetExpr()).(*expr.Expr) + ops := ctx.GetOp() + if ops == nil { + return result + } + for i, term := range ctx.GetRest() { + next := p.Visit(term).(*expr.Expr) + op := ops[i] + result = p.newCall(op, operators.LogicalOr, []*expr.Expr{result, next}) + } + return result +} + +// Return an expression or unary operation. +func (p *parser) VisitTerm(ctx *gen.TermContext) interface{} { + var result = p.Visit(ctx.GetExpr()).(*expr.Expr) + // Negations and sequences apply only to restrictions, so if the output + // of the restriction visitor is not a restriction, wrap the expression in + // a Global restriction. + if result.GetCallExpr() == nil || + !operators.IsRestriction(result.GetCallExpr().GetFunction()) { + result = p.newCall(ctx, operators.Global, []*expr.Expr{result}) + } + if ctx.GetOp() == nil { + return result + } + return p.newCall(ctx.GetOp(), + findOperator(ctx.GetOp()), + []*expr.Expr{result}) +} + +// Return a restriction expression, commonly equality, ordering, or presence. +// When a restriction returns a GLOBAL for an identifier, the global function +// must determine whether the identifier is bound to a value or whether to +// treat the identifier name as a string value within a Sequence. +func (p *parser) VisitRestriction(ctx *gen.RestrictionContext) interface{} { + comparable := p.Visit(ctx.GetExpr()).(*expr.Expr) + if ctx.GetOp() == nil { + return comparable + } + arg := p.Visit(ctx.GetRest()).(*expr.Expr) + return p.newCall(ctx.GetOp(), + findOperator(ctx.GetOp()), + []*expr.Expr{comparable, arg}) +} + +// Visit either the numeric constant or value expression. +func (p *parser) VisitComparable(ctx *gen.ComparableContext) interface{} { + if ctx.Number() != nil { + return p.Visit(ctx.Number()) + } + return p.Visit(ctx.Value()) +} + +// Visit the primary expression. +func (p *parser) VisitPrimaryExpr(ctx *gen.PrimaryExprContext) interface{} { + return p.Visit(ctx.Primary()) +} + +// Return the select expression of a qualified/member function call. +func (p *parser) VisitSelectOrCall(ctx *gen.SelectOrCallContext) interface{} { + // Resolve the function target if one is present + target := p.Visit(ctx.Value()).(*expr.Expr) + field := p.Visit(ctx.Field()).(string) + if ctx.GetOpen() == nil { + return p.newSelect(ctx.GetOp(), target, field) + } + var args []*expr.Expr = nil + if ctx.ArgList() != nil { + args = p.Visit(ctx.ArgList()).([]*expr.Expr) + } + return p.newMemberCall(ctx.GetOpen(), field, target, args) +} + +// Return a dynamically computed index into a value or list. +func (p *parser) VisitDynamicIndex(ctx *gen.DynamicIndexContext) interface{} { + target := p.Visit(ctx.Value()).(*expr.Expr) + index := p.Visit(ctx.GetIndex()).(*expr.Expr) + return p.newCall(ctx.GetOp(), operators.Index, []*expr.Expr{target, index}) +} + +// Return an identifier or global function call expression.
+func (p *parser) VisitIdentOrGlobalCall( + ctx *gen.IdentOrGlobalCallContext) interface{} { + id := p.Visit(ctx.GetId()).(string) + if ctx.GetOpen() == nil { + return p.newIdent(ctx, id) + } + var args []*expr.Expr = nil + if ctx.ArgList() != nil { + args = p.Visit(ctx.ArgList()).([]*expr.Expr) + } + return p.newCall(ctx.GetOpen(), id, args) +} + +// Return a list of Expr values to be used as arguments. +func (p *parser) VisitArgList(ctx *gen.ArgListContext) interface{} { + exprArgs := make([]*expr.Expr, len(ctx.GetArgs())) + for i, arg := range ctx.GetArgs() { + exprArgs[i] = p.Visit(arg).(*expr.Expr) + } + return exprArgs +} + +// Return a string field name for use with a selected field or qualified +// function. +func (p *parser) VisitField(ctx *gen.FieldContext) interface{} { + if ctx.Keyword() != nil { + return p.Visit(ctx.Keyword()) + } + if ctx.GetQuotedText() != nil { + return p.unquote(ctx, ctx.GetQuotedText().GetText()) + } + return p.Visit(ctx.GetId()) +} + +// Return a string constant value. +// Different filter consumers may choose to support conventions for converting +// a string to a Timestamp or Duration. This parser does not attempt any more +// intelligent interpretation of the literal. +func (p *parser) VisitStringVal(ctx *gen.StringValContext) interface{} { + text := ctx.GetText() + return p.newConst(ctx, p.unquote(ctx, text)) +} + +func (p *parser) VisitNumber(ctx *gen.NumberContext) interface{} { + if ctx.FloatVal() != nil { + return p.Visit(ctx.FloatVal()) + } + return p.Visit(ctx.IntVal()) +} + +// Return an int64 value from the parsed string. +func (p *parser) VisitIntVal(ctx *gen.IntValContext) interface{} { + text := ctx.GetText() + val, err := strconv.ParseInt(text, 0, 64) + if err == nil { + return p.newConst(ctx, val) + } + p.errors.ReportError( + p.source, + common.Location(ctx.GetStart()), + fmt.Sprintf("Unrecognized integer value: %s", text)) + return p.newConst(ctx, "<>") +} + +// Return a float64 value from the parsed string. +func (p *parser) VisitFloatVal(ctx *gen.FloatValContext) interface{} { + text := ctx.GetText() + val, err := strconv.ParseFloat(text, 64) + if err == nil { + return p.newConst(ctx, val) + } + p.errors.ReportError( + p.source, + common.Location(ctx.GetStart()), + fmt.Sprintf("Unrecognized floating point value: %s", text)) + return p.newConst(ctx, "<>") +} + +// Listener implementations +func (p *parser) SyntaxError(recognizer antlr.Recognizer, + offendingSymbol interface{}, line, column int, msg string, + e antlr.RecognitionException) { + var errorMsg = "Syntax error" + switch e.(type) { + case antlr.InputMisMatchException: + errorMsg = "Input mismatch" + case antlr.NoViableAltException: + errorMsg = "Unexpected token" + } + p.errors.ReportError(p.source, common.NewLocation(line, column), errorMsg) +} + +// Ambiguities in the grammar can arise under rare circumstances, but typically +// only add a small look-ahead burden on parsing, where some number of lex +// tokens must be read before disambiguation can be done for the parse term. +func (p *parser) ReportAmbiguity(recognizer antlr.Parser, dfa *antlr.DFA, + startIndex, stopIndex int, exact bool, ambigAlts *antlr.BitSet, + configs antlr.ATNConfigSet) { + // Intentional +} + +// Indicates some added parsing overhead, but nothing problematic. 
+func (p *parser) ReportAttemptingFullContext(recognizer antlr.Parser, + dfa *antlr.DFA, startIndex, stopIndex int, + conflictingAlts *antlr.BitSet, configs antlr.ATNConfigSet) { + // Intentional +} + +// Indicates some added parsing overhead, but nothing problematic. +func (p *parser) ReportContextSensitivity(recognizer antlr.Parser, + dfa *antlr.DFA, startIndex, stopIndex, prediction int, + configs antlr.ATNConfigSet) { + // Intentional +} + +// Helper functions for attaching source context to the expression node. +func (p *parser) newIdent(token interface{}, name string) *expr.Expr { + return ast.NewIdent(p.id(token), name) +} + +func (p *parser) newSelect(token interface{}, operand *expr.Expr, + field string) *expr.Expr { + return ast.NewSelect(p.id(token), operand, field) +} + +func (p *parser) newConst(token interface{}, value interface{}) *expr.Expr { + return ast.NewConst(p.id(token), value) +} + +func (p *parser) newCall(token interface{}, name string, + args []*expr.Expr) *expr.Expr { + return p.newMemberCall(token, name, nil, args) +} + +func (p *parser) newMemberCall(token interface{}, name string, + target *expr.Expr, args []*expr.Expr) *expr.Expr { + return ast.NewCall(p.id(token), name, target, args) +} + +func (p *parser) id(ctx interface{}) int64 { + var token antlr.Token = nil + switch ctx.(type) { + case antlr.ParserRuleContext: + token = (ctx.(antlr.ParserRuleContext)).GetStart() + case antlr.Token: + token = ctx.(antlr.Token) + default: + // This should only happen if the ctx is nil + return -1 + } + location := common.Location(token) + id := p.nextId + p.positions[id], _ = p.source.CharacterOffset(location) + p.nextId++ + return id +} + +func (p *parser) unquote(ctx antlr.ParserRuleContext, value string) string { + if text, err := strconv.Unquote(value); err == nil { + return text + } + p.errors.ReportError(p.source, common.Location(ctx.GetStart()), + "Unable to unquote string") + return value +} + +func findOperator(value antlr.ParseTree) string { + op := value.GetText() + if name, found := operators.Find(op); found { + return name + } + return op +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/parser_test.go b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/parser_test.go new file mode 100644 index 00000000..d26e14d0 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/parser_test.go @@ -0,0 +1,121 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package parser provides methods to parse filter sources to CEL-based ASTs. 
+package parser + +import ( + "fmt" + "io/ioutil" + "strings" + "testing" + + pb "github.com/golang/protobuf/proto" + expr "github.com/google/cel-spec/proto/v1" + "github.com/grafeas/grafeas/server-go/filtering/common" +) + +const ( + DiagnosticsDelimiter = "\nDiagnostics:\n" + NoResult = "" + InputOutputDelimiter = "\n==================================================\n" + TestCaseDelimiter = "\n\n" +) + +type baseline struct { + expected *expr.ParsedExpr + errors string + source common.Source +} + +func TestParse_Complex(t *testing.T) { + runBaselines(t, "complex") +} + +func TestParse_Equality(t *testing.T) { + runBaselines(t, "equality") +} + +func TestParse_Error(t *testing.T) { + runBaselines(t, "error") +} + +func TestParse_Expression(t *testing.T) { + runBaselines(t, "expression") +} + +func TestParse_Function(t *testing.T) { + runBaselines(t, "function") +} + +func TestParse_Member(t *testing.T) { + runBaselines(t, "member") +} + +func TestParse_Unicode(t *testing.T) { + runBaselines(t, "unicode") +} + +func runBaselines(t *testing.T, filename string) { + baselines, err := newTestBaselines(filename) + if err != nil { + t.Errorf("Baselines could not be read: %v", err) + return + } + for _, baseline := range baselines { + verifyBaseline(t, baseline) + } +} + +func verifyBaseline(t *testing.T, baseline baseline) { + result, err := Parse(baseline.source) + if err != nil { + if err.String() != baseline.errors { + t.Errorf("Expected error not equal to actual. expected: %s\nactual: %s\n", + baseline.errors, err.String()) + } + } else if !pb.Equal(baseline.expected, result) { + t.Errorf("Expected proto not equal to actual. expected: %s\nactual: %s\n", + pb.MarshalTextString(baseline.expected), + pb.MarshalTextString(result)) + } +} + +func newTestBaselines(filename string) ([]baseline, error) { + bytes, err := ioutil.ReadFile(fmt.Sprintf("testdata/%s.baseline", filename)) + if err != nil { + panic(fmt.Sprintf("Could not read provided file: %s", filename)) + } + testCases := strings.Split(string(bytes), TestCaseDelimiter) + baselines := make([]baseline, len(testCases)) + for i, testCase := range testCases { + testCaseName := fmt.Sprintf("%s[%d]", filename, i) + inputOutput := strings.Split(testCase, InputOutputDelimiter) + input, output := inputOutput[0], inputOutput[1] + baselines[i] = baseline{ + source: common.NewStringSource(input, testCaseName), + expected: &expr.ParsedExpr{}, + } + resultOrError := strings.Split(output, DiagnosticsDelimiter) + result := resultOrError[0] + if result != NoResult { + if err := pb.UnmarshalText(output, baselines[i].expected); err != nil { + return nil, err + } + } else { + baselines[i].errors = resultOrError[1] + } + } + return baselines, nil +} diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/README.md b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/README.md new file mode 100644 index 00000000..e7b8ea61 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/README.md @@ -0,0 +1,32 @@ +# Filter Baselines + +The test baselines contain filter strings with the desired outputs separated by +50 `=` characters. Test cases within a baseline are separated by two newlines +`\n\n`. When the test case produces an error the line `` must appear +after the `=` separator, and the error output must appear after the +`Diagnostics:` line. + +## Supported Format + +``` +a OR b +================================================== +expr: < ... 
> + +~error-case +================================================== + +Diagnostics: +ERROR: ... +``` + +The baseline file must not end with an empty line as the line will be included +comparisions. + +## Test Output + +The baseline is self-contained in the sense that it capture the input and +expected output of the test. However, when there is a failure in the test, the +baseline case will be printed to stderr. At the moment, there is no support for +diffing actual versus expected baseline values, but this would be a nice future +refinement. \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/complex.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/complex.baseline new file mode 100644 index 00000000..ed0586bf --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/complex.baseline @@ -0,0 +1,697 @@ +c.d="e" f.g.3>argument(h) AND getattr(i.map, arg(j)) < arg(10) +================================================== +expr: < + id: 20 + call_expr: < + function: "_&&_" + args: < + id: 11 + call_expr: < + function: "_sequence_" + args: < + id: 4 + call_expr: < + function: "_==_" + args: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "c" + > + > + field: "d" + > + > + args: < + id: 3 + const_expr: < + string_value: "e" + > + > + > + > + args: < + id: 10 + call_expr: < + function: "_>_" + args: < + id: 7 + select_expr: < + operand: < + id: 6 + select_expr: < + operand: < + id: 5 + ident_expr: < + name: "f" + > + > + field: "g" + > + > + field: "3" + > + > + args: < + id: 9 + call_expr: < + function: "argument" + args: < + id: 8 + ident_expr: < + name: "h" + > + > + > + > + > + > + > + > + args: < + id: 19 + call_expr: < + function: "_<_" + args: < + id: 16 + call_expr: < + function: "getattr" + args: < + id: 13 + select_expr: < + operand: < + id: 12 + ident_expr: < + name: "i" + > + > + field: "map" + > + > + args: < + id: 15 + call_expr: < + function: "arg" + args: < + id: 14 + ident_expr: < + name: "j" + > + > + > + > + > + > + args: < + id: 18 + call_expr: < + function: "arg" + args: < + id: 17 + const_expr: < + int64_value: 10 + > + > + > + > + > + > + > +> +source_info: < + location: "complex[0]" + line_offsets: 63 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 4 + > + positions: < + key: 4 + value: 3 + > + positions: < + key: 5 + value: 8 + > + positions: < + key: 6 + value: 9 + > + positions: < + key: 7 + value: 11 + > + positions: < + key: 8 + value: 23 + > + positions: < + key: 9 + value: 22 + > + positions: < + key: 10 + value: 13 + > + positions: < + key: 11 + value: 0 + > + positions: < + key: 12 + value: 38 + > + positions: < + key: 13 + value: 39 + > + positions: < + key: 14 + value: 49 + > + positions: < + key: 15 + value: 48 + > + positions: < + key: 16 + value: 37 + > + positions: < + key: 17 + value: 59 + > + positions: < + key: 18 + value: 58 + > + positions: < + key: 19 + value: 53 + > + positions: < + key: 20 + value: 25 + > +> + +k.3 < 7.6 arg(name): (1 2 3) +================================================== +expr: < + id: 15 + call_expr: < + function: "_sequence_" + args: < + id: 4 + call_expr: < + function: "_<_" + args: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "k" + > + > + field: "3" + > + > + args: < + id: 3 + const_expr: < + double_value: 7.6 + > + > + > + > + args: < + id: 14 + call_expr: < + function: "_:_" + args: < + id: 6 
+ call_expr: < + function: "arg" + args: < + id: 5 + ident_expr: < + name: "name" + > + > + > + > + args: < + id: 13 + call_expr: < + function: "_sequence_" + args: < + id: 8 + call_expr: < + function: "_global_" + args: < + id: 7 + const_expr: < + int64_value: 1 + > + > + > + > + args: < + id: 10 + call_expr: < + function: "_global_" + args: < + id: 9 + const_expr: < + int64_value: 2 + > + > + > + > + args: < + id: 12 + call_expr: < + function: "_global_" + args: < + id: 11 + const_expr: < + int64_value: 3 + > + > + > + > + > + > + > + > + > +> +source_info: < + location: "complex[1]" + line_offsets: 29 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 6 + > + positions: < + key: 4 + value: 4 + > + positions: < + key: 5 + value: 14 + > + positions: < + key: 6 + value: 13 + > + positions: < + key: 7 + value: 22 + > + positions: < + key: 8 + value: 22 + > + positions: < + key: 9 + value: 24 + > + positions: < + key: 10 + value: 24 + > + positions: < + key: 11 + value: 26 + > + positions: < + key: 12 + value: 26 + > + positions: < + key: 13 + value: 22 + > + positions: < + key: 14 + value: 19 + > + positions: < + key: 15 + value: 0 + > +> + +a:1 OR b=2 AND c OR NOT d.call(10, 11, 12) +================================================== +expr: < + id: 18 + call_expr: < + function: "_&&_" + args: < + id: 7 + call_expr: < + function: "_||_" + args: < + id: 3 + call_expr: < + function: "_:_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + const_expr: < + int64_value: 1 + > + > + > + > + args: < + id: 6 + call_expr: < + function: "_==_" + args: < + id: 4 + ident_expr: < + name: "b" + > + > + args: < + id: 5 + const_expr: < + int64_value: 2 + > + > + > + > + > + > + args: < + id: 17 + call_expr: < + function: "_||_" + args: < + id: 9 + call_expr: < + function: "_global_" + args: < + id: 8 + ident_expr: < + name: "c" + > + > + > + > + args: < + id: 16 + call_expr: < + function: "_!" + args: < + id: 15 + call_expr: < + function: "_global_" + args: < + id: 14 + call_expr: < + target: < + id: 10 + ident_expr: < + name: "d" + > + > + function: "call" + args: < + id: 11 + const_expr: < + int64_value: 10 + > + > + args: < + id: 12 + const_expr: < + int64_value: 11 + > + > + args: < + id: 13 + const_expr: < + int64_value: 12 + > + > + > + > + > + > + > + > + > + > + > +> +source_info: < + location: "complex[2]" + line_offsets: 43 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 2 + > + positions: < + key: 3 + value: 1 + > + positions: < + key: 4 + value: 7 + > + positions: < + key: 5 + value: 9 + > + positions: < + key: 6 + value: 8 + > + positions: < + key: 7 + value: 3 + > + positions: < + key: 8 + value: 15 + > + positions: < + key: 9 + value: 15 + > + positions: < + key: 10 + value: 24 + > + positions: < + key: 11 + value: 31 + > + positions: < + key: 12 + value: 35 + > + positions: < + key: 13 + value: 39 + > + positions: < + key: 14 + value: 30 + > + positions: < + key: 15 + value: 20 + > + positions: < + key: 16 + value: 20 + > + positions: < + key: 17 + value: 16 + > + positions: < + key: 18 + value: 10 + > +> + +-a NOT a[b].call(arg)[0] > 0 +================================================== +expr: < + id: 14 + call_expr: < + function: "_sequence_" + args: < + id: 3 + call_expr: < + function: "-_" + args: < + id: 2 + call_expr: < + function: "_global_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + > + > + > + > + args: < + id: 13 + call_expr: < + function: "_!" 
+ args: < + id: 12 + call_expr: < + function: "_>_" + args: < + id: 10 + call_expr: < + function: "_[_]" + args: < + id: 8 + call_expr: < + target: < + id: 6 + call_expr: < + function: "_[_]" + args: < + id: 4 + ident_expr: < + name: "a" + > + > + args: < + id: 5 + ident_expr: < + name: "b" + > + > + > + > + function: "call" + args: < + id: 7 + ident_expr: < + name: "arg" + > + > + > + > + args: < + id: 9 + const_expr: < + int64_value: 0 + > + > + > + > + args: < + id: 11 + const_expr: < + int64_value: 0 + > + > + > + > + > + > + > +> +source_info: < + location: "complex[3]" + line_offsets: 29 + positions: < + key: 1 + value: 1 + > + positions: < + key: 2 + value: 0 + > + positions: < + key: 3 + value: 0 + > + positions: < + key: 4 + value: 7 + > + positions: < + key: 5 + value: 9 + > + positions: < + key: 6 + value: 8 + > + positions: < + key: 7 + value: 17 + > + positions: < + key: 8 + value: 16 + > + positions: < + key: 9 + value: 22 + > + positions: < + key: 10 + value: 21 + > + positions: < + key: 11 + value: 27 + > + positions: < + key: 12 + value: 25 + > + positions: < + key: 13 + value: 3 + > + positions: < + key: 14 + value: 0 + > +> diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/equality.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/equality.baseline new file mode 100644 index 00000000..94c646bd --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/equality.baseline @@ -0,0 +1,221 @@ +a!=b +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b" + > + > + > +> +source_info: < + location: "equality[0]" + line_offsets: 5 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 3 + > + positions: < + key: 3 + value: 1 + > +> + +a != b +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b" + > + > + > +> +source_info: < + location: "equality[1]" + line_offsets: 7 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 5 + > + positions: < + key: 3 + value: 2 + > +> + +a!= b +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b" + > + > + > +> +source_info: < + location: "equality[2]" + line_offsets: 6 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 4 + > + positions: < + key: 3 + value: 1 + > +> + +a !=b +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b" + > + > + > +> +source_info: < + location: "equality[3]" + line_offsets: 6 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 4 + > + positions: < + key: 3 + value: 2 + > +> + +a!b!=c +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a!b" + > + > + args: < + id: 2 + ident_expr: < + name: "c" + > + > + > +> +source_info: < + location: "equality[4]" + line_offsets: 7 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 5 + > + positions: < + key: 3 + value: 3 + 
> +> + +a!=b!c +================================================== +expr: < + id: 3 + call_expr: < + function: "_!=_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b!c" + > + > + > +> +source_info: < + location: "equality[5]" + line_offsets: 7 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 3 + > + positions: < + key: 3 + value: 1 + > +> \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/error.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/error.baseline new file mode 100644 index 00000000..1f55202b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/error.baseline @@ -0,0 +1,139 @@ +a: .a +================================================== + +Diagnostics: +ERROR: error[0]:1:5: Syntax error + | a: .a + | ....^ +ERROR: error[0]:1:4: Unrecognized floating point value: .a + | a: .a + | ...^ + +a:-b +================================================== + +Diagnostics: +ERROR: error[1]:1:4: Syntax error + | a:-b + | ...^ + +a~b +================================================== + +Diagnostics: +ERROR: error[2]:1:2: Syntax error + | a~b + | .^ + +a[$b] ~ "c*" +================================================== + +Diagnostics: +ERROR: error[3]:1:7: Syntax error + | a[$b] ~ "c*" + | ......^ + +( +================================================== + +Diagnostics: +ERROR: error[4]:1:2: Syntax error + | ( + | .^ + +argument(1, 2 +================================================== + +Diagnostics: +ERROR: error[5]:1:14: Syntax error + | argument(1, 2 + | .............^ + +.( +================================================== + +Diagnostics: +ERROR: error[6]:1:2: Syntax error + | .( + | .^ +ERROR: error[6]:1:1: Unrecognized floating point value: .( + | .( + | ^ + +- +================================================== + +Diagnostics: +ERROR: error[7]:1:2: Syntax error + | - + | .^ + +~= +================================================== + +Diagnostics: +ERROR: error[8]:1:1: Syntax error + | ~= + | ^ +ERROR: error[8]:1:2: Syntax error + | ~= + | .^ + +NOT +================================================== + +Diagnostics: +ERROR: error[9]:1:4: Syntax error + | NOT + | ...^ + +" +================================================== + +Diagnostics: +ERROR: error[10]:1:1: Syntax error + | " + | ^ + +a  b +================================================== + +Diagnostics: +ERROR: error[11]:1:3: Syntax error + | a  b + | ..^ + +() +================================================== + +Diagnostics: +ERROR: error[12]:1:2: Syntax error + | () + | .^ + +\not: \and +================================================== + +Diagnostics: +ERROR: error[13]:1:1: Syntax error + | \not: \and + | ^ +ERROR: error[13]:1:7: Syntax error + | \not: \and + | ......^ + +NOT:AND +================================================== + +Diagnostics: +ERROR: error[14]:1:4: Syntax error + | NOT:AND + | ...^ + +\u10 +================================================== + +Diagnostics: +ERROR: error[15]:1:1: Syntax error + | \u10 + | ^ \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/expression.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/expression.baseline new file mode 100644 index 00000000..8e590804 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/expression.baseline @@ -0,0 +1,46 @@ +a.c:b 
+================================================== +expr: < + id: 4 + call_expr: < + function: "_:_" + args: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "a" + > + > + field: "c" + > + > + args: < + id: 3 + ident_expr: < + name: "b" + > + > + > +> +source_info: < + location: "expression[0]" + line_offsets: 6 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 4 + > + positions: < + key: 4 + value: 3 + > +> \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/function.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/function.baseline new file mode 100644 index 00000000..3ec371bf --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/function.baseline @@ -0,0 +1,174 @@ +!@#$%^&*() +================================================== +expr: < + id: 2 + call_expr: < + function: "_global_" + args: < + id: 1 + call_expr: < + function: "!@#$%^&*" + > + > + > +> +source_info: < + location: "function[0]" + line_offsets: 11 + positions: < + key: 1 + value: 8 + > + positions: < + key: 2 + value: 0 + > +> + +!@#$%^&*(arg) +================================================== +expr: < + id: 3 + call_expr: < + function: "_global_" + args: < + id: 2 + call_expr: < + function: "!@#$%^&*" + args: < + id: 1 + ident_expr: < + name: "arg" + > + > + > + > + > +> +source_info: < + location: "function[1]" + line_offsets: 14 + positions: < + key: 1 + value: 9 + > + positions: < + key: 2 + value: 8 + > + positions: < + key: 3 + value: 0 + > +> + +foo() +================================================== +expr: < + id: 2 + call_expr: < + function: "_global_" + args: < + id: 1 + call_expr: < + function: "foo" + > + > + > +> +source_info: < + location: "function[2]" + line_offsets: 6 + positions: < + key: 1 + value: 3 + > + positions: < + key: 2 + value: 0 + > +> + +a > foo() +================================================== +expr: < + id: 3 + call_expr: < + function: "_>_" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + call_expr: < + function: "foo" + > + > + > +> +source_info: < + location: "function[3]" + line_offsets: 10 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 7 + > + positions: < + key: 3 + value: 2 + > +> + +b < foo(3) +================================================== +expr: < + id: 4 + call_expr: < + function: "_<_" + args: < + id: 1 + ident_expr: < + name: "b" + > + > + args: < + id: 3 + call_expr: < + function: "foo" + args: < + id: 2 + const_expr: < + int64_value: 3 + > + > + > + > + > +> +source_info: < + location: "function[4]" + line_offsets: 11 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 8 + > + positions: < + key: 3 + value: 7 + > + positions: < + key: 4 + value: 2 + > +> \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/member.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/member.baseline new file mode 100644 index 00000000..7da78a66 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/member.baseline @@ -0,0 +1,207 @@ +a.b +================================================== +expr: < + id: 3 + call_expr: < + function: "_global_" + args: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "a" + > + > + field: "b" + > + > + > +> +source_info: < + location: 
"member[0]" + line_offsets: 4 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 0 + > +> + +a[b] +================================================== +expr: < + id: 4 + call_expr: < + function: "_global_" + args: < + id: 3 + call_expr: < + function: "_[_]" + args: < + id: 1 + ident_expr: < + name: "a" + > + > + args: < + id: 2 + ident_expr: < + name: "b" + > + > + > + > + > +> +source_info: < + location: "member[1]" + line_offsets: 5 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 2 + > + positions: < + key: 3 + value: 1 + > + positions: < + key: 4 + value: 0 + > +> + +a.NOT.OR.AND:value +================================================== +expr: < + id: 6 + call_expr: < + function: "_:_" + args: < + id: 4 + select_expr: < + operand: < + id: 3 + select_expr: < + operand: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "a" + > + > + field: "NOT" + > + > + field: "OR" + > + > + field: "AND" + > + > + args: < + id: 5 + ident_expr: < + name: "value" + > + > + > +> +source_info: < + location: "member[2]" + line_offsets: 19 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 5 + > + positions: < + key: 4 + value: 8 + > + positions: < + key: 5 + value: 13 + > + positions: < + key: 6 + value: 12 + > +> + +a."string field" > id.call() +================================================== +expr: < + id: 5 + call_expr: < + function: "_>_" + args: < + id: 2 + select_expr: < + operand: < + id: 1 + ident_expr: < + name: "a" + > + > + field: "string field" + > + > + args: < + id: 4 + call_expr: < + target: < + id: 3 + ident_expr: < + name: "id" + > + > + function: "call" + > + > + > +> +source_info: < + location: "member[3]" + line_offsets: 29 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 1 + > + positions: < + key: 3 + value: 19 + > + positions: < + key: 4 + value: 26 + > + positions: < + key: 5 + value: 17 + > +> \ No newline at end of file diff --git a/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/unicode.baseline b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/unicode.baseline new file mode 100644 index 00000000..594a4ffc --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/server-go/filtering/parser/testdata/unicode.baseline @@ -0,0 +1,63 @@ +fờö +================================================== +expr: < + id: 2 + call_expr: < + function: "_global_" + args: < + id: 1 + ident_expr: < + name: "f\341\273\235\303\266" + > + > + > +> +source_info: < + location: "unicode[0]" + line_offsets: 7 + positions: < + key: 1 + value: 0 + > + positions: < + key: 2 + value: 0 + > +> + +answer:Да! +================================================== +expr: < + id: 3 + call_expr: < + function: "_:_" + args: < + id: 1 + ident_expr: < + name: "answer" + > + > + args: < + id: 2 + ident_expr: < + name: "\320\224\320\260!" 
+      >
+    >
+  >
+>
+source_info: <
+  location: "unicode[1]"
+  line_offsets: 13
+  positions: <
+    key: 1
+    value: 0
+  >
+  positions: <
+    key: 2
+    value: 7
+  >
+  positions: <
+    key: 3
+    value: 6
+  >
+>
\ No newline at end of file
diff --git a/vendor/github.com/grafeas/grafeas/server-go/storage.go b/vendor/github.com/grafeas/grafeas/server-go/storage.go
index 67950dd5..1022eead 100644
--- a/vendor/github.com/grafeas/grafeas/server-go/storage.go
+++ b/vendor/github.com/grafeas/grafeas/server-go/storage.go
@@ -60,20 +60,25 @@ type Storager interface {
 	// GetOperation returns the operation with pID and oID
 	GetOperation(pID, opID string) (*opspb.Operation, error)
 
-	// ListProjects returns the project id for all projects
-	ListProjects(filters string) []*pb.Project
+	// ListProjects returns up to pageSize number of projects beginning at pageToken (or from
+	// start if pageToken is the empty string).
+	ListProjects(filter string, pageSize int, pageToken string) ([]*pb.Project, string, error)
 
-	// ListNoteOccurrences returns the occcurrences on the particular note (nID) for this project (pID)
-	ListNoteOccurrences(pID, nID, filters string) ([]*pb.Occurrence, error)
+	// ListNoteOccurrences returns up to pageSize number of occurrences on the particular note (nID)
+	// for this project (pID) beginning at pageToken (or from start if pageToken is the empty string).
+	ListNoteOccurrences(pID, nID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error)
 
-	// ListNotes returns the notes for for this project (pID)
-	ListNotes(pID, filters string) []*pb.Note
+	// ListNotes returns up to pageSize number of notes for this project (pID) beginning
+	// at pageToken (or from start if pageToken is the empty string).
+	ListNotes(pID, filters string, pageSize int, pageToken string) ([]*pb.Note, string, error)
 
-	// ListOccurrences returns the occurrences for this project ID (pID)
-	ListOccurrences(pID, filters string) []*pb.Occurrence
+	// ListOccurrences returns up to pageSize number of occurrences for this project (pID) beginning
+	// at pageToken (or from start if pageToken is the empty string).
+	ListOccurrences(pID, filters string, pageSize int, pageToken string) ([]*pb.Occurrence, string, error)
 
-	// ListOperations returns the operations for this project (pID)
-	ListOperations(pID, filters string) []*opspb.Operation
+	// ListOperations returns up to pageSize number of operations for this project (pID) beginning
+	// at pageToken (or from start if pageToken is the empty string).
+	ListOperations(pID, filters string, pageSize int, pageToken string) ([]*opspb.Operation, string, error)
 
 	// UpdateNote updates the existing note with the given pID and nID
 	UpdateNote(pID, nID string, n *pb.Note) error
diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContext.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContext.md
new file mode 100644
index 00000000..249ce619
--- /dev/null
+++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContext.md
@@ -0,0 +1,11 @@
+# ApiAliasContext
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**Kind** | [**ApiAliasContextKind**](apiAliasContextKind.md) | The alias kind. | [optional] [default to null]
+**Name** | **string** | The alias name.
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContextKind.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContextKind.md new file mode 100644 index 00000000..25be4f8c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiAliasContextKind.md @@ -0,0 +1,9 @@ +# ApiAliasContextKind + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiArtifact.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiArtifact.md new file mode 100644 index 00000000..e9a7a125 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiArtifact.md @@ -0,0 +1,13 @@ +# ApiArtifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto. | [optional] [default to null] +**Checksum** | **string** | Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container. | [optional] [default to null] +**Id** | **string** | | [optional] [default to null] +**Names** | **[]string** | Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildDetails.md new file mode 100644 index 00000000..7ca11e4e --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildDetails.md @@ -0,0 +1,11 @@ +# ApiBuildDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Provenance** | [**ApiBuildProvenance**](apiBuildProvenance.md) | | [optional] [default to null] +**ProvenanceBytes** | **string** | Serialized JSON representation of the provenance, used in generating the `BuildSignature` in the corresponding Result. After verifying the signature, `provenance_bytes` can be unmarshalled and compared to the provenance to confirm that it is unchanged. A base64-encoded string representation of the provenance bytes is used for the signature in order to interoperate with openssl which expects this format for signature verification. 
The serialized form is captured both to avoid ambiguity in how the provenance is marshalled to json as well to prevent incompatibilities with future changes. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildProvenance.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildProvenance.md new file mode 100644 index 00000000..abb09d29 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildProvenance.md @@ -0,0 +1,22 @@ +# ApiBuildProvenance + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Id** | **string** | Unique identifier of the build. | [optional] [default to null] +**ProjectId** | **string** | ID of the project. | [optional] [default to null] +**Commands** | [**[]ApiCommand**](apiCommand.md) | Commands requested by the build. | [optional] [default to null] +**BuiltArtifacts** | [**[]ApiArtifact**](apiArtifact.md) | Output of the build. | [optional] [default to null] +**CreateTime** | [**time.Time**](time.Time.md) | Time at which the build was created. | [optional] [default to null] +**StartTime** | [**time.Time**](time.Time.md) | Time at which execution of the build was started. | [optional] [default to null] +**FinishTime** | [**time.Time**](time.Time.md) | Time at which execution of the build was finished. | [optional] [default to null] +**Creator** | **string** | E-mail address of the user who initiated this build. Note that this was the user's e-mail address at the time the build was initiated; this address may not represent the same end-user for all time. | [optional] [default to null] +**LogsBucket** | **string** | Google Cloud Storage bucket where logs were written. | [optional] [default to null] +**SourceProvenance** | [**ApiSource**](apiSource.md) | Details of the Source input to the build. | [optional] [default to null] +**TriggerId** | **string** | Trigger identifier if the build was triggered automatically; empty if not. | [optional] [default to null] +**BuildOptions** | **map[string]string** | Special options applied to this build. This is a catch-all field where build providers can enter any desired additional details. | [optional] [default to null] +**BuilderVersion** | **string** | Version string of the builder at the time this build was executed. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildSignature.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildSignature.md new file mode 100644 index 00000000..31263522 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildSignature.md @@ -0,0 +1,13 @@ +# ApiBuildSignature + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**PublicKey** | **string** | Public key of the builder which can be used to verify that the related findings are valid and unchanged. If `key_type` is empty, this defaults to PEM encoded public keys. This field may be empty if `key_id` references an external key. For Cloud Container Builder based signatures, this is a PEM encoded public key. 
To verify the Cloud Container Builder signature, place the contents of this field into a file (public.pem). The signature field is base64-decoded into its binary representation in signature.bin, and the provenance bytes from `BuildDetails` are base64-decoded into a binary representation in signed.bin. OpenSSL can then verify the signature: `openssl sha256 -verify public.pem -signature signature.bin signed.bin` | [optional] [default to null] +**Signature** | **string** | Signature of the related `BuildProvenance`, encoded in a base64 string. | [optional] [default to null] +**KeyId** | **string** | An Id for the key used to sign. This could be either an Id for the key stored in `public_key` (such as the Id or fingerprint for a PGP key, or the CN for a cert), or a reference to an external key (such as a reference to a key in Cloud Key Management Service). | [optional] [default to null] +**KeyType** | [**BuildSignatureKeyType**](BuildSignatureKeyType.md) | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildType.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildType.md new file mode 100644 index 00000000..07bfbf38 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiBuildType.md @@ -0,0 +1,11 @@ +# ApiBuildType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**BuilderVersion** | **string** | Version of the builder which produced this Note. | [optional] [default to null] +**Signature** | [**ApiBuildSignature**](apiBuildSignature.md) | Signature of the build in Occurrences pointing to the Note containing this `BuilderDetails`. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCloudRepoSourceContext.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCloudRepoSourceContext.md new file mode 100644 index 00000000..98a59f19 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCloudRepoSourceContext.md @@ -0,0 +1,12 @@ +# ApiCloudRepoSourceContext + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**RepoId** | [**ApiRepoId**](apiRepoId.md) | The ID of the repo. | [optional] [default to null] +**RevisionId** | **string** | A revision ID. | [optional] [default to null] +**AliasContext** | [**ApiAliasContext**](apiAliasContext.md) | An alias, which may be a branch or tag. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCommand.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCommand.md new file mode 100644 index 00000000..349f225a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCommand.md @@ -0,0 +1,15 @@ +# ApiCommand + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | Name of the command, as presented on the command line, or if the command is packaged as a Docker container, as presented to `docker pull`. | [optional] [default to null] +**Env** | **[]string** | Environment variables set before running this Command. | [optional] [default to null] +**Args** | **[]string** | Command-line arguments used when executing this Command. | [optional] [default to null] +**Dir** | **string** | Working directory (relative to project source root) used when running this Command. | [optional] [default to null] +**Id** | **string** | Optional unique identifier for this Command, used in wait_for to reference this Command as a dependency. | [optional] [default to null] +**WaitFor** | **[]string** | The ID(s) of the Command(s) that this Command depends on. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCreateOperationRequest.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCreateOperationRequest.md new file mode 100644 index 00000000..86dff491 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiCreateOperationRequest.md @@ -0,0 +1,12 @@ +# ApiCreateOperationRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Parent** | **string** | The projectId that this operation should be created under. | [optional] [default to null] +**OperationId** | **string** | The ID to use for this operation. | [optional] [default to null] +**Operation** | [**LongrunningOperation**](longrunningOperation.md) | The operation to create. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDeployable.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDeployable.md new file mode 100644 index 00000000..85c28f86 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDeployable.md @@ -0,0 +1,10 @@ +# ApiDeployable + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ResourceUri** | **[]string** | Resource URI for the artifact being deployed. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDiscovery.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDiscovery.md new file mode 100644 index 00000000..b361f7d8 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiDiscovery.md @@ -0,0 +1,10 @@ +# ApiDiscovery + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**AnalysisKind** | [**ApiNoteKind**](apiNoteKind.md) | The kind of analysis that is handled by this discovery. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiFileHashes.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiFileHashes.md new file mode 100644 index 00000000..2cfa5812 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiFileHashes.md @@ -0,0 +1,10 @@ +# ApiFileHashes + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**FileHash** | [**[]ApiHash**](apiHash.md) | Collection of file hashes. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGerritSourceContext.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGerritSourceContext.md new file mode 100644 index 00000000..ff01b30d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGerritSourceContext.md @@ -0,0 +1,13 @@ +# ApiGerritSourceContext + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**HostUri** | **string** | The URI of a running Gerrit instance. | [optional] [default to null] +**GerritProject** | **string** | The full project name within the host. Projects may be nested, so \"project/subproject\" is a valid project name. The \"repo name\" is the hostURI/project. | [optional] [default to null] +**RevisionId** | **string** | A revision (commit) ID. | [optional] [default to null] +**AliasContext** | [**ApiAliasContext**](apiAliasContext.md) | An alias, which may be a branch or tag. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGitSourceContext.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGitSourceContext.md new file mode 100644 index 00000000..1fc8fcf6 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiGitSourceContext.md @@ -0,0 +1,11 @@ +# ApiGitSourceContext + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Url** | **string** | Git repository URL. | [optional] [default to null] +**RevisionId** | **string** | Required. Git commit hash. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiHash.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiHash.md new file mode 100644 index 00000000..17cce699 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiHash.md @@ -0,0 +1,11 @@ +# ApiHash + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Type_** | [**HashHashType**](HashHashType.md) | The type of hash that was performed. | [optional] [default to null] +**Value** | **string** | The hash value. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNoteOccurrencesResponse.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNoteOccurrencesResponse.md new file mode 100644 index 00000000..f4858e1a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNoteOccurrencesResponse.md @@ -0,0 +1,11 @@ +# ApiListNoteOccurrencesResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Occurrences** | [**[]ApiOccurrence**](apiOccurrence.md) | The occurrences attached to the specified note. | [optional] [default to null] +**NextPageToken** | **string** | Token to receive the next page of notes. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNotesResponse.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNotesResponse.md new file mode 100644 index 00000000..c06ade78 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListNotesResponse.md @@ -0,0 +1,11 @@ +# ApiListNotesResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Notes** | [**[]ApiNote**](apiNote.md) | | [optional] [default to null] +**NextPageToken** | **string** | The next pagination token in the list response. It should be used as page_token for the following request. An empty value means no more result. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListOccurrencesResponse.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListOccurrencesResponse.md new file mode 100644 index 00000000..640480b9 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListOccurrencesResponse.md @@ -0,0 +1,11 @@ +# ApiListOccurrencesResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Occurrences** | [**[]ApiOccurrence**](apiOccurrence.md) | The occurrences requested. | [optional] [default to null] +**NextPageToken** | **string** | The next pagination token in the list response. It should be used as `page_token` for the following request. An empty value means no more results. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListProjectsResponse.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListProjectsResponse.md new file mode 100644 index 00000000..e6fd1061 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiListProjectsResponse.md @@ -0,0 +1,11 @@ +# ApiListProjectsResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Projects** | [**[]ApiProject**](apiProject.md) | The projects requested. | [optional] [default to null] +**NextPageToken** | **string** | The next pagination token in the list response. It should be used as `page_token` for the following request. An empty value means no more results. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNote.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNote.md new file mode 100644 index 00000000..7221f9de --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNote.md @@ -0,0 +1,24 @@ +# ApiNote + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | | [optional] [default to null] +**ShortDescription** | **string** | A one sentence description of this `Note`. | [optional] [default to null] +**LongDescription** | **string** | A detailed description of this `Note`. | [optional] [default to null] +**Kind** | [**ApiNoteKind**](apiNoteKind.md) | Output only. This explicitly denotes which kind of note is specified. This field can be used as a filter in list requests. | [optional] [default to null] +**VulnerabilityType** | [**ApiVulnerabilityType**](apiVulnerabilityType.md) | A package vulnerability type of note. | [optional] [default to null] +**BuildType** | [**ApiBuildType**](apiBuildType.md) | Build provenance type for a verifiable build. | [optional] [default to null] +**BaseImage** | [**DockerImageBasis**](DockerImageBasis.md) | A note describing a base image. | [optional] [default to null] +**Package_** | [**PackageManagerPackage**](PackageManagerPackage.md) | A note describing a package hosted by various package managers. | [optional] [default to null] +**Deployable** | [**ApiDeployable**](apiDeployable.md) | A note describing something that can be deployed. | [optional] [default to null] +**Discovery** | [**ApiDiscovery**](apiDiscovery.md) | A note describing a provider/analysis type. | [optional] [default to null] +**RelatedUrl** | [**[]NoteRelatedUrl**](NoteRelatedUrl.md) | | [optional] [default to null] +**ExpirationTime** | [**time.Time**](time.Time.md) | Time of expiration for this note, null if note does not expire. | [optional] [default to null] +**CreateTime** | [**time.Time**](time.Time.md) | Output only. The time this note was created. This field can be used as a filter in list requests. | [optional] [default to null] +**UpdateTime** | [**time.Time**](time.Time.md) | Output only. The time this note was last updated. This field can be used as a filter in list requests. 
| [optional] [default to null] +**OperationName** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNoteKind.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNoteKind.md new file mode 100644 index 00000000..fe12893e --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiNoteKind.md @@ -0,0 +1,9 @@ +# ApiNoteKind + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiOccurrence.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiOccurrence.md new file mode 100644 index 00000000..e74a5ac9 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiOccurrence.md @@ -0,0 +1,24 @@ +# ApiOccurrence + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | | [optional] [default to null] +**ResourceUrl** | **string** | The unique URL of the image or the container for which the `Occurrence` applies. For example, https://gcr.io/project/image@sha256:foo This field can be used as a filter in list requests. | [optional] [default to null] +**NoteName** | **string** | An analysis note associated with this image, in the form \"providers/{provider_id}/notes/{NOTE_ID}\" This field can be used as a filter in list requests. | [optional] [default to null] +**Kind** | [**ApiNoteKind**](apiNoteKind.md) | Output only. This explicitly denotes which of the `Occurrence` details are specified. This field can be used as a filter in list requests. | [optional] [default to null] +**VulnerabilityDetails** | [**VulnerabilityTypeVulnerabilityDetails**](VulnerabilityTypeVulnerabilityDetails.md) | Details of a security vulnerability note. | [optional] [default to null] +**BuildDetails** | [**ApiBuildDetails**](apiBuildDetails.md) | Build details for a verifiable build. | [optional] [default to null] +**DerivedImageDetails** | [**DockerImageDerivedDetails**](DockerImageDerivedDetails.md) | Describes how this resource derives from the basis in the associated note. | [optional] [default to null] +**InstallationDetails** | [**PackageManagerInstallationDetails**](PackageManagerInstallationDetails.md) | Describes the installation of a package on the linked resource. | [optional] [default to null] +**DeploymentDetails** | [**DeployableDeploymentDetails**](DeployableDeploymentDetails.md) | Describes the deployment of an artifact on a runtime. | [optional] [default to null] +**DiscoveredDetails** | [**DiscoveryDiscoveredDetails**](DiscoveryDiscoveredDetails.md) | Describes the initial scan status for this resource. | [optional] [default to null] +**AttestationDetails** | [**AttestationAuthorityAttestationDetails**](AttestationAuthorityAttestationDetails.md) | Describes an attestation of an artifact. | [optional] [default to null] +**Remediation** | **string** | | [optional] [default to null] +**CreateTime** | [**time.Time**](time.Time.md) | Output only. The time this `Occurrence` was created. | [optional] [default to null] +**UpdateTime** | [**time.Time**](time.Time.md) | Output only. 
The time this `Occurrence` was last updated. | [optional] [default to null] +**OperationName** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPackageManagerLocation.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPackageManagerLocation.md new file mode 100644 index 00000000..1577ee4d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPackageManagerLocation.md @@ -0,0 +1,12 @@ +# ApiPackageManagerLocation + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CpeUri** | **string** | The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) denoting the package manager version distributing a package. | [optional] [default to null] +**Version** | [**VulnerabilityTypeVersion**](VulnerabilityTypeVersion.md) | The version installed at this location. | [optional] [default to null] +**Path** | **string** | The path from which we gathered that this package/version is installed. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPgpSignedAttestation.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPgpSignedAttestation.md new file mode 100644 index 00000000..f2495887 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiPgpSignedAttestation.md @@ -0,0 +1,12 @@ +# ApiPgpSignedAttestation + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Signature** | **string** | The raw content of the signature, as output by gpg or equivalent. Since this message only supports attached signatures, the payload that was signed must be attached. While the signature format supported is dependent on the verification implementation, currently only ASCII-armored (`--armor` to gpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are supported. Concretely, `gpg --sign --armor --output=signature.gpg payload.json` will create the signature content expected in this field in `signature.gpg` for the `payload.json` attestation payload. | [optional] [default to null] +**ContentType** | [**PgpSignedAttestationContentType**](PgpSignedAttestationContentType.md) | Type (e.g. schema) of the attestation payload that was signed. The verifier must ensure that the provided type is one that the verifier supports, and that the attestation payload is a valid instantiation of that type (e.g. by validating a JSON schema). | [optional] [default to null] +**PgpKeyId** | **string** | The cryptographic fingerprint of the key used to generate the signature, as output by, e.g. `gpg --list-keys`. This should be the version 4, full 160-bit fingerprint, expressed as a 40 character hexidecimal string. See https://tools.ietf.org/html/rfc4880#section-12.2 for details. Implementations may choose to acknowledge \"LONG\", \"SHORT\", or other abbreviated key IDs, but only the full fingerprint is guaranteed to work. In gpg, the full fingerprint can be retrieved from the `fpr` field returned when calling --list-keys with --with-colons. 
For example: ``` gpg --with-colons --with-fingerprint --force-v4-certs \\ --list-keys attester@example.com tru::1:1513631572:0:3:1:5 pub:...<SNIP>... fpr:::::::::24FF6481B76AC91E66A00AC657A93A81EF3AE6FB: ``` Above, the fingerprint is `24FF6481B76AC91E66A00AC657A93A81EF3AE6FB`. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProject.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProject.md new file mode 100644 index 00000000..6ac9997b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProject.md @@ -0,0 +1,10 @@ +# ApiProject + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProjectRepoId.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProjectRepoId.md new file mode 100644 index 00000000..73eb7542 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiProjectRepoId.md @@ -0,0 +1,11 @@ +# ApiProjectRepoId + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ProjectId** | **string** | The ID of the project. | [optional] [default to null] +**RepoName** | **string** | The name of the repo. Leave empty for the default repo. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoId.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoId.md new file mode 100644 index 00000000..db74e41a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoId.md @@ -0,0 +1,11 @@ +# ApiRepoId + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ProjectRepoId** | [**ApiProjectRepoId**](apiProjectRepoId.md) | A combination of a project ID and a repo name. | [optional] [default to null] +**Uid** | **string** | A server-assigned, globally unique identifier. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoSource.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoSource.md new file mode 100644 index 00000000..4a3653d6 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiRepoSource.md @@ -0,0 +1,14 @@ +# ApiRepoSource + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ProjectId** | **string** | ID of the project that owns the repo. | [optional] [default to null] +**RepoName** | **string** | Name of the repo. | [optional] [default to null] +**BranchName** | **string** | Name of the branch to build. | [optional] [default to null] +**TagName** | **string** | Name of the tag to build. 
| [optional] [default to null] +**CommitSha** | **string** | Explicit commit SHA to build. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSource.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSource.md new file mode 100644 index 00000000..21e220a4 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSource.md @@ -0,0 +1,15 @@ +# ApiSource + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**StorageSource** | [**ApiStorageSource**](apiStorageSource.md) | If provided, get the source from this location in in Google Cloud Storage. | [optional] [default to null] +**RepoSource** | [**ApiRepoSource**](apiRepoSource.md) | If provided, get source from this location in a Cloud Repo. | [optional] [default to null] +**ArtifactStorageSource** | [**ApiStorageSource**](apiStorageSource.md) | If provided, the input binary artifacts for the build came from this location. | [optional] [default to null] +**FileHashes** | [**map[string]ApiFileHashes**](apiFileHashes.md) | Hash(es) of the build source, which can be used to verify that the original source integrity was maintained in the build. The keys to this map are file paths used as build source and the values contain the hash values for those files. If the build source came in a single package such as a gzipped tarfile (.tar.gz), the FileHash will be for the single path to that file. | [optional] [default to null] +**Context** | [**ApiSourceContext**](apiSourceContext.md) | If provided, the source code used for the build came from this location. | [optional] [default to null] +**AdditionalContexts** | [**[]ApiSourceContext**](apiSourceContext.md) | If provided, some of the source code used for the build may be found in these locations, in the case where the source repository had multiple remotes or submodules. This list will not include the context specified in the context field. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSourceContext.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSourceContext.md new file mode 100644 index 00000000..3d34357a --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiSourceContext.md @@ -0,0 +1,13 @@ +# ApiSourceContext + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CloudRepo** | [**ApiCloudRepoSourceContext**](apiCloudRepoSourceContext.md) | A SourceContext referring to a revision in a Google Cloud Source Repo. | [optional] [default to null] +**Gerrit** | [**ApiGerritSourceContext**](apiGerritSourceContext.md) | A SourceContext referring to a Gerrit project. | [optional] [default to null] +**Git** | [**ApiGitSourceContext**](apiGitSourceContext.md) | A SourceContext referring to any third party Git repo (e.g., GitHub). | [optional] [default to null] +**Labels** | **map[string]string** | Labels with user defined metadata. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiStorageSource.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiStorageSource.md new file mode 100644 index 00000000..5553fe4b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiStorageSource.md @@ -0,0 +1,12 @@ +# ApiStorageSource + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Bucket** | **string** | Google Cloud Storage bucket containing source (see [Bucket Name Requirements] (https://cloud.google.com/storage/docs/bucket-naming#requirements)). | [optional] [default to null] +**Object** | **string** | Google Cloud Storage object containing source. | [optional] [default to null] +**Generation** | **string** | Google Cloud Storage generation for the object. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiUpdateOperationRequest.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiUpdateOperationRequest.md new file mode 100644 index 00000000..aef61725 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiUpdateOperationRequest.md @@ -0,0 +1,11 @@ +# ApiUpdateOperationRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | The name of the Operation. Should be of the form \"projects/{provider_id}/operations/{operation_id}\". | [optional] [default to null] +**Operation** | [**LongrunningOperation**](longrunningOperation.md) | The operation to create. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiVulnerabilityType.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiVulnerabilityType.md new file mode 100644 index 00000000..02d393f6 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ApiVulnerabilityType.md @@ -0,0 +1,12 @@ +# ApiVulnerabilityType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CvssScore** | **float32** | The CVSS score for this Vulnerability. | [optional] [default to null] +**Severity** | [**VulnerabilityTypeSeverity**](VulnerabilityTypeSeverity.md) | | [optional] [default to null] +**Details** | [**[]VulnerabilityTypeDetail**](VulnerabilityTypeDetail.md) | All information about the package to specifically identify this vulnerability. One entry per (version range and cpe_uri) the package vulnerability has manifested in. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/AttestationAuthorityAttestationDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/AttestationAuthorityAttestationDetails.md new file mode 100644 index 00000000..3f0664d8 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/AttestationAuthorityAttestationDetails.md @@ -0,0 +1,10 @@ +# AttestationAuthorityAttestationDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**PgpSignedAttestation** | [**ApiPgpSignedAttestation**](apiPgpSignedAttestation.md) | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/BuildSignatureKeyType.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/BuildSignatureKeyType.md new file mode 100644 index 00000000..c422692d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/BuildSignatureKeyType.md @@ -0,0 +1,9 @@ +# BuildSignatureKeyType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeployableDeploymentDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeployableDeploymentDetails.md new file mode 100644 index 00000000..436474a7 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeployableDeploymentDetails.md @@ -0,0 +1,16 @@ +# DeployableDeploymentDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**UserEmail** | **string** | Identity of the user that triggered this deployment. | [optional] [default to null] +**DeployTime** | [**time.Time**](time.Time.md) | Beginning of the lifetime of this deployment. | [optional] [default to null] +**UndeployTime** | [**time.Time**](time.Time.md) | End of the lifetime of this deployment. | [optional] [default to null] +**Config** | **string** | Configuration used to create this deployment. | [optional] [default to null] +**Address** | **string** | Address of the runtime element hosting this deployment. | [optional] [default to null] +**ResourceUri** | **[]string** | Output only. Resource URI for the artifact being deployed taken from the deployable field with the same name. | [optional] [default to null] +**Platform** | [**DeploymentDetailsPlatform**](DeploymentDetailsPlatform.md) | Platform hosting this deployment. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeploymentDetailsPlatform.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeploymentDetailsPlatform.md new file mode 100644 index 00000000..90c00c1f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DeploymentDetailsPlatform.md @@ -0,0 +1,9 @@ +# DeploymentDetailsPlatform + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DiscoveryDiscoveredDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DiscoveryDiscoveredDetails.md new file mode 100644 index 00000000..5371f071 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DiscoveryDiscoveredDetails.md @@ -0,0 +1,10 @@ +# DiscoveryDiscoveredDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Operation** | [**LongrunningOperation**](longrunningOperation.md) | Output only. An operation that indicates the status of the current scan. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageBasis.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageBasis.md new file mode 100644 index 00000000..ed4c64fe --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageBasis.md @@ -0,0 +1,11 @@ +# DockerImageBasis + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ResourceUrl** | **string** | The resource_url for the resource representing the basis of associated occurrence images. | [optional] [default to null] +**Fingerprint** | [**DockerImageFingerprint**](DockerImageFingerprint.md) | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageDerivedDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageDerivedDetails.md new file mode 100644 index 00000000..1a7147eb --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageDerivedDetails.md @@ -0,0 +1,13 @@ +# DockerImageDerivedDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Fingerprint** | [**DockerImageFingerprint**](DockerImageFingerprint.md) | | [optional] [default to null] +**Distance** | **int64** | Output only. The number of layers by which this image differs from the associated image basis. | [optional] [default to null] +**LayerInfo** | [**[]DockerImageLayer**](DockerImageLayer.md) | This contains layer-specific metadata, if populated it has length \"distance\" and is ordered with [distance] being the layer immediately following the base image and [1] being the final layer. 
| [optional] [default to null] +**BaseResourceUrl** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageFingerprint.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageFingerprint.md new file mode 100644 index 00000000..ef3ee11f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageFingerprint.md @@ -0,0 +1,12 @@ +# DockerImageFingerprint + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**V1Name** | **string** | The layer-id of the final layer in the Docker image's v1 representation. This field can be used as a filter in list requests. | [optional] [default to null] +**V2Blob** | **[]string** | The ordered list of v2 blobs that represent a given image. | [optional] [default to null] +**V2Name** | **string** | Output only. The name of the image's v2 blobs computed via: [bottom] := v2_blob[bottom] [N] := sha256(v2_blob[N] + \" \" + v2_name[N+1]) Only the name of the final blob is kept. This field can be used as a filter in list requests. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageLayer.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageLayer.md new file mode 100644 index 00000000..2782ff85 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/DockerImageLayer.md @@ -0,0 +1,11 @@ +# DockerImageLayer + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Directive** | [**LayerDirective**](LayerDirective.md) | The recovered Dockerfile directive used to construct this layer. | [optional] [default to null] +**Arguments** | **string** | The recovered arguments to the Dockerfile directive. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasApi.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasApi.md new file mode 100644 index 00000000..9cab337f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasApi.md @@ -0,0 +1,245 @@ +# \GrafeasApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**CreateNote**](GrafeasApi.md#CreateNote) | **Post** /v1alpha1/{parent}/notes | Creates a new `Note`. +[**CreateOccurrence**](GrafeasApi.md#CreateOccurrence) | **Post** /v1alpha1/{parent}/occurrences | Creates a new `Occurrence`. Use this method to create `Occurrences` for a resource. +[**CreateOperation**](GrafeasApi.md#CreateOperation) | **Post** /v1alpha1/{parent}/operations | Creates a new `Operation`. +[**GetOccurrenceNote**](GrafeasApi.md#GetOccurrenceNote) | **Get** /v1alpha1/{name}/notes | Gets the `Note` attached to the given `Occurrence`. +[**ListNoteOccurrences**](GrafeasApi.md#ListNoteOccurrences) | **Get** /v1alpha1/{name}/occurrences | Lists `Occurrences` referencing the specified `Note`. 
Use this method to get all occurrences referencing your `Note` across all your customer projects. +[**ListNotes**](GrafeasApi.md#ListNotes) | **Get** /v1alpha1/{parent}/notes | Lists all `Notes` for a given project. +[**ListOccurrences**](GrafeasApi.md#ListOccurrences) | **Get** /v1alpha1/{parent}/occurrences | Lists active `Occurrences` for a given project matching the filters. +[**UpdateNote**](GrafeasApi.md#UpdateNote) | **Patch** /v1alpha1/{name} | Updates an existing `Note`. + + +# **CreateNote** +> ApiNote CreateNote($parent, $body) + +Creates a new `Note`. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **string**| | + **body** | [**ApiNote**](ApiNote.md)| | + +### Return type + +[**ApiNote**](apiNote.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **CreateOccurrence** +> ApiOccurrence CreateOccurrence($parent, $body) + +Creates a new `Occurrence`. Use this method to create `Occurrences` for a resource. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **string**| | + **body** | [**ApiOccurrence**](ApiOccurrence.md)| | + +### Return type + +[**ApiOccurrence**](apiOccurrence.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **CreateOperation** +> LongrunningOperation CreateOperation($parent, $body) + +Creates a new `Operation`. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **string**| | + **body** | [**ApiCreateOperationRequest**](ApiCreateOperationRequest.md)| | + +### Return type + +[**LongrunningOperation**](longrunningOperation.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **GetOccurrenceNote** +> ApiNote GetOccurrenceNote($name) + +Gets the `Note` attached to the given `Occurrence`. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **string**| | + +### Return type + +[**ApiNote**](apiNote.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ListNoteOccurrences** +> ApiListNoteOccurrencesResponse ListNoteOccurrences($name, $filter, $pageSize, $pageToken) + +Lists `Occurrences` referencing the specified `Note`. 
Use this method to get all occurrences referencing your `Note` across all your customer projects. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **string**| | + **filter** | **string**| The filter expression. | [optional] + **pageSize** | **int32**| Number of notes to return in the list. | [optional] + **pageToken** | **string**| Token to provide to skip to a particular spot in the list. | [optional] + +### Return type + +[**ApiListNoteOccurrencesResponse**](apiListNoteOccurrencesResponse.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ListNotes** +> ApiListNotesResponse ListNotes($parent, $filter, $pageSize, $pageToken) + +Lists all `Notes` for a given project. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **string**| | + **filter** | **string**| The filter expression. | [optional] + **pageSize** | **int32**| Number of notes to return in the list. | [optional] + **pageToken** | **string**| Token to provide to skip to a particular spot in the list. | [optional] + +### Return type + +[**ApiListNotesResponse**](apiListNotesResponse.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ListOccurrences** +> ApiListOccurrencesResponse ListOccurrences($parent, $filter, $pageSize, $pageToken) + +Lists active `Occurrences` for a given project matching the filters. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **string**| | + **filter** | **string**| The filter expression. | [optional] + **pageSize** | **int32**| Number of occurrences to return in the list. | [optional] + **pageToken** | **string**| Token to provide to skip to a particular spot in the list. | [optional] + +### Return type + +[**ApiListOccurrencesResponse**](apiListOccurrencesResponse.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **UpdateNote** +> ApiNote UpdateNote($name, $body) + +Updates an existing `Note`. 
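The endpoint table above already pins down everything needed to call this method over plain HTTP: a `Patch` to `/v1alpha1/{name}` with a JSON `ApiNote` body, no authorization, and URIs relative to `http://localhost`. The Go sketch below is illustrative only; the host, project ID, note ID, and the lowerCamelCase JSON field names are assumptions layered on top of the generated documentation, not part of it.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// {name} is the full resource name of the note being updated (assumed values).
	name := "projects/example-project/notes/example-note"

	// Minimal ApiNote payload; JSON field names are assumed to follow the
	// API's lowerCamelCase mapping of the documented properties.
	payload, err := json.Marshal(map[string]interface{}{
		"shortDescription": "Updated one-sentence description",
		"longDescription":  "Updated detailed description of this note.",
	})
	if err != nil {
		log.Fatal(err)
	}

	req, err := http.NewRequest(http.MethodPatch,
		"http://localhost/v1alpha1/"+name, bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Per the docs, the response body is the updated ApiNote.
	var updated map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&updated); err != nil {
		log.Fatal(err)
	}
	fmt.Println("updated note:", updated["name"])
}
```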
+ + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **string**| | + **body** | [**ApiNote**](ApiNote.md)| | + +### Return type + +[**ApiNote**](apiNote.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasProjectsApi.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasProjectsApi.md new file mode 100644 index 00000000..6e95dfab --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/GrafeasProjectsApi.md @@ -0,0 +1,122 @@ +# \GrafeasProjectsApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**CreateProject**](GrafeasProjectsApi.md#CreateProject) | **Post** /v1alpha1/projects | Creates a new `Project`. +[**DeleteProject**](GrafeasProjectsApi.md#DeleteProject) | **Delete** /v1alpha1/{name} | Deletes the given `Project` from the system. +[**GetProject**](GrafeasProjectsApi.md#GetProject) | **Get** /v1alpha1/{name} | Returns the requested `Project`. +[**ListProjects**](GrafeasProjectsApi.md#ListProjects) | **Get** /v1alpha1/projects | Lists `Projects` + + +# **CreateProject** +> ProtobufEmpty CreateProject($body) + +Creates a new `Project`. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**ApiProject**](ApiProject.md)| | + +### Return type + +[**ProtobufEmpty**](protobufEmpty.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **DeleteProject** +> ProtobufEmpty DeleteProject($name) + +Deletes the given `Project` from the system. + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **string**| | + +### Return type + +[**ProtobufEmpty**](protobufEmpty.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **GetProject** +> ApiProject GetProject($name) + +Returns the requested `Project`. 
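Per the method table above, this is a plain, unauthenticated JSON `Get` on `/v1alpha1/{name}`, so the standard library is sufficient. A minimal sketch, assuming a server on `localhost` and a hypothetical project named `projects/example-project`; the struct mirrors only the `Name` property documented for the `ApiProject` model.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// apiProject mirrors the single documented property of the ApiProject model.
type apiProject struct {
	Name string `json:"name"`
}

func main() {
	// {name} is the project resource name, e.g. "projects/example-project" (assumed).
	resp, err := http.Get("http://localhost/v1alpha1/projects/example-project")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var p apiProject
	if err := json.NewDecoder(resp.Body).Decode(&p); err != nil {
		log.Fatal(err)
	}
	fmt.Println("got project:", p.Name)
}
```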
+ + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **string**| | + +### Return type + +[**ApiProject**](apiProject.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **ListProjects** +> ApiListProjectsResponse ListProjects($filter, $pageSize, $pageToken) + +Lists `Projects` + + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **filter** | **string**| The filter expression. | [optional] + **pageSize** | **int32**| Number of projects to return in the list. | [optional] + **pageToken** | **string**| Token to provide to skip to a particular spot in the list. | [optional] + +### Return type + +[**ApiListProjectsResponse**](apiListProjectsResponse.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/HashHashType.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/HashHashType.md new file mode 100644 index 00000000..5c1fdd81 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/HashHashType.md @@ -0,0 +1,9 @@ +# HashHashType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LayerDirective.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LayerDirective.md new file mode 100644 index 00000000..445dd912 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LayerDirective.md @@ -0,0 +1,9 @@ +# LayerDirective + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LongrunningOperation.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LongrunningOperation.md new file mode 100644 index 00000000..c03f40ff --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/LongrunningOperation.md @@ -0,0 +1,14 @@ +# LongrunningOperation + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should have the format of `operations/some/unique/name`. | [optional] [default to null] +**Metadata** | [**ProtobufAny**](protobufAny.md) | Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. 
Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any. | [optional] [default to null] +**Done** | **bool** | If the value is `false`, it means the operation is still in progress. If true, the operation is completed, and either `error` or `response` is available. | [optional] [default to null] +**Error_** | [**RpcStatus**](rpcStatus.md) | The error result of the operation in case of failure or cancellation. | [optional] [default to null] +**Response** | [**ProtobufAny**](protobufAny.md) | The normal response of the operation in case of success. If the original method returns no data on success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/NoteRelatedUrl.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/NoteRelatedUrl.md new file mode 100644 index 00000000..5dfdba9f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/NoteRelatedUrl.md @@ -0,0 +1,11 @@ +# NoteRelatedUrl + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Url** | **string** | | [optional] [default to null] +**Label** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerArchitecture.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerArchitecture.md new file mode 100644 index 00000000..cf38124c --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerArchitecture.md @@ -0,0 +1,9 @@ +# PackageManagerArchitecture + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerDistribution.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerDistribution.md new file mode 100644 index 00000000..b8e3e0d1 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerDistribution.md @@ -0,0 +1,15 @@ +# PackageManagerDistribution + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CpeUri** | **string** | The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) denoting the package manager version distributing a package. | [optional] [default to null] +**Architecture** | [**PackageManagerArchitecture**](PackageManagerArchitecture.md) | | [optional] [default to null] +**LatestVersion** | [**VulnerabilityTypeVersion**](VulnerabilityTypeVersion.md) | The latest available version of this package in this distribution channel. 
| [optional] [default to null] +**Maintainer** | **string** | A freeform string denoting the maintainer of this package. | [optional] [default to null] +**Url** | **string** | The distribution channel-specific homepage for this package. | [optional] [default to null] +**Description** | **string** | The distribution channel-specific description of this package. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerInstallationDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerInstallationDetails.md new file mode 100644 index 00000000..a984b4a3 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerInstallationDetails.md @@ -0,0 +1,11 @@ +# PackageManagerInstallationDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | Output only. The name of the installed package. | [optional] [default to null] +**Location** | [**[]ApiPackageManagerLocation**](apiPackageManagerLocation.md) | All of the places within the filesystem versions of this package have been found. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerPackage.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerPackage.md new file mode 100644 index 00000000..b22d9009 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PackageManagerPackage.md @@ -0,0 +1,11 @@ +# PackageManagerPackage + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Name** | **string** | The name of the package. | [optional] [default to null] +**Distribution** | [**[]PackageManagerDistribution**](PackageManagerDistribution.md) | The various channels by which a package is distributed. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PgpSignedAttestationContentType.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PgpSignedAttestationContentType.md new file mode 100644 index 00000000..51ddd2ac --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/PgpSignedAttestationContentType.md @@ -0,0 +1,9 @@ +# PgpSignedAttestationContentType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufAny.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufAny.md new file mode 100644 index 00000000..f4ad733d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufAny.md @@ -0,0 +1,11 @@ +# ProtobufAny + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**TypeUrl** | **string** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] [default to null] +**Value** | **string** | Must be a valid serialized protocol buffer of the above specified type. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufEmpty.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufEmpty.md new file mode 100644 index 00000000..692f7346 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufEmpty.md @@ -0,0 +1,9 @@ +# ProtobufEmpty + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufFieldMask.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufFieldMask.md new file mode 100644 index 00000000..abf911d8 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/ProtobufFieldMask.md @@ -0,0 +1,10 @@ +# ProtobufFieldMask + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Paths** | **[]string** | The set of field mask paths. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/RpcStatus.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/RpcStatus.md new file mode 100644 index 00000000..52a18490 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/RpcStatus.md @@ -0,0 +1,12 @@ +# RpcStatus + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Code** | **int32** | The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. | [optional] [default to null] +**Message** | **string** | A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. | [optional] [default to null] +**Details** | [**[]ProtobufAny**](protobufAny.md) | A list of messages that carry the error details. There is a common set of message types for APIs to use. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VersionVersionKind.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VersionVersionKind.md new file mode 100644 index 00000000..6af21699 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VersionVersionKind.md @@ -0,0 +1,9 @@ +# VersionVersionKind + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeDetail.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeDetail.md new file mode 100644 index 00000000..daad17b9 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeDetail.md @@ -0,0 +1,17 @@ +# VulnerabilityTypeDetail + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CpeUri** | **string** | The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in which the vulnerability manifests. Examples include distro or storage location for vulnerable jar. This field can be used as a filter in list requests. | [optional] [default to null] +**Package_** | **string** | The name of the package where the vulnerability was found. This field can be used as a filter in list requests. | [optional] [default to null] +**MinAffectedVersion** | [**VulnerabilityTypeVersion**](VulnerabilityTypeVersion.md) | The min version of the package in which the vulnerability exists. | [optional] [default to null] +**MaxAffectedVersion** | [**VulnerabilityTypeVersion**](VulnerabilityTypeVersion.md) | The max version of the package in which the vulnerability exists. This field can be used as a filter in list requests. | [optional] [default to null] +**SeverityName** | **string** | The severity (eg: distro assigned severity) for this vulnerability. | [optional] [default to null] +**Description** | **string** | A vendor-specific description of this note. | [optional] [default to null] +**FixedLocation** | [**VulnerabilityTypeVulnerabilityLocation**](VulnerabilityTypeVulnerabilityLocation.md) | The fix for this specific package version. | [optional] [default to null] +**PackageType** | **string** | | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypePackageIssue.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypePackageIssue.md new file mode 100644 index 00000000..62a94f18 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypePackageIssue.md @@ -0,0 +1,12 @@ +# VulnerabilityTypePackageIssue + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**AffectedLocation** | [**VulnerabilityTypeVulnerabilityLocation**](VulnerabilityTypeVulnerabilityLocation.md) | The location of the vulnerability. 
| [optional] [default to null] +**FixedLocation** | [**VulnerabilityTypeVulnerabilityLocation**](VulnerabilityTypeVulnerabilityLocation.md) | The location of the available fix for vulnerability. | [optional] [default to null] +**SeverityName** | **string** | The severity (eg: distro assigned severity) for this vulnerability. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeSeverity.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeSeverity.md new file mode 100644 index 00000000..82f13c73 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeSeverity.md @@ -0,0 +1,9 @@ +# VulnerabilityTypeSeverity + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVersion.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVersion.md new file mode 100644 index 00000000..f6d8cf97 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVersion.md @@ -0,0 +1,13 @@ +# VulnerabilityTypeVersion + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Epoch** | **int32** | Used to correct mistakes in the version numbering scheme. | [optional] [default to null] +**Name** | **string** | The main part of the version name. | [optional] [default to null] +**Revision** | **string** | The iteration of the package build from the above version. | [optional] [default to null] +**Kind** | [**VersionVersionKind**](VersionVersionKind.md) | Distinguish between sentinel MIN/MAX versions and normal versions. If kind is not NORMAL, then the other fields are ignored. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityDetails.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityDetails.md new file mode 100644 index 00000000..9d610f37 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityDetails.md @@ -0,0 +1,13 @@ +# VulnerabilityTypeVulnerabilityDetails + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Type_** | **string** | | [optional] [default to null] +**Severity** | [**VulnerabilityTypeSeverity**](VulnerabilityTypeSeverity.md) | Output only. The note provider assigned Severity of the vulnerability. | [optional] [default to null] +**CvssScore** | **float32** | Output only. The CVSS score of this vulnerability. CVSS score is on a scale of 0-10 where 0 indicates low severity and 10 indicates high severity. | [optional] [default to null] +**PackageIssue** | [**[]VulnerabilityTypePackageIssue**](VulnerabilityTypePackageIssue.md) | The set of affected locations and their fixes (if available) within the associated resource. 
| [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityLocation.md b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityLocation.md new file mode 100644 index 00000000..33e7d5a3 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/docs/VulnerabilityTypeVulnerabilityLocation.md @@ -0,0 +1,12 @@ +# VulnerabilityTypeVulnerabilityLocation + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CpeUri** | **string** | The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) format. Examples include distro or storage location for vulnerable jar. This field can be used as a filter in list requests. | [optional] [default to null] +**Package_** | **string** | The package being described. | [optional] [default to null] +**Version** | [**VulnerabilityTypeVersion**](VulnerabilityTypeVersion.md) | The version of the package being described. This field can be used as a filter in list requests. | [optional] [default to null] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json b/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json deleted file mode 100644 index 0ae7bdf5..00000000 --- a/vendor/github.com/grafeas/grafeas/v1alpha1/grafeas.json +++ /dev/null @@ -1,2007 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "Grafeas API", - "description": "An API to insert and retrieve annotations on cloud artifacts.", - "version": "0.1", - "license": { - "name": "Apache 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0.html" - } - }, - "schemes": ["http", "https"], - "paths": { - "/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}": { - "get": { - "tags": ["grafeas"], - "operationId": "GetOccurrence", - "description": "Returns the requested occurrence", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrencesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Occurrence" - } - } - } - }, - "delete": { - "tags": ["grafeas"], - "operationId": "DeleteOccurrence", - "description": "Deletes the given occurrence from the system.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrencesId", - "description": "Part of `name`. 
See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Empty" - } - } - } - }, - "put": { - "tags": ["grafeas"], - "operationId": "UpdateOccurrence", - "description": "Updates an existing occurrence.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the occurrence.\nShould be of the form \"projects\/{project_id}\/occurrences\/{occurrence_id}\".", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrencesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrence", - "description": "The updated occurrence.", - "in": "body", - "schema": { - "$ref": "#/definitions/Occurrence" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Occurrence" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/occurrences": { - "get": { - "tags": ["grafeas"], - "operationId": "ListOccurrences", - "description": "Lists active occurrences for a given project\/Digest.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. This contains the projectId for example: projects\/{project_id}", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "filter", - "description": "The filter expression.", - "in": "query", - "type": "string" - }, - { - "name": "pageSize", - "description": "Number of occurrences to return in the list.", - "in": "query", - "type": "integer", - "format": "int32" - }, - { - "name": "pageToken", - "description": "Token to provide to skip to a particular spot in the list.", - "in": "query", - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/ListOccurrencesResponse" - } - } - } - }, - "post": { - "tags": ["grafeas"], - "operationId": "CreateOccurrence", - "description": "Creates a new occurrence.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. This field contains the projectId for example: \"projects\/{project_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrence", - "description": "The occurrence to be inserted", - "in": "body", - "schema": { - "$ref": "#/definitions/Occurrence" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Occurrence" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}/notes": { - "get": { - "tags": ["grafeas"], - "operationId": "GetOccurrenceNote", - "description": "Gets the note that this occurrence is attached to.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "occurrencesId", - "description": "Part of `name`. 
See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Note" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/notes/{notesId}": { - "get": { - "tags": ["grafeas"], - "operationId": "GetNote", - "description": "Returns the requested occurrence", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "notesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Note" - } - } - } - }, - "delete": { - "tags": ["grafeas"], - "operationId": "DeleteNote", - "description": "Deletes the given note from the system.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "notesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Empty" - } - } - } - }, - "put": { - "tags": ["grafeas"], - "operationId": "UpdateNote", - "description": "Updates an existing note.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the note.\nShould be of the form \"projects\/{project_id}\/notes\/{note_id}\".", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "notesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "note", - "description": "The updated note.", - "in": "body", - "schema": { - "$ref": "#/definitions/Note" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Note" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/notes": { - "get": { - "tags": ["grafeas"], - "operationId": "ListNotes", - "description": "Lists all notes for a given project.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. This field contains the projectId for example:\n\"project\/{project_id}", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "filter", - "description": "The filter expression.", - "in": "query", - "type": "string" - }, - { - "name": "pageSize", - "description": "Number of notes to return in the list.", - "in": "query", - "type": "integer", - "format": "int32" - }, - { - "name": "pageToken", - "description": "Token to provide to skip to a particular spot in the list.", - "in": "query", - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/ListNotesResponse" - } - } - } - }, - "post": { - "tags": ["grafeas"], - "operationId": "CreateNote", - "description": "Creates a new note.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. 
This field contains the projectId for example:\n\"project\/{project_id}", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "noteId", - "description": "The ID to use for this note.", - "in": "query", - "type": "string" - }, - { - "name": "note", - "description": "The Note to be inserted", - "in": "body", - "schema": { - "$ref": "#/definitions/Note" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Note" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/notes/{notesId}/occurrences": { - "get": { - "tags": ["grafeas"], - "operationId": "ListNoteOccurrences", - "description": "Lists the names of Occurrences linked to a particular Note.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name field will contain the note name for example:\n \"project\/{project_id}\/notes\/{note_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "notesId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "filter", - "description": "The filter expression.", - "in": "query", - "type": "string" - }, - { - "name": "pageSize", - "description": "Number of notes to return in the list.", - "in": "query", - "type": "integer", - "format": "int32" - }, - { - "name": "pageToken", - "description": "Token to provide to skip to a particular spot in the list.", - "in": "query", - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/ListNoteOccurrencesResponse" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/operations": { - "post": { - "tags": ["grafeas"], - "operationId": "CreateOperation", - "description": "Creates a new operation", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. The projectId that this operation should be created under.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "operationId", - "description": "The ID to use for this operation. If empty a random string will be used.", - "in": "query", - "type": "string" - }, - { - "name": "body", - "description": "The request body.", - "in": "body", - "schema": { - "$ref": "#/definitions/CreateOperationRequest" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Operation" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/operations": { - "get": { - "tags": [ - "grafeas" - ], - "operationId": "ListOperations", - "description": "Lists all operations for a given project.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `parent`. 
This field contains the projectId for example:\n\"project\/{project_id}", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "filter", - "description": "The filter expression.", - "in": "query", - "type": "string" - }, - { - "name": "pageSize", - "description": "Number of operations to return in the list.", - "in": "query", - "type": "integer", - "format": "int32" - }, - { - "name": "pageToken", - "description": "Token to provide to skip to a particular spot in the list.", - "in": "query", - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/ListOperationsResponse" - } - } - } - } - }, - "/v1alpha1/projects/{projectsId}/operations/{operationsId}": { - "get": { - "tags": ["grafeas"], - "operationId": "GetOperation", - "description": "Returns the requested occurrence", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the operation in the form\n\"projects\/{project_id}\/operations\/{operation_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "operationsId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Operation" - } - } - } - }, - "put": { - "tags": ["grafeas"], - "operationId": "UpdateOperation", - "description": "Updates an existing operation returns an error if operation\n does not exist. The only valid operations are to update mark the done bit\nchange the result.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the Operation.\nShould be of the form \"projects\/{project_id}\/operations\/{operation_id}\".", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "operationsId", - "description": "Part of `name`. See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "body", - "description": "The request body.", - "in": "body", - "schema": { - "$ref": "#/definitions/UpdateOperationRequest" - } - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Operation" - } - } - } - } - }, - "delete": { - "tags": ["grafeas"], - "operationId": "DeleteOperation", - "description": "Deletes the given operation from the system.", - "parameters": [ - { - "name": "projectsId", - "description": "Part of `name`. The name of the note in the form\n\"projects\/{project_id}\/operations\/{operation_id}\"", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "operationsId", - "description": "Part of `name`. 
See documentation of `projectsId`.", - "in": "path", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "Successful operation", - "schema": { - "$ref": "#/definitions/Empty" - } - } - } - } - }, - "definitions": { - "Occurrence": { - "id": "Occurrence", - "description": "Occurrence includes information about analysis occurrences for an image.\n``", - "type": "object", - "properties": { - "name": { - "description": "The name of the occurrence in the form\n\"projects\/{project_id}\/occurrences\/{occurrence_id}\"\n@OutputOnly", - "type": "string" - }, - "resourceUrl": { - "description": "The unique url of the image or container for which the occurrence applies.\nExample: https:\/\/gcr.io\/project\/image@sha256:foo\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "noteName": { - "description": "An analysis note associated with this image, in the form\n\"projects\/{project_id}\/notes\/{note_id}\"\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "kind": { - "description": "This explicitly denotes which of the occurrence details is specified.\nThis field can be used as a filter in list requests.\n@OutputOnly", - "type": "string", - "enum": [ - "CUSTOM", - "PACKAGE_VULNERABILITY", - "BUILD_DETAILS", - "IMAGE_BASIS", - "PACKAGE_MANAGER", - "DEPLOYABLE", - "DISCOVERY" - ] - }, - "customDetails": { - "description": "Details of the custom note.", - "$ref": "#/definitions/CustomDetails" - }, - "vulnerabilityDetails": { - "description": "Details of a security vulnerability note.", - "$ref": "#/definitions/VulnerabilityDetails" - }, - "buildDetails": { - "description": "Build details for a verifiable build.", - "$ref": "#/definitions/BuildDetails" - }, - "derivedImage": { - "description": "Describes how this resource derives from the basis\nin the associated note.", - "$ref": "#/definitions/Derived" - }, - "installation": { - "description": "Describes the installation of a package on the linked resource.", - "$ref": "#/definitions/Installation" - }, - "deployment": { - "description": "Describes the deployment of an artifact on a runtime.", - "$ref": "#/definitions/Deployment" - }, - "discovered": { - "description": "Describes the initial scan status for this resource.", - "$ref": "#/definitions/Discovered" - }, - "attestation": { - "description": "Describes an attestation of an artifact.", - "$ref": "#/definitions/Attestation" - }, - "remediation": { - "description": "A description of actions that can be taken to remedy the note", - "type": "string" - }, - "createTime": { - "description": "The time this occurrence was created.\n@OutputOnly", - "type": "string", - "format": "google-datetime" - }, - "updateTime": { - "description": "The time this occurrence was last updated.\n@OutputOnly", - "type": "string", - "format": "google-datetime" - }, - "operationName": { - "description": "The name of the operation that created this note.", - "type": "string" - } - } - }, - "CustomDetails": { - "id": "CustomDetails", - "description": "Details of the custom note type", - "type": "object", - "properties": { - "description": { - "description": "A description of location of a custom note.", - "type": "string" - } - } - }, - "VulnerabilityDetails": { - "id": "VulnerabilityDetails", - "description": "Used by Occurrence to point to where the vulnerability exists and how\nto fix it.", - "type": "object", - "properties": { - "affectedLocation": { - "description": "The location of the vulnerability.", - "$ref": 
"#/definitions/VulnerabilityLocation" - }, - "fixedLocation": { - "description": "The location of the available fix for vulnerability.", - "$ref": "#/definitions/VulnerabilityLocation" - }, - "type": { - "description": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)", - "type": "string" - }, - "severity": { - "description": "The note provider assigned Severity of the vulnerability.\n@OutputOnly", - "type": "string", - "enum": [ - "UNKNOWN", - "MINIMAL", - "LOW", - "MEDIUM", - "HIGH", - "CRITICAL" - ] - }, - "cvssScore": { - "description": "The CVSS score of this vulnerability. CVSS score is on a scale of 0-10\nwhere 0 indicates low severity and 10 indicates high severity.\n@OutputOnly", - "type": "number", - "format": "float" - }, - "packageIssue": { - "description": "The set of affected locations and their fixes (if available) within\nthe associated resource.", - "type": "array", - "items": { - "$ref": "#/definitions/PackageIssue" - } - } - } - }, - "VulnerabilityLocation": { - "id": "VulnerabilityLocation", - "description": "The location of the vulnerability", - "type": "object", - "properties": { - "cpeUri": { - "description": "The cpe_uri in [cpe format] (https:\/\/cpe.mitre.org\/specification\/)\nformat. Examples include distro or storage location for vulnerable jar.\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "package": { - "description": "The package being described.", - "type": "string" - }, - "version": { - "description": "The version of the package being described.\nThis field can be used as a filter in list requests.", - "$ref": "#/definitions/Version" - } - } - }, - "Version": { - "id": "Version", - "description": "Version contains structured information about the version of the package.\nFor a discussion of this in Debian\/Ubuntu:\nhttp:\/\/serverfault.com\/questions\/604541\/debian-packages-version-convention\nFor a discussion of this in Redhat\/Fedora\/Centos:\nhttp:\/\/blog.jasonantman.com\/2014\/07\/how-yum-and-rpm-compare-versions\/", - "type": "object", - "properties": { - "epoch": { - "description": "Used to correct mistakes in the version numbering scheme.", - "type": "integer", - "format": "int32" - }, - "name": { - "description": "The main part of the version name.", - "type": "string" - }, - "revision": { - "description": "The iteration of the package build from the above version.", - "type": "string" - }, - "kind": { - "description": "Distinguish between sentinel MIN\/MAX versions and normal versions.\nIf kind is not NORMAL, then the other fields are ignored.", - "type": "string", - "enum": [ - "NORMAL", - "MINIMUM", - "MAXIMUM" - ] - } - } - }, - "PackageIssue": { - "id": "PackageIssue", - "description": "This message wraps a location affected by a vulnerability and its\nassociated fix (if one is available).", - "type": "object", - "properties": { - "affectedLocation": { - "description": "The location of the vulnerability.", - "$ref": "#/definitions/VulnerabilityLocation" - }, - "fixedLocation": { - "description": "The location of the available fix for vulnerability.", - "$ref": "#/definitions/VulnerabilityLocation" - }, - "severityName": { - "description": "The severity (eg: distro assigned severity) for this vulnerability.", - "type": "string" - } - } - }, - "BuildDetails": { - "id": "BuildDetails", - "description": "Message encapsulating build provenance details", - "type": "object", - "properties": { - "provenance": { - "description": "The actual provenance", - "$ref": 
"#/definitions/BuildProvenance" - }, - "provenanceBytes": { - "description": "Serialized json representation of the provenance, used in generating the\nBuildSignature in the corresponding Result. After verifying the signature,\nprovenance_bytes can be unmarshalled and compared to the provenance to\nconfirm that it is unchanged. A base64-encoded string representation of the\nprovenance bytes is used for the signature in order to interoperate with\nopenssl which expects this format for signature verification.\n\nThe serialized form is captured both to avoid ambiguity in how the\nprovenance is marshalled to json as well to prevent incompatibilities with\nfuture changes.", - "type": "string" - } - } - }, - "BuildProvenance": { - "id": "BuildProvenance", - "description": "Provenance of a build. Contains all information needed to verify the full\ndetails about the build from source to completion.", - "type": "object", - "properties": { - "id": { - "description": "Unique identifier of the build.", - "type": "string" - }, - "projectId": { - "description": "ID of the project.", - "type": "string" - }, - "projectNum": { - "description": "Numerical ID of the project.", - "type": "string", - "format": "int64" - }, - "commands": { - "description": "Commands requested by the build.", - "type": "array", - "items": { - "$ref": "#/definitions/Command" - } - }, - "builtArtifacts": { - "description": "Output of the build.", - "type": "array", - "items": { - "$ref": "#/definitions/Artifact" - } - }, - "createTime": { - "description": "Time at which the build was created.", - "type": "string", - "format": "google-datetime" - }, - "startTime": { - "description": "Time at which execution of the build was started.", - "type": "string", - "format": "google-datetime" - }, - "finishTime": { - "description": "Time at which execution of the build was finished.", - "type": "string", - "format": "google-datetime" - }, - "userId": { - "description": "GAIA ID of end user who initiated this build; at the time that the\nBuildProvenance is uploaded to Analysis, this will be resolved to the\nprimary e-mail address of the user and stored in the Creator field.", - "type": "string", - "format": "int64" - }, - "creator": { - "description": "E-mail address of the user who initiated this build. Note that this was the\nuser's e-mail address at the time the build was initiated; this address may\nnot represent the same end-user for all time.", - "type": "string" - }, - "logsBucket": { - "description": "Google Cloud Storage bucket where logs were written.", - "type": "string" - }, - "sourceProvenance": { - "description": "Details of the Source input to the build.", - "$ref": "#/definitions/Source" - }, - "triggerId": { - "description": "Trigger identifier if the build was triggered automatically; empty if not.", - "type": "string" - }, - "buildOptions": { - "description": "Special options applied to this build. 
This is a catch-all field where\nbuild providers can enter any desired additional details.", - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "builderVersion": { - "description": "Version string of the builder at the time this build was executed.", - "type": "string" - } - } - }, - "Command": { - "id": "Command", - "description": "Command describes a step performed as part of the build pipeline.", - "type": "object", - "properties": { - "name": { - "description": "Name of the command, as presented on the command line, or if the command is\npackaged as a Docker container, as presented to `docker pull`.", - "type": "string" - }, - "env": { - "description": "Environment variables set before running this Command.", - "type": "array", - "items": { - "type": "string" - } - }, - "args": { - "description": "Command-line arguments used when executing this Command.", - "type": "array", - "items": { - "type": "string" - } - }, - "dir": { - "description": "Working directory (relative to project source root) used when running\nthis Command.", - "type": "string" - }, - "id": { - "description": "Optional unique identifier for this Command, used in wait_for to reference\nthis Command as a dependency.", - "type": "string" - }, - "waitFor": { - "description": "The ID(s) of the Command(s) that this Command depends on.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "Artifact": { - "id": "Artifact", - "description": "Artifact describes a build product.", - "type": "object", - "properties": { - "checksum": { - "description": "Hash or checksum value of a binary, or Docker Registry 2.0 digest of a\ncontainer.", - "type": "string" - }, - "id": { - "description": "Artifact ID, if any; for container images, this will be a URL by digest\nlike gcr.io\/projectID\/imagename@sha256:123456", - "type": "string" - }, - "names": { - "description": "Related artifact names. This may be the path to a binary or jar file, or in\nthe case of a container build, the name used to push the container image to\nGoogle Container Registry, as presented to `docker push`. Note that a\nsingle Artifact ID can have multiple names, for example if two tags are\napplied to one image.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "Source": { - "id": "Source", - "description": "Source describes the location of the source used for the build.", - "type": "object", - "properties": { - "storageSource": { - "description": "If provided, get the source from this location in in Google Cloud\nStorage.", - "$ref": "#/definitions/StorageSource" - }, - "repoSource": { - "description": "If provided, get source from this location in a Cloud Repo.", - "$ref": "#/definitions/RepoSource" - }, - "artifactStorageSource": { - "description": "If provided, the input binary artifacts for the build came from this\nlocation.", - "$ref": "#/definitions/StorageSource" - }, - "sourceContext": { - "description": "If provided, the source code used for the build came from this location.", - "$ref": "#/definitions/ExtendedSourceContext" - }, - "additionalSourceContexts": { - "description": "If provided, some of the source code used for the build may be found in\nthese locations, in the case where the source repository had multiple\nremotes or submodules. 
This list will not include the context specified in\nthe source_context field.", - "type": "array", - "items": { - "$ref": "#/definitions/ExtendedSourceContext" - } - }, - "fileHashes": { - "description": "Hash(es) of the build source, which can be used to verify that the original\nsource integrity was maintained in the build.\n\nThe keys to this map are file paths used as build source and the values\ncontain the hash values for those files.\n\nIf the build source came in a single package such as a gzipped tarfile\n(.tar.gz), the FileHash will be for the single path to that file.", - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/FileHashes" - } - } - } - }, - "StorageSource": { - "id": "StorageSource", - "description": "StorageSource describes the location of the source in an archive file in\nGoogle Cloud Storage.", - "type": "object", - "properties": { - "bucket": { - "description": "Google Cloud Storage bucket containing source (see [Bucket Name\nRequirements]\n(https:\/\/cloud.google.com\/storage\/docs\/bucket-naming#requirements)).", - "type": "string" - }, - "object": { - "description": "Google Cloud Storage object containing source.", - "type": "string" - }, - "generation": { - "description": "Google Cloud Storage generation for the object.", - "type": "string", - "format": "int64" - } - } - }, - "RepoSource": { - "id": "RepoSource", - "description": "RepoSource describes the location of the source in a Google Cloud Source\nRepository.", - "type": "object", - "properties": { - "projectId": { - "description": "ID of the project that owns the repo.", - "type": "string" - }, - "repoName": { - "description": "Name of the repo.", - "type": "string" - }, - "branchName": { - "description": "Name of the branch to build.", - "type": "string" - }, - "tagName": { - "description": "Name of the tag to build.", - "type": "string" - }, - "commitSha": { - "description": "Explicit commit SHA to build.", - "type": "string" - } - } - }, - "ExtendedSourceContext": { - "id": "ExtendedSourceContext", - "description": "An ExtendedSourceContext is a SourceContext combined with additional\ndetails describing the context.", - "type": "object", - "properties": { - "context": { - "description": "Any source context.", - "$ref": "#/definitions/SourceContext" - }, - "labels": { - "description": "Labels with user defined metadata.", - "type": "object", - "additionalProperties": { - "type": "string" - } - } - } - }, - "SourceContext": { - "id": "SourceContext", - "description": "A SourceContext is a reference to a tree of files. A SourceContext together\nwith a path point to a unique revision of a single file or directory.", - "type": "object", - "properties": { - "cloudRepo": { - "description": "A SourceContext referring to a revision in a cloud repo.", - "$ref": "#/definitions/CloudRepoSourceContext" - }, - "cloudWorkspace": { - "description": "A SourceContext referring to a snapshot in a cloud workspace.", - "$ref": "#/definitions/CloudWorkspaceSourceContext" - }, - "gerrit": { - "description": "A SourceContext referring to a Gerrit project.", - "$ref": "#/definitions/GerritSourceContext" - }, - "git": { - "description": "A SourceContext referring to any third party Git repo (e.g. 
GitHub).", - "$ref": "#/definitions/GitSourceContext" - } - } - }, - "CloudRepoSourceContext": { - "id": "CloudRepoSourceContext", - "description": "A CloudRepoSourceContext denotes a particular revision in a cloud\nrepo (a repo hosted by the Google Cloud Platform).", - "type": "object", - "properties": { - "repoId": { - "description": "The ID of the repo.", - "$ref": "#/definitions/RepoId" - }, - "revisionId": { - "description": "A revision ID.", - "type": "string" - }, - "aliasName": { - "description": "The name of an alias (branch, tag, etc.).", - "type": "string" - }, - "aliasContext": { - "description": "An alias, which may be a branch or tag.", - "$ref": "#/definitions/AliasContext" - } - } - }, - "RepoId": { - "id": "RepoId", - "description": "A unique identifier for a cloud repo.", - "type": "object", - "properties": { - "projectRepoId": { - "description": "A combination of a project ID and a repo name.", - "$ref": "#/definitions/ProjectRepoId" - }, - "uid": { - "description": "A server-assigned, globally unique identifier.", - "type": "string" - } - } - }, - "ProjectRepoId": { - "id": "ProjectRepoId", - "description": "Selects a repo using a Google Cloud Platform project ID\n(e.g. winged-cargo-31) and a repo name within that project.", - "type": "object", - "properties": { - "projectId": { - "description": "The ID of the project.", - "type": "string" - }, - "repoName": { - "description": "The name of the repo. Leave empty for the default repo.", - "type": "string" - } - } - }, - "AliasContext": { - "id": "AliasContext", - "description": "An alias to a repo revision.", - "type": "object", - "properties": { - "kind": { - "description": "The alias kind.", - "type": "string", - "enum": [ - "ANY", - "FIXED", - "MOVABLE", - "OTHER" - ] - }, - "name": { - "description": "The alias name.", - "type": "string" - } - } - }, - "CloudWorkspaceSourceContext": { - "id": "CloudWorkspaceSourceContext", - "description": "A CloudWorkspaceSourceContext denotes a workspace at a particular snapshot.", - "type": "object", - "properties": { - "workspaceId": { - "description": "The ID of the workspace.", - "$ref": "#/definitions/CloudWorkspaceId" - }, - "snapshotId": { - "description": "The ID of the snapshot.\nAn empty snapshot_id refers to the most recent snapshot.", - "type": "string" - } - } - }, - "CloudWorkspaceId": { - "id": "CloudWorkspaceId", - "description": "A CloudWorkspaceId is a unique identifier for a cloud workspace.\nA cloud workspace is a place associated with a repo where modified files\ncan be stored before they are committed.", - "type": "object", - "properties": { - "repoId": { - "description": "The ID of the repo containing the workspace.", - "$ref": "#/definitions/RepoId" - }, - "name": { - "description": "The unique name of the workspace within the repo. This is the name\nchosen by the client in the Source API's CreateWorkspace method.", - "type": "string" - } - } - }, - "GerritSourceContext": { - "id": "GerritSourceContext", - "description": "A SourceContext referring to a Gerrit project.", - "type": "object", - "properties": { - "hostUri": { - "description": "The URI of a running Gerrit instance.", - "type": "string" - }, - "gerritProject": { - "description": "The full project name within the host. 
Projects may be nested, so\n\"project\/subproject\" is a valid project name.\nThe \"repo name\" is hostURI\/project.", - "type": "string" - }, - "revisionId": { - "description": "A revision (commit) ID.", - "type": "string" - }, - "aliasName": { - "description": "The name of an alias (branch, tag, etc.).", - "type": "string" - }, - "aliasContext": { - "description": "An alias, which may be a branch or tag.", - "$ref": "#/definitions/AliasContext" - } - } - }, - "GitSourceContext": { - "id": "GitSourceContext", - "description": "A GitSourceContext denotes a particular revision in a third party Git\nrepository (e.g. GitHub).", - "type": "object", - "properties": { - "url": { - "description": "Git repository URL.", - "type": "string" - }, - "revisionId": { - "description": "Git commit hash.\nrequired.", - "type": "string" - } - } - }, - "FileHashes": { - "id": "FileHashes", - "description": "Container message for hashes of byte content of files, used in Source\nmessages to verify integrity of source input to the build.", - "type": "object", - "properties": { - "fileHash": { - "description": "Collection of file hashes.", - "type": "array", - "items": { - "$ref": "#/definitions/Hash" - } - } - } - }, - "Hash": { - "id": "Hash", - "description": "Container message for hash values.", - "type": "object", - "properties": { - "type": { - "description": "The type of hash that was performed.", - "type": "string", - "enum": [ - "NONE", - "SHA256", - "MD5" - ] - }, - "value": { - "description": "The hash value.", - "type": "string", - "format": "byte" - } - } - }, - "Derived": { - "id": "Derived", - "description": "Derived describes the derived image portion (Occurrence) of the\nDockerImage relationship. This image would be produced from a Dockerfile\nwith FROM .", - "type": "object", - "properties": { - "fingerprint": { - "description": "The fingerprint of the derived image", - "$ref": "#/definitions/Fingerprint" - }, - "distance": { - "description": "The number of layers by which this image differs from\nthe associated image basis.\n@OutputOnly", - "type": "integer", - "format": "uint32" - }, - "layerInfo": { - "description": "This contains layer-specific metadata, if populated it\nhas length \u201Cdistance\u201D and is ordered with [distance] being the\nlayer immediately following the base image and [1]\nbeing the final layer.", - "type": "array", - "items": { - "$ref": "#/definitions/Layer" - } - }, - "baseResourceUrl": { - "description": "This contains the base image url for the derived image Occurrence\n@OutputOnly", - "type": "string" - } - } - }, - "Fingerprint": { - "id": "Fingerprint", - "description": "A set of properties that uniquely identify a given Docker image.", - "type": "object", - "properties": { - "v1Name": { - "description": "The layer-id of the final layer in the Docker image\u2019s v1\nrepresentation.\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "v2Blob": { - "description": "The ordered list of v2 blobs that represent a given image.", - "type": "array", - "items": { - "type": "string" - } - }, - "v2Name": { - "description": "The name of the image\u2019s v2 blobs computed via:\n [bottom] := v2_blobbottom := sha256(v2_blob[N] + \u201C \u201D + v2_name[N+1])\nOnly the name of the final blob is kept.\nThis field can be used as a filter in list requests.\n@OutputOnly", - "type": "string" - } - } - }, - "Layer": { - "id": "Layer", - "description": "Layer holds metadata specific to a layer of a Docker image.", - "type": "object", - "properties": { - 
"directive": { - "description": "The recovered Dockerfile directive used to construct this layer.", - "type": "string", - "enum": [ - "UNKNOWN_DIRECTIVE", - "MAINTAINER", - "RUN", - "CMD", - "LABEL", - "EXPOSE", - "ENV", - "ADD", - "COPY", - "ENTRYPOINT", - "VOLUME", - "USER", - "WORKDIR", - "ARG", - "ONBUILD", - "STOPSIGNAL", - "HEALTHCHECK", - "SHELL" - ] - }, - "arguments": { - "description": "The recovered arguments to the Dockerfile directive.", - "type": "string" - } - } - }, - "Installation": { - "id": "Installation", - "description": "This represents how a particular software package may be installed on\na system.", - "type": "object", - "properties": { - "name": { - "description": "The name of the installed package.\n@OutputOnly", - "type": "string" - }, - "location": { - "description": "All of the places within the filesystem versions of this package\nhave been found.", - "type": "array", - "items": { - "$ref": "#/definitions/Location" - } - } - } - }, - "Location": { - "id": "Location", - "description": "An occurrence of a particular package installation found within a\nsystem's filesystem.\ne.g. glibc was found in \/var\/lib\/dpkg\/status", - "type": "object", - "properties": { - "cpeUri": { - "description": "The cpe_uri in [cpe format](https:\/\/cpe.mitre.org\/specification\/)\ndenoting the package manager version distributing a package.", - "type": "string" - }, - "version": { - "description": "The version installed at this location.", - "$ref": "#/definitions/Version" - }, - "path": { - "description": "The path from which we gathered that this package\/version is installed.", - "type": "string" - } - } - }, - "Deployment": { - "id": "Deployment", - "description": "The period during which some deployable was active in a runtime.", - "type": "object", - "properties": { - "userEmail": { - "description": "Identity of the user that triggered this deployment.", - "type": "string" - }, - "deployTime": { - "description": "Beginning of the lifetime of this deployment.", - "type": "string", - "format": "google-datetime" - }, - "undeployTime": { - "description": "End of the lifetime of this deployment.", - "type": "string", - "format": "google-datetime" - }, - "config": { - "description": "Configuration used to create this deployment.", - "type": "object", - "additionalProperties": { - "type": "string", - "description": "Properties of the object. Contains field @type with type URL." - } - }, - "address": { - "description": "Address of the runtime element hosting this deployment.", - "type": "string" - } - } - }, - "Discovered": { - "id": "Discovered", - "description": "Provides information about the scan status of a discovered resource.", - "type": "object", - "properties": { - "operation": { - "description": "An operation that indicates the status of the current scan.\n@OutputOnly", - "$ref": "#/definitions/Operation" - } - } - }, - "Operation": { - "id": "Operation", - "description": "This resource represents a long-running operation that is the result of a\nnetwork API call.", - "type": "object", - "properties": { - "name": { - "description": "The server-assigned name, which is only unique within the same service that\noriginally returns it. If you use the default HTTP mapping, the\n`name` should have the format of `operations\/some\/unique\/name`.", - "type": "string" - }, - "metadata": { - "description": "Service-specific metadata associated with the operation. 
It typically\ncontains progress information and common metadata such as create time.\nSome services might not provide such metadata. Any method that returns a\nlong-running operation should document the metadata type, if any.", - "type": "object", - "additionalProperties": { - "type": "string", - "description": "Properties of the object. Contains field @type with type URL." - } - }, - "done": { - "description": "If the value is `false`, it means the operation is still in progress.\nIf true, the operation is completed, and either `error` or `response` is\navailable.", - "type": "boolean" - }, - "error": { - "description": "The error result of the operation in case of failure or cancellation.", - "$ref": "#/definitions/Status" - }, - "response": { - "description": "The normal response of the operation in case of success. If the original\nmethod returns no data on success, such as `Delete`, the response is\n`google.protobuf.Empty`. If the original method is standard\n`Get`\/`Create`\/`Update`, the response should be the resource. For other\nmethods, the response should have the type `XxxResponse`, where `Xxx`\nis the original method name. For example, if the original method name\nis `TakeSnapshot()`, the inferred response type is\n`TakeSnapshotResponse`.", - "type": "object", - "additionalProperties": { - "type": "string", - "description": "Properties of the object. Contains field @type with type URL." - } - } - } - }, - "Status": { - "id": "Status", - "description": "The `Status` type defines a logical error model that is suitable for different\nprogramming environments, including REST APIs and RPC APIs. It is used by\n[gRPC](https:\/\/github.com\/grpc). The error model is designed to be:\n\n- Simple to use and understand for most users\n- Flexible enough to meet unexpected needs\n\n# Overview\n\nThe `Status` message contains three pieces of data: error code, error message,\nand error details. The error code should be an enum value of\ngoogle.rpc.Code, but it may accept additional error codes if needed. The\nerror message should be a developer-facing English message that helps\ndevelopers *understand* and *resolve* the error. If a localized user-facing\nerror message is needed, put the localized message in the error details or\nlocalize it in the client. The optional error details may contain arbitrary\ninformation about the error. There is a predefined set of error detail types\nin the package `google.rpc` that can be used for common error conditions.\n\n# Language mapping\n\nThe `Status` message is the logical representation of the error model, but it\nis not necessarily the actual wire format. When the `Status` message is\nexposed in different client libraries and different wire protocols, it can be\nmapped differently. For example, it will likely be mapped to some exceptions\nin Java, but more likely mapped to some error codes in C.\n\n# Other uses\n\nThe error model and the `Status` message can be used in a variety of\nenvironments, either with or without APIs, to provide a\nconsistent developer experience across different environments.\n\nExample uses of this error model include:\n\n- Partial errors. If a service needs to return partial errors to the client,\n it may embed the `Status` in the normal response to indicate the partial\n errors.\n\n- Workflow errors. A typical workflow has multiple steps. Each step may\n have a `Status` message for error reporting.\n\n- Batch operations. 
If a client uses batch request and batch response, the\n `Status` message should be used directly inside batch response, one for\n each error sub-response.\n\n- Asynchronous operations. If an API call embeds asynchronous operation\n results in its response, the status of those operations should be\n represented directly using the `Status` message.\n\n- Logging. If some API errors are stored in logs, the message `Status` could\n be used directly after any stripping needed for security\/privacy reasons.", - "type": "object", - "properties": { - "code": { - "description": "The status code, which should be an enum value of google.rpc.Code.", - "type": "integer", - "format": "int32" - }, - "message": { - "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\ngoogle.rpc.Status.details field, or localized by the client.", - "type": "string" - }, - "details": { - "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use.", - "type": "array", - "items": { - "type": "object", - "additionalProperties": { - "type": "string", - "description": "Properties of the object. Contains field @type with type URL." - } - } - } - } - }, - "Attestation": { - "id": "Attestation", - "description": "Occurrence that represents a single \"attestation\". The authenticity of an\nAttestation can be verified using the attached signature. If the verifier\ntrusts the public key of the signer, then verifying the signature is\nsufficient to establish trust. In this circumstance, the\nAttestationAuthority to which this Attestation is attached is primarily\nuseful for look-up (how to find this Attestation if you already know the\nAuthority and artifact to be verified) and intent (which authority was this\nattestation intended to sign for).", - "type": "object", - "properties": { - "pgpSignedAttestation": { - - "$ref": "#/definitions/PgpSignedAttestation" - } - } - }, - "PgpSignedAttestation": { - "id": "PgpSignedAttestation", - "description": "An attestation wrapper with a PGP-compatible signature.\nThis message only supports ATTACHED signatures, where the payload that is\nsigned is included alongside the signature itself in the same file.", - "type": "object", - "properties": { - "signature": { - "description": "The raw content of the signature, as output by gpg or equivalent. Since\nthis message only supports attached signatures, the payload that was signed\nmust be attached. While the signature format supported is dependent on the\nverification implementation, currently only ASCII-armored (`--armor` to\ngpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are\nsupported.\nConcretely, `gpg --sign --armor --output=signature.gpg payload.json` will\ncreate the signature content expected in this field in `signature.gpg` for\nthe `payload.json` attestation payload.", - "type": "string" - }, - "contentType": { - "description": "Type (e.g. schema) of the attestation payload that was signed.\nThe verifier must ensure that the provided type is one that the verifier\nsupports, and that the attestation payload is a valid instantiation of that\ntype (e.g. by validating a JSON schema).", - "type": "string", - "enum": [ - "UNSET", - "SIMPLE_SIGNING_JSON" - ] - }, - "pgpKeyId": { - "description": "The ID of the key, as output by `gpg --list-keys`. This should be 8\nhexadecimal digits, capitalized. 
e.g.\n$ gpg --list-keys pub\n2048R\/A663AEEA 2017-08-01 ui Fake Name\n\nIn the above example, the `key_id` is \"A663AEEA\".\nNote that in practice this ID is the last 64 bits of the key fingerprint.", - "type": "string" - } - } - }, - "ListOccurrencesResponse": { - "id": "ListOccurrencesResponse", - "description": "Response including listed active occurrences.", - "type": "object", - "properties": { - "occurrences": { - "description": "The occurrences requested.", - "type": "array", - "items": { - "$ref": "#/definitions/Occurrence" - } - }, - "nextPageToken": { - "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. An empty value means no more results.", - "type": "string" - } - } - }, - "ListOperationsResponse": { - "id": "ListOperationsResponse", - "description": "Response including listed operations.", - "type": "object", - "properties": { - "operations": { - "description": "The operations requested.", - "type": "array", - "items": { - "$ref": "#/definitions/Operation" - } - }, - "nextPageToken": { - "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. An empty value means no more results.", - "type": "string" - } - } - }, - "Empty": { - "id": "Empty", - "description": "A generic empty message that you can re-use to avoid defining duplicated\nempty messages in your APIs. A typical example is to use it as the request\nor the response type of an API method. For instance:\n\n service Foo {\n rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);\n }\n\nThe JSON representation for `Empty` is empty JSON object `{}`.", - "type": "object", - "properties": { - } - }, - "Note": { - "id": "Note", - "description": "Note provides a detailed description of a note using information\nfrom the provider of the note.", - "type": "object", - "properties": { - "name": { - "description": "The name of the note in the form\n\"projects\/{project_id}\/notes\/{note_id}\"", - "type": "string" - }, - "shortDescription": { - "description": "A one sentence description of this note", - "type": "string" - }, - "longDescription": { - "description": "A detailed description of this note", - "type": "string" - }, - "kind": { - "description": "This explicitly denotes which kind of note is specified.\nThis field can be used as a filter in list requests.\n@OutputOnly", - "type": "string", - "enum": [ - "CUSTOM", - "PACKAGE_VULNERABILITY", - "BUILD_DETAILS", - "IMAGE_BASIS", - "PACKAGE_MANAGER", - "DEPLOYABLE", - "DISCOVERY", - "ATTESTATION_AUTHORITY" - ] - }, - "vulnerabilityType": { - "description": "A package vulnerability type of note.", - "$ref": "#/definitions/VulnerabilityType" - }, - "buildType": { - "description": "Build provenance type for a verifiable build.", - "$ref": "#/definitions/BuildType" - }, - "baseImage": { - "description": "A note describing a base image.", - "$ref": "#/definitions/Basis" - }, - "package": { - "description": "A note describing a package hosted by various package managers.", - "$ref": "#/definitions/Package" - }, - "deployable": { - "description": "A note describing something that can be deployed.", - "$ref": "#/definitions/Deployable" - }, - "discovery": { - "description": "A note describing a project\/analysis type.", - "$ref": "#/definitions/Discovery" - }, - "attestationAuthority": { - "description": "A note describing an attestation role.", - "$ref": "#/definitions/AttestationAuthority" - }, - - "relatedUrl": { - "description": "Urls 
associated with this note", - "type": "array", - "items": { - "$ref": "#/definitions/RelatedUrl" - } - }, - "expirationTime": { - "description": "Time of expiration for this Note, null if Note currently does not\nexpire.", - "type": "string", - "format": "google-datetime" - }, - "createTime": { - "description": "The time this note was created.\nThis field can be used as a filter in list requests.\n@OutputOnly", - "type": "string", - "format": "google-datetime" - }, - "updateTime": { - "description": "The time this note was last updated.\nThis field can be used as a filter in list requests.\n@OutputOnly", - "type": "string", - "format": "google-datetime" - }, - "operationName": { - "description": "The name of the operation that created this note.", - "type": "string" - }, - "relatedNoteNames": { - "description": "Other notes related to this note.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "VulnerabilityType": { - "id": "VulnerabilityType", - "description": "VulnerabilityType provides metadata about a security vulnerability.", - "type": "object", - "properties": { - "cvssScore": { - "description": "The CVSS score for this Vulnerability.", - "type": "number", - "format": "float" - }, - "severity": { - "description": "Note provider assigned impact of the vulnerability", - "type": "string", - "enum": [ - "UNKNOWN", - "MINIMAL", - "LOW", - "MEDIUM", - "HIGH", - "CRITICAL" - ] - }, - "package_type": { - "description": "The type of package; whether native or non native(ruby gems,\nnode.js packages etc)", - "type": "string" - }, - "details": { - "description": "All information about the package to specifically identify this\nvulnerability. One entry per (version range and cpe_uri) the\npackage vulnerability has manifested in.", - "type": "array", - "items": { - "$ref": "#/definitions/Detail" - } - } - } - }, - "Detail": { - "id": "Detail", - "description": "Identifies all occurrences of this vulnerability in the package for a\nspecific distro\/location\nFor example: glibc in cpe:\/o:debian:debian_linux:8 for versions 2.1 - 2.2", - "type": "object", - "properties": { - "cpeUri": { - "description": "The cpe_uri in [cpe format] (https:\/\/cpe.mitre.org\/specification\/) in\nwhich the vulnerability manifests. 
Examples include distro or storage\nlocation for vulnerable jar.\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "package": { - "description": "The name of the package where the vulnerability was found.\nThis field can be used as a filter in list requests.", - "type": "string" - }, - "minAffectedVersion": { - "description": "The min version of the package in which the vulnerability exists.", - "$ref": "#/definitions/Version" - }, - "maxAffectedVersion": { - "description": "The max version of the package in which the vulnerability exists.\nThis field can be used as a filter in list requests.", - "$ref": "#/definitions/Version" - }, - "severityName": { - "description": "The severity (eg: distro assigned severity) for this vulnerability.", - "type": "string" - }, - "description": { - "description": "A vendor-specific description of this note.", - "type": "string" - }, - "fixedLocation": { - "description": "The fix for this specific package version.", - "$ref": "#/definitions/VulnerabilityLocation" - } - } - }, - "BuildType": { - "id": "BuildType", - "description": "Note holding the version of the provider's builder and the signature of\nthe provenance message in linked BuildDetails.", - "type": "object", - "properties": { - "builderVersion": { - "description": "Version of the builder which produced this Note.", - "type": "string" - }, - "signature": { - "description": "Signature of the build in Occurrences pointing to the Note containing this\nBuilderDetails.", - "$ref": "#/definitions/BuildSignature" - } - } - }, - "BuildSignature": { - "id": "BuildSignature", - "description": "Message encapsulating signature of the verified build", - "type": "object", - "properties": { - "publicKey": { - "description": "Public key of the builder which can be used to verify that related\nFindings are valid and unchanged. If `key_type` is empty this defaults\nto PEM encoded public keys.\n\nThis field may be empty if `key_id` references an external key.\n\nFor Cloud Container Builder based signatures this is a PEM encoded public\nkey. To verify the Cloud Container Builder signature, place the contents of\nthis field into a file (public.pem). The signature field is base64-decoded\ninto its binary representation in signature.bin, and the provenance bytes\nfrom BuildDetails are base64-decoded into a binary representation in\nsigned.bin. OpenSSL can then verify the signature:\n`openssl sha256 -verify public.pem -signature signature.bin signed.bin`", - "type": "string" - }, - "signature": { - "description": "Signature of the related BuildProvenance, encoded in a base64 string.", - "type": "string" - }, - "keyId": { - "description": "An ID for the key used to sign. This could be either an ID for the key\nstored in `public_key` (e.g., the ID or fingerprint for a PGP key, or the\nCN for a cert), or a reference to an external key (e.g., a reference to a\nkey in Cloud KMS).", - "type": "string" - }, - "keyType": { - "description": "The type of the key, either stored in `public_key` or referenced in\n`key_id`", - "type": "string", - "enum": [ - "UNSET", - "PGP_ASCII_ARMORED", - "PKIX_PEM" - ] - } - } - }, - "Basis": { - "id": "Basis", - "description": "Basis describes the base image portion (Note) of the DockerImage\nrelationship. Linked occurrences are derived from this or an\nequivalent image via:\n FROM \nOr an equivalent reference, e.g. 
a tag of the resource_url.", - "type": "object", - "properties": { - "resourceUrl": { - "description": "The resource_url for the resource representing the basis of\nassociated occurrence images.", - "type": "string" - }, - "fingerprint": { - "description": "The fingerprint of the base image", - "$ref": "#/definitions/Fingerprint" - } - } - }, - "Package": { - "id": "Package", - "description": "This represents a particular package that is distributed over\nvarious channels.\ne.g. glibc (aka libc6) is distributed by many, at various versions.", - "type": "object", - "properties": { - "name": { - "description": "The name of the package.", - "type": "string" - }, - "distribution": { - "description": "The various channels by which a package is distributed.", - "type": "array", - "items": { - "$ref": "#/definitions/Distribution" - } - } - } - }, - "Distribution": { - "id": "Distribution", - "description": "This represents a particular channel of distribution for a given package.\ne.g. Debian's jessie-backports dpkg mirror", - "type": "object", - "properties": { - "cpeUri": { - "description": "The cpe_uri in [cpe format](https:\/\/cpe.mitre.org\/specification\/)\ndenoting the package manager version distributing a package.", - "type": "string" - }, - "architecture": { - "description": "The CPU architecture for which packages in this distribution\nchannel were built", - "type": "string", - "enum": [ - "UNKNOWN", - "X86", - "X64" - ] - }, - "latestVersion": { - "description": "The latest available version of this package in\nthis distribution channel.", - "$ref": "#/definitions/Version" - }, - "maintainer": { - "description": "A freeform string denoting the maintainer of this package.", - "type": "string" - }, - "url": { - "description": "The distribution channel-specific homepage for this package.", - "type": "string" - }, - "description": { - "description": "The distribution channel-specific description of this package.", - "type": "string" - } - } - }, - "Deployable": { - "id": "Deployable", - "description": "An artifact that can be deployed in some runtime.", - "type": "object", - "properties": { - "resourceUri": { - "description": "Resource URI for the artifact being deployed.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "Discovery": { - "id": "Discovery", - "description": "Note that indicates a type of analysis and exists in a provider project to\nindicate the status of an analysis on a resource. Absence of an occurrence\nlinked to this note for a resource indicates that analysis hasn't started.", - "type": "object", - "properties": { - "analysisKind": { - "description": "The kind of analysis that is handled by this discovery.", - "type": "string", - "enum": [ - "UNKNOWN", - "CUSTOM", - "PACKAGE_VULNERABILITY", - "BUILD_DETAILS", - "IMAGE_BASIS", - "PACKAGE_MANAGER", - "DEPLOYABLE", - "DISCOVERY", - "ATTESTATION_AUTHORITY" - ] - } - } - }, - "AttestationAuthority": { - "id": "AttestationAuthority", - "description": "Note kind that represents a logical attestation \"role\" or \"authority\". For\nexample, an organization might have one AttestationAuthority for \"QA\" and one\nfor \"build\". This Note is intended to act strictly as a grouping mechanism\nfor the attached Occurrences (Attestations). This grouping mechanism also\nprovides a security boundary, since IAM ACLs gate the ability for a principle\nto attach an Occurrence to a given Note. 
It also provides a single point of\nlookup to find all attached Attestation Occurrences, even if they don't all\nlive in the same project.", - "type": "object", - "properties": { - "hint": { - - "$ref": "#/definitions/AttestationAuthorityHint" - } - } - }, - "AttestationAuthorityHint": { - "id": "AttestationAuthorityHint", - "description": "This submessage provides human-readable hints about the purpose of the\nAttestationAuthority. Because the name of a Note acts as its resource\nreference, it is important to disambiguate the canonical name of the Note\n(which might be a UUID for security purposes) from \"readable\" names more\nsuitable for debug output. Note that these hints should NOT be used to\nlook up AttestationAuthorities in security sensitive contexts, such as when\nlooking up Attestations to verify.", - "type": "object", - "properties": { - "humanReadableName": { - "description": "The human readable name of this Attestation Authority, e.g. \"qa\".", - "type": "string" - } - } - }, - "RelatedUrl": { - "id": "RelatedUrl", - "description": "Metadata for any related url information", - "type": "object", - "properties": { - "url": { - "description": "Specific url to associate with the note", - "type": "string" - }, - "label": { - "description": "Label to describe usage of the url", - "type": "string" - } - } - }, - "ListNotesResponse": { - "id": "ListNotesResponse", - "description": "Response including listed notes.", - "type": "object", - "properties": { - "notes": { - "description": "The occurrences requested", - "type": "array", - "items": { - "$ref": "#/definitions/Note" - } - }, - "nextPageToken": { - "description": "The next pagination token in the List response. It should be used as\npage_token for the following request. An empty value means no more result.", - "type": "string" - } - } - }, - "ListNoteOccurrencesResponse": { - "id": "ListNoteOccurrencesResponse", - "description": "Response including listed occurrences for a note.", - "type": "object", - "properties": { - "occurrences": { - "description": "The occurrences attached to the specified note.", - "type": "array", - "items": { - "$ref": "#/definitions/Occurrence" - } - }, - "nextPageToken": { - "description": "Token to receive the next page of notes.", - "type": "string" - } - } - }, - "CreateOperationRequest": { - "id": "CreateOperationRequest", - "description": "Request for creating an operation", - "type": "object", - "properties": { - "operationId": { - "description": "The ID to use for this operation.", - "type": "string" - }, - "operation": { - "description": "The operation to create.", - "$ref": "#/definitions/Operation" - } - } - }, - "UpdateOperationRequest": { - "id": "UpdateOperationRequest", - "description": "Request for updating an existing operation", - "type": "object", - "properties": { - "operation": { - "description": "The operation to create.", - "$ref": "#/definitions/Operation" - } - } - } - } -} diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go index ab92f4aa..c876dd12 100644 --- a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.go @@ -1,76 +1,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: v1alpha1/proto/grafeas.proto -/* -Package grafeas is a generated protocol buffer package. 
- -It is generated from these files: - v1alpha1/proto/grafeas.proto - -It has these top-level messages: - CreateProjectRequest - GetProjectRequest - ListProjectsRequest - DeleteProjectRequest - GetOccurrenceRequest - ListOccurrencesRequest - DeleteOccurrenceRequest - CreateOccurrenceRequest - UpdateOccurrenceRequest - GetNoteRequest - GetOccurrenceNoteRequest - ListNotesRequest - DeleteNoteRequest - CreateNoteRequest - UpdateNoteRequest - ListNoteOccurrencesRequest - ListProjectsResponse - ListNoteOccurrencesResponse - ListNotesResponse - ListOccurrencesResponse - ListOperationsResponse - UpdateOperationRequest - CreateOperationRequest - Project - OperationMetadata - Artifact - AttestationAuthority - BuildDetails - BuildProvenance - BuildSignature - BuildType - Command - Deployable - DockerImage - Discovery - FileHashes - Hash - Note - Occurrence - PackageManager - PgpSignedAttestation - Source - RepoSource - StorageSource - VulnerabilityType - SourceContext - AliasContext - CloudRepoSourceContext - GerritSourceContext - GitSourceContext - RepoId - ProjectRepoId -*/ package grafeas import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/empty" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" import _ "google.golang.org/genproto/googleapis/api/annotations" -import google_protobuf2 "google.golang.org/genproto/protobuf/field_mask" -import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp" -import google_longrunning "google.golang.org/genproto/googleapis/longrunning" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" import ( context "golang.org/x/net/context" @@ -114,40 +54,42 @@ var BuildSignature_KeyType_value = map[string]int32{ func (x BuildSignature_KeyType) String() string { return proto.EnumName(BuildSignature_KeyType_name, int32(x)) } -func (BuildSignature_KeyType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{29, 0} } +func (BuildSignature_KeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{29, 0} +} // Types of platforms. 
-type Deployable_Deployment_Platform int32 +type Deployable_DeploymentDetails_Platform int32 const ( // Unknown - Deployable_Deployment_PLATFORM_UNSPECIFIED Deployable_Deployment_Platform = 0 + Deployable_DeploymentDetails_PLATFORM_UNSPECIFIED Deployable_DeploymentDetails_Platform = 0 // Google Container Engine - Deployable_Deployment_GKE Deployable_Deployment_Platform = 1 + Deployable_DeploymentDetails_GKE Deployable_DeploymentDetails_Platform = 1 // Google App Engine: Flexible Environment - Deployable_Deployment_FLEX Deployable_Deployment_Platform = 2 + Deployable_DeploymentDetails_FLEX Deployable_DeploymentDetails_Platform = 2 // Custom user-defined platform - Deployable_Deployment_CUSTOM Deployable_Deployment_Platform = 3 + Deployable_DeploymentDetails_CUSTOM Deployable_DeploymentDetails_Platform = 3 ) -var Deployable_Deployment_Platform_name = map[int32]string{ +var Deployable_DeploymentDetails_Platform_name = map[int32]string{ 0: "PLATFORM_UNSPECIFIED", 1: "GKE", 2: "FLEX", 3: "CUSTOM", } -var Deployable_Deployment_Platform_value = map[string]int32{ +var Deployable_DeploymentDetails_Platform_value = map[string]int32{ "PLATFORM_UNSPECIFIED": 0, "GKE": 1, "FLEX": 2, "CUSTOM": 3, } -func (x Deployable_Deployment_Platform) String() string { - return proto.EnumName(Deployable_Deployment_Platform_name, int32(x)) +func (x Deployable_DeploymentDetails_Platform) String() string { + return proto.EnumName(Deployable_DeploymentDetails_Platform_name, int32(x)) } -func (Deployable_Deployment_Platform) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{32, 0, 0} +func (Deployable_DeploymentDetails_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{32, 0, 0} } // Instructions from dockerfile @@ -237,7 +179,7 @@ func (x DockerImage_Layer_Directive) String() string { return proto.EnumName(DockerImage_Layer_Directive_name, int32(x)) } func (DockerImage_Layer_Directive) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{33, 0, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33, 0, 0} } // Specifies the hash algorithm, if any. @@ -262,7 +204,9 @@ var Hash_HashType_value = map[string]int32{ func (x Hash_HashType) String() string { return proto.EnumName(Hash_HashType_name, int32(x)) } -func (Hash_HashType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{36, 0} } +func (Hash_HashType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{36, 0} +} // This must be 1:1 with members of our oneofs, it can be used for filtering // Note and Occurrence on their kind. @@ -283,6 +227,8 @@ const ( Note_DEPLOYABLE Note_Kind = 6 // The note and occurrence track the initial discovery status of a resource. Note_DISCOVERY Note_Kind = 7 + // This represents a logical "role" that can attest to artifacts. 
+ Note_ATTESTATION_AUTHORITY Note_Kind = 8 ) var Note_Kind_name = map[int32]string{ @@ -293,6 +239,7 @@ var Note_Kind_name = map[int32]string{ 5: "PACKAGE_MANAGER", 6: "DEPLOYABLE", 7: "DISCOVERY", + 8: "ATTESTATION_AUTHORITY", } var Note_Kind_value = map[string]int32{ "KIND_UNSPECIFIED": 0, @@ -302,12 +249,15 @@ var Note_Kind_value = map[string]int32{ "PACKAGE_MANAGER": 5, "DEPLOYABLE": 6, "DISCOVERY": 7, + "ATTESTATION_AUTHORITY": 8, } func (x Note_Kind) String() string { return proto.EnumName(Note_Kind_name, int32(x)) } -func (Note_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{37, 0} } +func (Note_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{37, 0} +} // Instruction set architectures supported by various package managers. type PackageManager_Architecture int32 @@ -336,7 +286,7 @@ func (x PackageManager_Architecture) String() string { return proto.EnumName(PackageManager_Architecture_name, int32(x)) } func (PackageManager_Architecture) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{39, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39, 0} } // Type (e.g. schema) of the attestation payload that was signed. @@ -365,7 +315,7 @@ func (x PgpSignedAttestation_ContentType) String() string { return proto.EnumName(PgpSignedAttestation_ContentType_name, int32(x)) } func (PgpSignedAttestation_ContentType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{40, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{40, 0} } // Note provider-assigned severity/impact ranking @@ -407,7 +357,7 @@ func (x VulnerabilityType_Severity) String() string { return proto.EnumName(VulnerabilityType_Severity_name, int32(x)) } func (VulnerabilityType_Severity) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{44, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 0} } // Whether this is an ordinary package version or a @@ -440,7 +390,7 @@ func (x VulnerabilityType_Version_VersionKind) String() string { return proto.EnumName(VulnerabilityType_Version_VersionKind_name, int32(x)) } func (VulnerabilityType_Version_VersionKind) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{44, 0, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 0, 0} } // The type of an alias. @@ -474,38 +424,83 @@ var AliasContext_Kind_value = map[string]int32{ func (x AliasContext_Kind) String() string { return proto.EnumName(AliasContext_Kind_name, int32(x)) } -func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{46, 0} } +func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{46, 0} +} // Request to insert a new Project. 
type CreateProjectRequest struct { - // The name of the project of the form - // "projects/{project_id}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The project to be inserted + Project *Project `protobuf:"bytes,1,opt,name=project" json:"project,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *CreateProjectRequest) Reset() { *m = CreateProjectRequest{} } -func (m *CreateProjectRequest) String() string { return proto.CompactTextString(m) } -func (*CreateProjectRequest) ProtoMessage() {} -func (*CreateProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (m *CreateProjectRequest) Reset() { *m = CreateProjectRequest{} } +func (m *CreateProjectRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProjectRequest) ProtoMessage() {} +func (*CreateProjectRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{0} +} +func (m *CreateProjectRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProjectRequest.Unmarshal(m, b) +} +func (m *CreateProjectRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProjectRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProjectRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProjectRequest.Merge(dst, src) +} +func (m *CreateProjectRequest) XXX_Size() int { + return xxx_messageInfo_CreateProjectRequest.Size(m) +} +func (m *CreateProjectRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProjectRequest.DiscardUnknown(m) +} -func (m *CreateProjectRequest) GetName() string { +var xxx_messageInfo_CreateProjectRequest proto.InternalMessageInfo + +func (m *CreateProjectRequest) GetProject() *Project { if m != nil { - return m.Name + return m.Project } - return "" + return nil } // Request to get a Project. 
type GetProjectRequest struct { // The name of the project of the form // "projects/{project_id}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GetProjectRequest) Reset() { *m = GetProjectRequest{} } -func (m *GetProjectRequest) String() string { return proto.CompactTextString(m) } -func (*GetProjectRequest) ProtoMessage() {} -func (*GetProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *GetProjectRequest) Reset() { *m = GetProjectRequest{} } +func (m *GetProjectRequest) String() string { return proto.CompactTextString(m) } +func (*GetProjectRequest) ProtoMessage() {} +func (*GetProjectRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{1} +} +func (m *GetProjectRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProjectRequest.Unmarshal(m, b) +} +func (m *GetProjectRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProjectRequest.Marshal(b, m, deterministic) +} +func (dst *GetProjectRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProjectRequest.Merge(dst, src) +} +func (m *GetProjectRequest) XXX_Size() int { + return xxx_messageInfo_GetProjectRequest.Size(m) +} +func (m *GetProjectRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProjectRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProjectRequest proto.InternalMessageInfo func (m *GetProjectRequest) GetName() string { if m != nil { @@ -521,13 +516,35 @@ type ListProjectsRequest struct { // Number of projects to return in the list. PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` // Token to provide to skip to a particular spot in the list. 
- PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListProjectsRequest) Reset() { *m = ListProjectsRequest{} } -func (m *ListProjectsRequest) String() string { return proto.CompactTextString(m) } -func (*ListProjectsRequest) ProtoMessage() {} -func (*ListProjectsRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *ListProjectsRequest) Reset() { *m = ListProjectsRequest{} } +func (m *ListProjectsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProjectsRequest) ProtoMessage() {} +func (*ListProjectsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{2} +} +func (m *ListProjectsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectsRequest.Unmarshal(m, b) +} +func (m *ListProjectsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProjectsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProjectsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectsRequest.Merge(dst, src) +} +func (m *ListProjectsRequest) XXX_Size() int { + return xxx_messageInfo_ListProjectsRequest.Size(m) +} +func (m *ListProjectsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectsRequest proto.InternalMessageInfo func (m *ListProjectsRequest) GetFilter() string { if m != nil { @@ -554,13 +571,35 @@ func (m *ListProjectsRequest) GetPageToken() string { type DeleteProjectRequest struct { // The name of the project of the form // "projects/{project_id}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DeleteProjectRequest) Reset() { *m = DeleteProjectRequest{} } -func (m *DeleteProjectRequest) String() string { return proto.CompactTextString(m) } -func (*DeleteProjectRequest) ProtoMessage() {} -func (*DeleteProjectRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *DeleteProjectRequest) Reset() { *m = DeleteProjectRequest{} } +func (m *DeleteProjectRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProjectRequest) ProtoMessage() {} +func (*DeleteProjectRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{3} +} +func (m *DeleteProjectRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProjectRequest.Unmarshal(m, b) +} +func (m *DeleteProjectRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProjectRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProjectRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProjectRequest.Merge(dst, src) +} +func (m *DeleteProjectRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProjectRequest.Size(m) +} +func (m *DeleteProjectRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProjectRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProjectRequest proto.InternalMessageInfo func (m *DeleteProjectRequest) GetName() string { if m != nil { @@ 
-573,13 +612,35 @@ func (m *DeleteProjectRequest) GetName() string { type GetOccurrenceRequest struct { // The name of the occurrence of the form // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GetOccurrenceRequest) Reset() { *m = GetOccurrenceRequest{} } -func (m *GetOccurrenceRequest) String() string { return proto.CompactTextString(m) } -func (*GetOccurrenceRequest) ProtoMessage() {} -func (*GetOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (m *GetOccurrenceRequest) Reset() { *m = GetOccurrenceRequest{} } +func (m *GetOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceRequest) ProtoMessage() {} +func (*GetOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{4} +} +func (m *GetOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceRequest.Merge(dst, src) +} +func (m *GetOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceRequest.Size(m) +} +func (m *GetOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceRequest proto.InternalMessageInfo func (m *GetOccurrenceRequest) GetName() string { if m != nil { @@ -597,13 +658,35 @@ type ListOccurrencesRequest struct { // Number of occurrences to return in the list. PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` // Token to provide to skip to a particular spot in the list. 
- PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListOccurrencesRequest) Reset() { *m = ListOccurrencesRequest{} } -func (m *ListOccurrencesRequest) String() string { return proto.CompactTextString(m) } -func (*ListOccurrencesRequest) ProtoMessage() {} -func (*ListOccurrencesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } +func (m *ListOccurrencesRequest) Reset() { *m = ListOccurrencesRequest{} } +func (m *ListOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesRequest) ProtoMessage() {} +func (*ListOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{5} +} +func (m *ListOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesRequest.Merge(dst, src) +} +func (m *ListOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesRequest.Size(m) +} +func (m *ListOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesRequest proto.InternalMessageInfo func (m *ListOccurrencesRequest) GetParent() string { if m != nil { @@ -637,13 +720,35 @@ func (m *ListOccurrencesRequest) GetPageToken() string { type DeleteOccurrenceRequest struct { // The name of the occurrence in the form of // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DeleteOccurrenceRequest) Reset() { *m = DeleteOccurrenceRequest{} } -func (m *DeleteOccurrenceRequest) String() string { return proto.CompactTextString(m) } -func (*DeleteOccurrenceRequest) ProtoMessage() {} -func (*DeleteOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } +func (m *DeleteOccurrenceRequest) Reset() { *m = DeleteOccurrenceRequest{} } +func (m *DeleteOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteOccurrenceRequest) ProtoMessage() {} +func (*DeleteOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{6} +} +func (m *DeleteOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteOccurrenceRequest.Unmarshal(m, b) +} +func (m *DeleteOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteOccurrenceRequest.Merge(dst, src) +} +func (m *DeleteOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteOccurrenceRequest.Size(m) +} +func (m *DeleteOccurrenceRequest) XXX_DiscardUnknown() { + 
xxx_messageInfo_DeleteOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteOccurrenceRequest proto.InternalMessageInfo func (m *DeleteOccurrenceRequest) GetName() string { if m != nil { @@ -657,13 +762,35 @@ type CreateOccurrenceRequest struct { // This field contains the project Id for example: "projects/{project_id}" Parent string `protobuf:"bytes,3,opt,name=parent" json:"parent,omitempty"` // The occurrence to be inserted - Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence" json:"occurrence,omitempty"` + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence" json:"occurrence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *CreateOccurrenceRequest) Reset() { *m = CreateOccurrenceRequest{} } -func (m *CreateOccurrenceRequest) String() string { return proto.CompactTextString(m) } -func (*CreateOccurrenceRequest) ProtoMessage() {} -func (*CreateOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } +func (m *CreateOccurrenceRequest) Reset() { *m = CreateOccurrenceRequest{} } +func (m *CreateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOccurrenceRequest) ProtoMessage() {} +func (*CreateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{7} +} +func (m *CreateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOccurrenceRequest.Unmarshal(m, b) +} +func (m *CreateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOccurrenceRequest.Merge(dst, src) +} +func (m *CreateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_CreateOccurrenceRequest.Size(m) +} +func (m *CreateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOccurrenceRequest proto.InternalMessageInfo func (m *CreateOccurrenceRequest) GetParent() string { if m != nil { @@ -687,13 +814,35 @@ type UpdateOccurrenceRequest struct { // The updated occurrence. Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence" json:"occurrence,omitempty"` // The fields to update. 
- UpdateMask *google_protobuf2.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UpdateOccurrenceRequest) Reset() { *m = UpdateOccurrenceRequest{} } -func (m *UpdateOccurrenceRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateOccurrenceRequest) ProtoMessage() {} -func (*UpdateOccurrenceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } +func (m *UpdateOccurrenceRequest) Reset() { *m = UpdateOccurrenceRequest{} } +func (m *UpdateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOccurrenceRequest) ProtoMessage() {} +func (*UpdateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{8} +} +func (m *UpdateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOccurrenceRequest.Unmarshal(m, b) +} +func (m *UpdateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOccurrenceRequest.Merge(dst, src) +} +func (m *UpdateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOccurrenceRequest.Size(m) +} +func (m *UpdateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOccurrenceRequest proto.InternalMessageInfo func (m *UpdateOccurrenceRequest) GetName() string { if m != nil { @@ -709,7 +858,7 @@ func (m *UpdateOccurrenceRequest) GetOccurrence() *Occurrence { return nil } -func (m *UpdateOccurrenceRequest) GetUpdateMask() *google_protobuf2.FieldMask { +func (m *UpdateOccurrenceRequest) GetUpdateMask() *field_mask.FieldMask { if m != nil { return m.UpdateMask } @@ -720,13 +869,35 @@ func (m *UpdateOccurrenceRequest) GetUpdateMask() *google_protobuf2.FieldMask { type GetNoteRequest struct { // The name of the note in the form of // "providers/{provider_id}/notes/{NOTE_ID}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GetNoteRequest) Reset() { *m = GetNoteRequest{} } -func (m *GetNoteRequest) String() string { return proto.CompactTextString(m) } -func (*GetNoteRequest) ProtoMessage() {} -func (*GetNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } +func (m *GetNoteRequest) Reset() { *m = GetNoteRequest{} } +func (m *GetNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetNoteRequest) ProtoMessage() {} +func (*GetNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{9} +} +func (m *GetNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNoteRequest.Unmarshal(m, b) +} +func (m *GetNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNoteRequest.Merge(dst, src) 
+} +func (m *GetNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetNoteRequest.Size(m) +} +func (m *GetNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNoteRequest proto.InternalMessageInfo func (m *GetNoteRequest) GetName() string { if m != nil { @@ -739,13 +910,35 @@ func (m *GetNoteRequest) GetName() string { type GetOccurrenceNoteRequest struct { // The name of the occurrence in the form // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GetOccurrenceNoteRequest) Reset() { *m = GetOccurrenceNoteRequest{} } -func (m *GetOccurrenceNoteRequest) String() string { return proto.CompactTextString(m) } -func (*GetOccurrenceNoteRequest) ProtoMessage() {} -func (*GetOccurrenceNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } +func (m *GetOccurrenceNoteRequest) Reset() { *m = GetOccurrenceNoteRequest{} } +func (m *GetOccurrenceNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceNoteRequest) ProtoMessage() {} +func (*GetOccurrenceNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{10} +} +func (m *GetOccurrenceNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceNoteRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceNoteRequest.Merge(dst, src) +} +func (m *GetOccurrenceNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceNoteRequest.Size(m) +} +func (m *GetOccurrenceNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceNoteRequest proto.InternalMessageInfo func (m *GetOccurrenceNoteRequest) GetName() string { if m != nil { @@ -763,13 +956,35 @@ type ListNotesRequest struct { // Number of notes to return in the list. PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` // Token to provide to skip to a particular spot in the list. 
- PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListNotesRequest) Reset() { *m = ListNotesRequest{} } -func (m *ListNotesRequest) String() string { return proto.CompactTextString(m) } -func (*ListNotesRequest) ProtoMessage() {} -func (*ListNotesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } +func (m *ListNotesRequest) Reset() { *m = ListNotesRequest{} } +func (m *ListNotesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotesRequest) ProtoMessage() {} +func (*ListNotesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{11} +} +func (m *ListNotesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesRequest.Unmarshal(m, b) +} +func (m *ListNotesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNotesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesRequest.Merge(dst, src) +} +func (m *ListNotesRequest) XXX_Size() int { + return xxx_messageInfo_ListNotesRequest.Size(m) +} +func (m *ListNotesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesRequest proto.InternalMessageInfo func (m *ListNotesRequest) GetParent() string { if m != nil { @@ -803,13 +1018,35 @@ func (m *ListNotesRequest) GetPageToken() string { type DeleteNoteRequest struct { // The name of the note in the form of // "providers/{provider_id}/notes/{NOTE_ID}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DeleteNoteRequest) Reset() { *m = DeleteNoteRequest{} } -func (m *DeleteNoteRequest) String() string { return proto.CompactTextString(m) } -func (*DeleteNoteRequest) ProtoMessage() {} -func (*DeleteNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } +func (m *DeleteNoteRequest) Reset() { *m = DeleteNoteRequest{} } +func (m *DeleteNoteRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNoteRequest) ProtoMessage() {} +func (*DeleteNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{12} +} +func (m *DeleteNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNoteRequest.Unmarshal(m, b) +} +func (m *DeleteNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNoteRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNoteRequest.Merge(dst, src) +} +func (m *DeleteNoteRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNoteRequest.Size(m) +} +func (m *DeleteNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNoteRequest proto.InternalMessageInfo func (m *DeleteNoteRequest) GetName() string { if m != nil { @@ -826,13 +1063,35 @@ type CreateNoteRequest struct { // The ID to use for this note. 
NoteId string `protobuf:"bytes,2,opt,name=note_id,json=noteId" json:"note_id,omitempty"` // The Note to be inserted - Note *Note `protobuf:"bytes,3,opt,name=note" json:"note,omitempty"` + Note *Note `protobuf:"bytes,3,opt,name=note" json:"note,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *CreateNoteRequest) Reset() { *m = CreateNoteRequest{} } -func (m *CreateNoteRequest) String() string { return proto.CompactTextString(m) } -func (*CreateNoteRequest) ProtoMessage() {} -func (*CreateNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } +func (m *CreateNoteRequest) Reset() { *m = CreateNoteRequest{} } +func (m *CreateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNoteRequest) ProtoMessage() {} +func (*CreateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{13} +} +func (m *CreateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNoteRequest.Unmarshal(m, b) +} +func (m *CreateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNoteRequest.Merge(dst, src) +} +func (m *CreateNoteRequest) XXX_Size() int { + return xxx_messageInfo_CreateNoteRequest.Size(m) +} +func (m *CreateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNoteRequest proto.InternalMessageInfo func (m *CreateNoteRequest) GetParent() string { if m != nil { @@ -863,13 +1122,35 @@ type UpdateNoteRequest struct { // The updated note. Note *Note `protobuf:"bytes,2,opt,name=note" json:"note,omitempty"` // The fields to update. 
- UpdateMask *google_protobuf2.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UpdateNoteRequest) Reset() { *m = UpdateNoteRequest{} } -func (m *UpdateNoteRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateNoteRequest) ProtoMessage() {} -func (*UpdateNoteRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } +func (m *UpdateNoteRequest) Reset() { *m = UpdateNoteRequest{} } +func (m *UpdateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNoteRequest) ProtoMessage() {} +func (*UpdateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{14} +} +func (m *UpdateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNoteRequest.Unmarshal(m, b) +} +func (m *UpdateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNoteRequest.Merge(dst, src) +} +func (m *UpdateNoteRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNoteRequest.Size(m) +} +func (m *UpdateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNoteRequest proto.InternalMessageInfo func (m *UpdateNoteRequest) GetName() string { if m != nil { @@ -885,7 +1166,7 @@ func (m *UpdateNoteRequest) GetNote() *Note { return nil } -func (m *UpdateNoteRequest) GetUpdateMask() *google_protobuf2.FieldMask { +func (m *UpdateNoteRequest) GetUpdateMask() *field_mask.FieldMask { if m != nil { return m.UpdateMask } @@ -902,13 +1183,35 @@ type ListNoteOccurrencesRequest struct { // Number of notes to return in the list. PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize" json:"page_size,omitempty"` // Token to provide to skip to a particular spot in the list. 
- PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListNoteOccurrencesRequest) Reset() { *m = ListNoteOccurrencesRequest{} } -func (m *ListNoteOccurrencesRequest) String() string { return proto.CompactTextString(m) } -func (*ListNoteOccurrencesRequest) ProtoMessage() {} -func (*ListNoteOccurrencesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } +func (m *ListNoteOccurrencesRequest) Reset() { *m = ListNoteOccurrencesRequest{} } +func (m *ListNoteOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesRequest) ProtoMessage() {} +func (*ListNoteOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{15} +} +func (m *ListNoteOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesRequest.Merge(dst, src) +} +func (m *ListNoteOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesRequest.Size(m) +} +func (m *ListNoteOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesRequest proto.InternalMessageInfo func (m *ListNoteOccurrencesRequest) GetName() string { if m != nil { @@ -945,13 +1248,35 @@ type ListProjectsResponse struct { // The next pagination token in the list response. It should be used as // `page_token` for the following request. An empty value means no more // results. 
- NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListProjectsResponse) Reset() { *m = ListProjectsResponse{} } -func (m *ListProjectsResponse) String() string { return proto.CompactTextString(m) } -func (*ListProjectsResponse) ProtoMessage() {} -func (*ListProjectsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } +func (m *ListProjectsResponse) Reset() { *m = ListProjectsResponse{} } +func (m *ListProjectsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProjectsResponse) ProtoMessage() {} +func (*ListProjectsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{16} +} +func (m *ListProjectsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectsResponse.Unmarshal(m, b) +} +func (m *ListProjectsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProjectsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProjectsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectsResponse.Merge(dst, src) +} +func (m *ListProjectsResponse) XXX_Size() int { + return xxx_messageInfo_ListProjectsResponse.Size(m) +} +func (m *ListProjectsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectsResponse proto.InternalMessageInfo func (m *ListProjectsResponse) GetProjects() []*Project { if m != nil { @@ -972,13 +1297,35 @@ type ListNoteOccurrencesResponse struct { // The occurrences attached to the specified note. Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences" json:"occurrences,omitempty"` // Token to receive the next page of notes. 
- NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListNoteOccurrencesResponse) Reset() { *m = ListNoteOccurrencesResponse{} } -func (m *ListNoteOccurrencesResponse) String() string { return proto.CompactTextString(m) } -func (*ListNoteOccurrencesResponse) ProtoMessage() {} -func (*ListNoteOccurrencesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } +func (m *ListNoteOccurrencesResponse) Reset() { *m = ListNoteOccurrencesResponse{} } +func (m *ListNoteOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesResponse) ProtoMessage() {} +func (*ListNoteOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{17} +} +func (m *ListNoteOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesResponse.Merge(dst, src) +} +func (m *ListNoteOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesResponse.Size(m) +} +func (m *ListNoteOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesResponse proto.InternalMessageInfo func (m *ListNoteOccurrencesResponse) GetOccurrences() []*Occurrence { if m != nil { @@ -1000,13 +1347,35 @@ type ListNotesResponse struct { Notes []*Note `protobuf:"bytes,1,rep,name=notes" json:"notes,omitempty"` // The next pagination token in the list response. It should be used as // page_token for the following request. An empty value means no more result. 
- NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListNotesResponse) Reset() { *m = ListNotesResponse{} } -func (m *ListNotesResponse) String() string { return proto.CompactTextString(m) } -func (*ListNotesResponse) ProtoMessage() {} -func (*ListNotesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } +func (m *ListNotesResponse) Reset() { *m = ListNotesResponse{} } +func (m *ListNotesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNotesResponse) ProtoMessage() {} +func (*ListNotesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{18} +} +func (m *ListNotesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesResponse.Unmarshal(m, b) +} +func (m *ListNotesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNotesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesResponse.Merge(dst, src) +} +func (m *ListNotesResponse) XXX_Size() int { + return xxx_messageInfo_ListNotesResponse.Size(m) +} +func (m *ListNotesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesResponse proto.InternalMessageInfo func (m *ListNotesResponse) GetNotes() []*Note { if m != nil { @@ -1029,13 +1398,35 @@ type ListOccurrencesResponse struct { // The next pagination token in the list response. It should be used as // `page_token` for the following request. An empty value means no more // results. 
- NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListOccurrencesResponse) Reset() { *m = ListOccurrencesResponse{} } -func (m *ListOccurrencesResponse) String() string { return proto.CompactTextString(m) } -func (*ListOccurrencesResponse) ProtoMessage() {} -func (*ListOccurrencesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } +func (m *ListOccurrencesResponse) Reset() { *m = ListOccurrencesResponse{} } +func (m *ListOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesResponse) ProtoMessage() {} +func (*ListOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{19} +} +func (m *ListOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesResponse.Merge(dst, src) +} +func (m *ListOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesResponse.Size(m) +} +func (m *ListOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesResponse proto.InternalMessageInfo func (m *ListOccurrencesResponse) GetOccurrences() []*Occurrence { if m != nil { @@ -1057,13 +1448,35 @@ type ListOperationsResponse struct { // page_token for the following request. An empty value means no more results. NextPageToken string `protobuf:"bytes,1,opt,name=nextPageToken" json:"nextPageToken,omitempty"` // The operations requested. 
- Operations []*google_longrunning.Operation `protobuf:"bytes,2,rep,name=operations" json:"operations,omitempty"` + Operations []*longrunning.Operation `protobuf:"bytes,2,rep,name=operations" json:"operations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } -func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } -func (*ListOperationsResponse) ProtoMessage() {} -func (*ListOperationsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{20} +} +func (m *ListOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsResponse.Unmarshal(m, b) +} +func (m *ListOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsResponse.Merge(dst, src) +} +func (m *ListOperationsResponse) XXX_Size() int { + return xxx_messageInfo_ListOperationsResponse.Size(m) +} +func (m *ListOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsResponse proto.InternalMessageInfo func (m *ListOperationsResponse) GetNextPageToken() string { if m != nil { @@ -1072,7 +1485,7 @@ func (m *ListOperationsResponse) GetNextPageToken() string { return "" } -func (m *ListOperationsResponse) GetOperations() []*google_longrunning.Operation { +func (m *ListOperationsResponse) GetOperations() []*longrunning.Operation { if m != nil { return m.Operations } @@ -1085,13 +1498,35 @@ type UpdateOperationRequest struct { // Should be of the form "projects/{provider_id}/operations/{operation_id}". Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // The operation to create. 
- Operation *google_longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` + Operation *longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *UpdateOperationRequest) Reset() { *m = UpdateOperationRequest{} } -func (m *UpdateOperationRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateOperationRequest) ProtoMessage() {} -func (*UpdateOperationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } +func (m *UpdateOperationRequest) Reset() { *m = UpdateOperationRequest{} } +func (m *UpdateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOperationRequest) ProtoMessage() {} +func (*UpdateOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{21} +} +func (m *UpdateOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOperationRequest.Unmarshal(m, b) +} +func (m *UpdateOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOperationRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOperationRequest.Merge(dst, src) +} +func (m *UpdateOperationRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOperationRequest.Size(m) +} +func (m *UpdateOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOperationRequest proto.InternalMessageInfo func (m *UpdateOperationRequest) GetName() string { if m != nil { @@ -1100,7 +1535,7 @@ func (m *UpdateOperationRequest) GetName() string { return "" } -func (m *UpdateOperationRequest) GetOperation() *google_longrunning.Operation { +func (m *UpdateOperationRequest) GetOperation() *longrunning.Operation { if m != nil { return m.Operation } @@ -1114,13 +1549,35 @@ type CreateOperationRequest struct { // The ID to use for this operation. OperationId string `protobuf:"bytes,2,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` // The operation to create. 
- Operation *google_longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` + Operation *longrunning.Operation `protobuf:"bytes,3,opt,name=operation" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *CreateOperationRequest) Reset() { *m = CreateOperationRequest{} } -func (m *CreateOperationRequest) String() string { return proto.CompactTextString(m) } -func (*CreateOperationRequest) ProtoMessage() {} -func (*CreateOperationRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } +func (m *CreateOperationRequest) Reset() { *m = CreateOperationRequest{} } +func (m *CreateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOperationRequest) ProtoMessage() {} +func (*CreateOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{22} +} +func (m *CreateOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOperationRequest.Unmarshal(m, b) +} +func (m *CreateOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOperationRequest.Merge(dst, src) +} +func (m *CreateOperationRequest) XXX_Size() int { + return xxx_messageInfo_CreateOperationRequest.Size(m) +} +func (m *CreateOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOperationRequest proto.InternalMessageInfo func (m *CreateOperationRequest) GetParent() string { if m != nil { @@ -1136,7 +1593,7 @@ func (m *CreateOperationRequest) GetOperationId() string { return "" } -func (m *CreateOperationRequest) GetOperation() *google_longrunning.Operation { +func (m *CreateOperationRequest) GetOperation() *longrunning.Operation { if m != nil { return m.Operation } @@ -1147,13 +1604,35 @@ func (m *CreateOperationRequest) GetOperation() *google_longrunning.Operation { type Project struct { // The name of the project of the form // "projects/{project_id}" - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Project) Reset() { *m = Project{} } -func (m *Project) String() string { return proto.CompactTextString(m) } -func (*Project) ProtoMessage() {} -func (*Project) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } +func (m *Project) Reset() { *m = Project{} } +func (m *Project) String() string { return proto.CompactTextString(m) } +func (*Project) ProtoMessage() {} +func (*Project) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{23} +} +func (m *Project) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Project.Unmarshal(m, b) +} +func (m *Project) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Project.Marshal(b, m, deterministic) +} +func (dst *Project) XXX_Merge(src proto.Message) { + xxx_messageInfo_Project.Merge(dst, src) +} +func (m *Project) XXX_Size() int { + return xxx_messageInfo_Project.Size(m) +} +func (m *Project) XXX_DiscardUnknown() { + xxx_messageInfo_Project.DiscardUnknown(m) +} + +var 
xxx_messageInfo_Project proto.InternalMessageInfo func (m *Project) GetName() string { if m != nil { @@ -1166,24 +1645,46 @@ func (m *Project) GetName() string { // that created by Container Analysis Providers type OperationMetadata struct { // Output only. The time this operation was created. - CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime" json:"create_time,omitempty"` // Output only. The time that this operation was marked completed or failed. - EndTime *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } -func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } -func (*OperationMetadata) ProtoMessage() {} -func (*OperationMetadata) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{24} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} -func (m *OperationMetadata) GetCreateTime() *google_protobuf3.Timestamp { +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { if m != nil { return m.CreateTime } return nil } -func (m *OperationMetadata) GetEndTime() *google_protobuf3.Timestamp { +func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp { if m != nil { return m.EndTime } @@ -1212,13 +1713,35 @@ type Artifact struct { // Google Container Registry, as presented to `docker push`. Note that a // single Artifact ID can have multiple names, for example if two tags are // applied to one image. 
- Names []string `protobuf:"bytes,4,rep,name=names" json:"names,omitempty"` + Names []string `protobuf:"bytes,4,rep,name=names" json:"names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Artifact) Reset() { *m = Artifact{} } -func (m *Artifact) String() string { return proto.CompactTextString(m) } -func (*Artifact) ProtoMessage() {} -func (*Artifact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } +func (m *Artifact) Reset() { *m = Artifact{} } +func (m *Artifact) String() string { return proto.CompactTextString(m) } +func (*Artifact) ProtoMessage() {} +func (*Artifact) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{25} +} +func (m *Artifact) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifact.Unmarshal(m, b) +} +func (m *Artifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifact.Marshal(b, m, deterministic) +} +func (dst *Artifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifact.Merge(dst, src) +} +func (m *Artifact) XXX_Size() int { + return xxx_messageInfo_Artifact.Size(m) +} +func (m *Artifact) XXX_DiscardUnknown() { + xxx_messageInfo_Artifact.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifact proto.InternalMessageInfo func (m *Artifact) GetName() string { if m != nil { @@ -1256,13 +1779,35 @@ func (m *Artifact) GetNames() []string { // all attached Attestation Occurrences, even if they don't all live in the same // project. type AttestationAuthority struct { - Hint *AttestationAuthority_AttestationAuthorityHint `protobuf:"bytes,1,opt,name=hint" json:"hint,omitempty"` + Hint *AttestationAuthority_AttestationAuthorityHint `protobuf:"bytes,1,opt,name=hint" json:"hint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *AttestationAuthority) Reset() { *m = AttestationAuthority{} } -func (m *AttestationAuthority) String() string { return proto.CompactTextString(m) } -func (*AttestationAuthority) ProtoMessage() {} -func (*AttestationAuthority) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } +func (m *AttestationAuthority) Reset() { *m = AttestationAuthority{} } +func (m *AttestationAuthority) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority) ProtoMessage() {} +func (*AttestationAuthority) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{26} +} +func (m *AttestationAuthority) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority.Unmarshal(m, b) +} +func (m *AttestationAuthority) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority.Merge(dst, src) +} +func (m *AttestationAuthority) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority.Size(m) +} +func (m *AttestationAuthority) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestationAuthority proto.InternalMessageInfo func (m *AttestationAuthority) GetHint() *AttestationAuthority_AttestationAuthorityHint { if m != nil { @@ -1273,7 +1818,10 @@ func (m *AttestationAuthority) GetHint() *AttestationAuthority_AttestationAuthor type AttestationAuthority_AttestationAuthorityHint struct 
{ // The human readable name of this Attestation Authority, e.g. "qa". - HumanReadableName string `protobuf:"bytes,1,opt,name=human_readable_name,json=humanReadableName" json:"human_readable_name,omitempty"` + HumanReadableName string `protobuf:"bytes,1,opt,name=human_readable_name,json=humanReadableName" json:"human_readable_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *AttestationAuthority_AttestationAuthorityHint) Reset() { @@ -1284,8 +1832,25 @@ func (m *AttestationAuthority_AttestationAuthorityHint) String() string { } func (*AttestationAuthority_AttestationAuthorityHint) ProtoMessage() {} func (*AttestationAuthority_AttestationAuthorityHint) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{26, 0} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{26, 0} } +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Unmarshal(m, b) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority_AttestationAuthorityHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Merge(dst, src) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Size(m) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint proto.InternalMessageInfo func (m *AttestationAuthority_AttestationAuthorityHint) GetHumanReadableName() string { if m != nil { @@ -1294,73 +1859,95 @@ func (m *AttestationAuthority_AttestationAuthorityHint) GetHumanReadableName() s return "" } -type AttestationAuthority_Attestation struct { +type AttestationAuthority_AttestationDetails struct { // The signature, generally over the `resource_url`, that verifies this // attestation. The semantics of the signature veracity are ultimately // determined by the verification engine. 
// // Types that are valid to be assigned to Signature: - // *AttestationAuthority_Attestation_PgpSignedAttestation - Signature isAttestationAuthority_Attestation_Signature `protobuf_oneof:"signature"` + // *AttestationAuthority_AttestationDetails_PgpSignedAttestation + Signature isAttestationAuthority_AttestationDetails_Signature `protobuf_oneof:"signature"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *AttestationAuthority_Attestation) Reset() { *m = AttestationAuthority_Attestation{} } -func (m *AttestationAuthority_Attestation) String() string { return proto.CompactTextString(m) } -func (*AttestationAuthority_Attestation) ProtoMessage() {} -func (*AttestationAuthority_Attestation) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{26, 1} +func (m *AttestationAuthority_AttestationDetails) Reset() { + *m = AttestationAuthority_AttestationDetails{} +} +func (m *AttestationAuthority_AttestationDetails) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority_AttestationDetails) ProtoMessage() {} +func (*AttestationAuthority_AttestationDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{26, 1} +} +func (m *AttestationAuthority_AttestationDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority_AttestationDetails.Unmarshal(m, b) +} +func (m *AttestationAuthority_AttestationDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority_AttestationDetails.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority_AttestationDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority_AttestationDetails.Merge(dst, src) +} +func (m *AttestationAuthority_AttestationDetails) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority_AttestationDetails.Size(m) +} +func (m *AttestationAuthority_AttestationDetails) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority_AttestationDetails.DiscardUnknown(m) } -type isAttestationAuthority_Attestation_Signature interface { - isAttestationAuthority_Attestation_Signature() +var xxx_messageInfo_AttestationAuthority_AttestationDetails proto.InternalMessageInfo + +type isAttestationAuthority_AttestationDetails_Signature interface { + isAttestationAuthority_AttestationDetails_Signature() } -type AttestationAuthority_Attestation_PgpSignedAttestation struct { +type AttestationAuthority_AttestationDetails_PgpSignedAttestation struct { PgpSignedAttestation *PgpSignedAttestation `protobuf:"bytes,1,opt,name=pgp_signed_attestation,json=pgpSignedAttestation,oneof"` } -func (*AttestationAuthority_Attestation_PgpSignedAttestation) isAttestationAuthority_Attestation_Signature() { +func (*AttestationAuthority_AttestationDetails_PgpSignedAttestation) isAttestationAuthority_AttestationDetails_Signature() { } -func (m *AttestationAuthority_Attestation) GetSignature() isAttestationAuthority_Attestation_Signature { +func (m *AttestationAuthority_AttestationDetails) GetSignature() isAttestationAuthority_AttestationDetails_Signature { if m != nil { return m.Signature } return nil } -func (m *AttestationAuthority_Attestation) GetPgpSignedAttestation() *PgpSignedAttestation { - if x, ok := m.GetSignature().(*AttestationAuthority_Attestation_PgpSignedAttestation); ok { +func (m *AttestationAuthority_AttestationDetails) GetPgpSignedAttestation() *PgpSignedAttestation { + if x, ok := 
m.GetSignature().(*AttestationAuthority_AttestationDetails_PgpSignedAttestation); ok { return x.PgpSignedAttestation } return nil } // XXX_OneofFuncs is for the internal use of the proto package. -func (*AttestationAuthority_Attestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _AttestationAuthority_Attestation_OneofMarshaler, _AttestationAuthority_Attestation_OneofUnmarshaler, _AttestationAuthority_Attestation_OneofSizer, []interface{}{ - (*AttestationAuthority_Attestation_PgpSignedAttestation)(nil), +func (*AttestationAuthority_AttestationDetails) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttestationAuthority_AttestationDetails_OneofMarshaler, _AttestationAuthority_AttestationDetails_OneofUnmarshaler, _AttestationAuthority_AttestationDetails_OneofSizer, []interface{}{ + (*AttestationAuthority_AttestationDetails_PgpSignedAttestation)(nil), } } -func _AttestationAuthority_Attestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*AttestationAuthority_Attestation) +func _AttestationAuthority_AttestationDetails_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttestationAuthority_AttestationDetails) // signature switch x := m.Signature.(type) { - case *AttestationAuthority_Attestation_PgpSignedAttestation: + case *AttestationAuthority_AttestationDetails_PgpSignedAttestation: b.EncodeVarint(1<<3 | proto.WireBytes) if err := b.EncodeMessage(x.PgpSignedAttestation); err != nil { return err } case nil: default: - return fmt.Errorf("AttestationAuthority_Attestation.Signature has unexpected type %T", x) + return fmt.Errorf("AttestationAuthority_AttestationDetails.Signature has unexpected type %T", x) } return nil } -func _AttestationAuthority_Attestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*AttestationAuthority_Attestation) +func _AttestationAuthority_AttestationDetails_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttestationAuthority_AttestationDetails) switch tag { case 1: // signature.pgp_signed_attestation if wire != proto.WireBytes { @@ -1368,20 +1955,20 @@ func _AttestationAuthority_Attestation_OneofUnmarshaler(msg proto.Message, tag, } msg := new(PgpSignedAttestation) err := b.DecodeMessage(msg) - m.Signature = &AttestationAuthority_Attestation_PgpSignedAttestation{msg} + m.Signature = &AttestationAuthority_AttestationDetails_PgpSignedAttestation{msg} return true, err default: return false, nil } } -func _AttestationAuthority_Attestation_OneofSizer(msg proto.Message) (n int) { - m := msg.(*AttestationAuthority_Attestation) +func _AttestationAuthority_AttestationDetails_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttestationAuthority_AttestationDetails) // signature switch x := m.Signature.(type) { - case *AttestationAuthority_Attestation_PgpSignedAttestation: + case *AttestationAuthority_AttestationDetails_PgpSignedAttestation: s := proto.Size(x.PgpSignedAttestation) - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -1406,13 +1993,35 @@ type BuildDetails struct { // The serialized form is captured both to avoid ambiguity in how the // provenance 
is marshalled to json as well to prevent incompatibilities with // future changes. - ProvenanceBytes string `protobuf:"bytes,2,opt,name=provenance_bytes,json=provenanceBytes" json:"provenance_bytes,omitempty"` + ProvenanceBytes string `protobuf:"bytes,2,opt,name=provenance_bytes,json=provenanceBytes" json:"provenance_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BuildDetails) Reset() { *m = BuildDetails{} } -func (m *BuildDetails) String() string { return proto.CompactTextString(m) } -func (*BuildDetails) ProtoMessage() {} -func (*BuildDetails) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } +func (m *BuildDetails) Reset() { *m = BuildDetails{} } +func (m *BuildDetails) String() string { return proto.CompactTextString(m) } +func (*BuildDetails) ProtoMessage() {} +func (*BuildDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{27} +} +func (m *BuildDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildDetails.Unmarshal(m, b) +} +func (m *BuildDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildDetails.Marshal(b, m, deterministic) +} +func (dst *BuildDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildDetails.Merge(dst, src) +} +func (m *BuildDetails) XXX_Size() int { + return xxx_messageInfo_BuildDetails.Size(m) +} +func (m *BuildDetails) XXX_DiscardUnknown() { + xxx_messageInfo_BuildDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildDetails proto.InternalMessageInfo func (m *BuildDetails) GetProvenance() *BuildProvenance { if m != nil { @@ -1440,11 +2049,11 @@ type BuildProvenance struct { // Output of the build. BuiltArtifacts []*Artifact `protobuf:"bytes,6,rep,name=built_artifacts,json=builtArtifacts" json:"built_artifacts,omitempty"` // Time at which the build was created. - CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime" json:"create_time,omitempty"` // Time at which execution of the build was started. - StartTime *google_protobuf3.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + StartTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime" json:"start_time,omitempty"` // Time at which execution of the build was finished. - FinishTime *google_protobuf3.Timestamp `protobuf:"bytes,9,opt,name=finish_time,json=finishTime" json:"finish_time,omitempty"` + FinishTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=finish_time,json=finishTime" json:"finish_time,omitempty"` // E-mail address of the user who initiated this build. Note that this was the // user's e-mail address at the time the build was initiated; this address may // not represent the same end-user for all time. @@ -1459,13 +2068,35 @@ type BuildProvenance struct { // build providers can enter any desired additional details. BuildOptions map[string]string `protobuf:"bytes,16,rep,name=build_options,json=buildOptions" json:"build_options,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Version string of the builder at the time this build was executed. 
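// Illustrative sketch (not part of the generated file): the timestamp fields
// on BuildProvenance now use *timestamp.Timestamp from
// github.com/golang/protobuf/ptypes/timestamp rather than the old
// google_protobuf3 alias. A small example of filling them in, assuming the
// "pb" import alias for this vendored package:
//
//	import (
//		"github.com/golang/protobuf/ptypes"
//		pb "github.com/grafeas/grafeas/v1alpha1/proto"
//	)
//
//	func newProvenance(id string) *pb.BuildProvenance {
//		return &pb.BuildProvenance{
//			Id:         id,
//			CreateTime: ptypes.TimestampNow(), // *timestamp.Timestamp
//			StartTime:  ptypes.TimestampNow(),
//		}
//	}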
- BuilderVersion string `protobuf:"bytes,17,opt,name=builder_version,json=builderVersion" json:"builder_version,omitempty"` + BuilderVersion string `protobuf:"bytes,17,opt,name=builder_version,json=builderVersion" json:"builder_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BuildProvenance) Reset() { *m = BuildProvenance{} } -func (m *BuildProvenance) String() string { return proto.CompactTextString(m) } -func (*BuildProvenance) ProtoMessage() {} -func (*BuildProvenance) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } +func (m *BuildProvenance) Reset() { *m = BuildProvenance{} } +func (m *BuildProvenance) String() string { return proto.CompactTextString(m) } +func (*BuildProvenance) ProtoMessage() {} +func (*BuildProvenance) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{28} +} +func (m *BuildProvenance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildProvenance.Unmarshal(m, b) +} +func (m *BuildProvenance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildProvenance.Marshal(b, m, deterministic) +} +func (dst *BuildProvenance) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildProvenance.Merge(dst, src) +} +func (m *BuildProvenance) XXX_Size() int { + return xxx_messageInfo_BuildProvenance.Size(m) +} +func (m *BuildProvenance) XXX_DiscardUnknown() { + xxx_messageInfo_BuildProvenance.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildProvenance proto.InternalMessageInfo func (m *BuildProvenance) GetId() string { if m != nil { @@ -1495,21 +2126,21 @@ func (m *BuildProvenance) GetBuiltArtifacts() []*Artifact { return nil } -func (m *BuildProvenance) GetCreateTime() *google_protobuf3.Timestamp { +func (m *BuildProvenance) GetCreateTime() *timestamp.Timestamp { if m != nil { return m.CreateTime } return nil } -func (m *BuildProvenance) GetStartTime() *google_protobuf3.Timestamp { +func (m *BuildProvenance) GetStartTime() *timestamp.Timestamp { if m != nil { return m.StartTime } return nil } -func (m *BuildProvenance) GetFinishTime() *google_protobuf3.Timestamp { +func (m *BuildProvenance) GetFinishTime() *timestamp.Timestamp { if m != nil { return m.FinishTime } @@ -1583,13 +2214,35 @@ type BuildSignature struct { KeyId string `protobuf:"bytes,3,opt,name=key_id,json=keyId" json:"key_id,omitempty"` // The type of the key, either stored in `public_key` or referenced in // `key_id` - KeyType BuildSignature_KeyType `protobuf:"varint,4,opt,name=key_type,json=keyType,enum=grafeas.v1alpha1.api.BuildSignature_KeyType" json:"key_type,omitempty"` + KeyType BuildSignature_KeyType `protobuf:"varint,4,opt,name=key_type,json=keyType,enum=grafeas.v1alpha1.api.BuildSignature_KeyType" json:"key_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BuildSignature) Reset() { *m = BuildSignature{} } -func (m *BuildSignature) String() string { return proto.CompactTextString(m) } -func (*BuildSignature) ProtoMessage() {} -func (*BuildSignature) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } +func (m *BuildSignature) Reset() { *m = BuildSignature{} } +func (m *BuildSignature) String() string { return proto.CompactTextString(m) } +func (*BuildSignature) ProtoMessage() {} +func (*BuildSignature) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{29} +} +func (m 
*BuildSignature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildSignature.Unmarshal(m, b) +} +func (m *BuildSignature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildSignature.Marshal(b, m, deterministic) +} +func (dst *BuildSignature) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildSignature.Merge(dst, src) +} +func (m *BuildSignature) XXX_Size() int { + return xxx_messageInfo_BuildSignature.Size(m) +} +func (m *BuildSignature) XXX_DiscardUnknown() { + xxx_messageInfo_BuildSignature.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildSignature proto.InternalMessageInfo func (m *BuildSignature) GetPublicKey() string { if m != nil { @@ -1626,13 +2279,35 @@ type BuildType struct { BuilderVersion string `protobuf:"bytes,1,opt,name=builder_version,json=builderVersion" json:"builder_version,omitempty"` // Signature of the build in Occurrences pointing to the Note containing this // `BuilderDetails`. - Signature *BuildSignature `protobuf:"bytes,2,opt,name=signature" json:"signature,omitempty"` + Signature *BuildSignature `protobuf:"bytes,2,opt,name=signature" json:"signature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BuildType) Reset() { *m = BuildType{} } -func (m *BuildType) String() string { return proto.CompactTextString(m) } -func (*BuildType) ProtoMessage() {} -func (*BuildType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } +func (m *BuildType) Reset() { *m = BuildType{} } +func (m *BuildType) String() string { return proto.CompactTextString(m) } +func (*BuildType) ProtoMessage() {} +func (*BuildType) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{30} +} +func (m *BuildType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildType.Unmarshal(m, b) +} +func (m *BuildType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildType.Marshal(b, m, deterministic) +} +func (dst *BuildType) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildType.Merge(dst, src) +} +func (m *BuildType) XXX_Size() int { + return xxx_messageInfo_BuildType.Size(m) +} +func (m *BuildType) XXX_DiscardUnknown() { + xxx_messageInfo_BuildType.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildType proto.InternalMessageInfo func (m *BuildType) GetBuilderVersion() string { if m != nil { @@ -1664,13 +2339,35 @@ type Command struct { // this Command as a dependency. Id string `protobuf:"bytes,5,opt,name=id" json:"id,omitempty"` // The ID(s) of the Command(s) that this Command depends on. 
- WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor" json:"wait_for,omitempty"` + WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor" json:"wait_for,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Command) Reset() { *m = Command{} } -func (m *Command) String() string { return proto.CompactTextString(m) } -func (*Command) ProtoMessage() {} -func (*Command) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{31} } +func (m *Command) Reset() { *m = Command{} } +func (m *Command) String() string { return proto.CompactTextString(m) } +func (*Command) ProtoMessage() {} +func (*Command) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{31} +} +func (m *Command) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Command.Unmarshal(m, b) +} +func (m *Command) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Command.Marshal(b, m, deterministic) +} +func (dst *Command) XXX_Merge(src proto.Message) { + xxx_messageInfo_Command.Merge(dst, src) +} +func (m *Command) XXX_Size() int { + return xxx_messageInfo_Command.Size(m) +} +func (m *Command) XXX_DiscardUnknown() { + xxx_messageInfo_Command.DiscardUnknown(m) +} + +var xxx_messageInfo_Command proto.InternalMessageInfo func (m *Command) GetName() string { if m != nil { @@ -1717,13 +2414,35 @@ func (m *Command) GetWaitFor() []string { // An artifact that can be deployed in some runtime. type Deployable struct { // Resource URI for the artifact being deployed. - ResourceUri []string `protobuf:"bytes,1,rep,name=resource_uri,json=resourceUri" json:"resource_uri,omitempty"` + ResourceUri []string `protobuf:"bytes,1,rep,name=resource_uri,json=resourceUri" json:"resource_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Deployable) Reset() { *m = Deployable{} } -func (m *Deployable) String() string { return proto.CompactTextString(m) } -func (*Deployable) ProtoMessage() {} -func (*Deployable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32} } +func (m *Deployable) Reset() { *m = Deployable{} } +func (m *Deployable) String() string { return proto.CompactTextString(m) } +func (*Deployable) ProtoMessage() {} +func (*Deployable) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{32} +} +func (m *Deployable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployable.Unmarshal(m, b) +} +func (m *Deployable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployable.Marshal(b, m, deterministic) +} +func (dst *Deployable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployable.Merge(dst, src) +} +func (m *Deployable) XXX_Size() int { + return xxx_messageInfo_Deployable.Size(m) +} +func (m *Deployable) XXX_DiscardUnknown() { + xxx_messageInfo_Deployable.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployable proto.InternalMessageInfo func (m *Deployable) GetResourceUri() []string { if m != nil { @@ -1733,13 +2452,13 @@ func (m *Deployable) GetResourceUri() []string { } // The period during which some deployable was active in a runtime. -type Deployable_Deployment struct { +type Deployable_DeploymentDetails struct { // Identity of the user that triggered this deployment. 
UserEmail string `protobuf:"bytes,1,opt,name=user_email,json=userEmail" json:"user_email,omitempty"` // Beginning of the lifetime of this deployment. - DeployTime *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=deploy_time,json=deployTime" json:"deploy_time,omitempty"` + DeployTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=deploy_time,json=deployTime" json:"deploy_time,omitempty"` // End of the lifetime of this deployment. - UndeployTime *google_protobuf3.Timestamp `protobuf:"bytes,3,opt,name=undeploy_time,json=undeployTime" json:"undeploy_time,omitempty"` + UndeployTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=undeploy_time,json=undeployTime" json:"undeploy_time,omitempty"` // Configuration used to create this deployment. Config string `protobuf:"bytes,8,opt,name=config" json:"config,omitempty"` // Address of the runtime element hosting this deployment. @@ -1748,85 +2467,151 @@ type Deployable_Deployment struct { // deployable field with the same name. ResourceUri []string `protobuf:"bytes,6,rep,name=resource_uri,json=resourceUri" json:"resource_uri,omitempty"` // Platform hosting this deployment. - Platform Deployable_Deployment_Platform `protobuf:"varint,7,opt,name=platform,enum=grafeas.v1alpha1.api.Deployable_Deployment_Platform" json:"platform,omitempty"` + Platform Deployable_DeploymentDetails_Platform `protobuf:"varint,7,opt,name=platform,enum=grafeas.v1alpha1.api.Deployable_DeploymentDetails_Platform" json:"platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Deployable_Deployment) Reset() { *m = Deployable_Deployment{} } -func (m *Deployable_Deployment) String() string { return proto.CompactTextString(m) } -func (*Deployable_Deployment) ProtoMessage() {} -func (*Deployable_Deployment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32, 0} } +func (m *Deployable_DeploymentDetails) Reset() { *m = Deployable_DeploymentDetails{} } +func (m *Deployable_DeploymentDetails) String() string { return proto.CompactTextString(m) } +func (*Deployable_DeploymentDetails) ProtoMessage() {} +func (*Deployable_DeploymentDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{32, 0} +} +func (m *Deployable_DeploymentDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployable_DeploymentDetails.Unmarshal(m, b) +} +func (m *Deployable_DeploymentDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployable_DeploymentDetails.Marshal(b, m, deterministic) +} +func (dst *Deployable_DeploymentDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployable_DeploymentDetails.Merge(dst, src) +} +func (m *Deployable_DeploymentDetails) XXX_Size() int { + return xxx_messageInfo_Deployable_DeploymentDetails.Size(m) +} +func (m *Deployable_DeploymentDetails) XXX_DiscardUnknown() { + xxx_messageInfo_Deployable_DeploymentDetails.DiscardUnknown(m) +} -func (m *Deployable_Deployment) GetUserEmail() string { +var xxx_messageInfo_Deployable_DeploymentDetails proto.InternalMessageInfo + +func (m *Deployable_DeploymentDetails) GetUserEmail() string { if m != nil { return m.UserEmail } return "" } -func (m *Deployable_Deployment) GetDeployTime() *google_protobuf3.Timestamp { +func (m *Deployable_DeploymentDetails) GetDeployTime() *timestamp.Timestamp { if m != nil { return m.DeployTime } return nil } -func (m *Deployable_Deployment) GetUndeployTime() *google_protobuf3.Timestamp { +func (m 
*Deployable_DeploymentDetails) GetUndeployTime() *timestamp.Timestamp { if m != nil { return m.UndeployTime } return nil } -func (m *Deployable_Deployment) GetConfig() string { +func (m *Deployable_DeploymentDetails) GetConfig() string { if m != nil { return m.Config } return "" } -func (m *Deployable_Deployment) GetAddress() string { +func (m *Deployable_DeploymentDetails) GetAddress() string { if m != nil { return m.Address } return "" } -func (m *Deployable_Deployment) GetResourceUri() []string { +func (m *Deployable_DeploymentDetails) GetResourceUri() []string { if m != nil { return m.ResourceUri } return nil } -func (m *Deployable_Deployment) GetPlatform() Deployable_Deployment_Platform { +func (m *Deployable_DeploymentDetails) GetPlatform() Deployable_DeploymentDetails_Platform { if m != nil { return m.Platform } - return Deployable_Deployment_PLATFORM_UNSPECIFIED + return Deployable_DeploymentDetails_PLATFORM_UNSPECIFIED } // DockerImage holds types defining base image notes // and derived image occurrences. type DockerImage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DockerImage) Reset() { *m = DockerImage{} } -func (m *DockerImage) String() string { return proto.CompactTextString(m) } -func (*DockerImage) ProtoMessage() {} -func (*DockerImage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33} } +func (m *DockerImage) Reset() { *m = DockerImage{} } +func (m *DockerImage) String() string { return proto.CompactTextString(m) } +func (*DockerImage) ProtoMessage() {} +func (*DockerImage) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33} +} +func (m *DockerImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage.Unmarshal(m, b) +} +func (m *DockerImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage.Marshal(b, m, deterministic) +} +func (dst *DockerImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage.Merge(dst, src) +} +func (m *DockerImage) XXX_Size() int { + return xxx_messageInfo_DockerImage.Size(m) +} +func (m *DockerImage) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage proto.InternalMessageInfo // Layer holds metadata specific to a layer of a Docker image. type DockerImage_Layer struct { // The recovered Dockerfile directive used to construct this layer. Directive DockerImage_Layer_Directive `protobuf:"varint,1,opt,name=directive,enum=grafeas.v1alpha1.api.DockerImage_Layer_Directive" json:"directive,omitempty"` // The recovered arguments to the Dockerfile directive. 
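// Illustrative sketch (not part of the generated file): Deployable_Deployment
// is renamed to Deployable_DeploymentDetails, and the platform enum moves with
// it (zero value Deployable_DeploymentDetails_PLATFORM_UNSPECIFIED). Code that
// switched on the old type needs the new names; a rough example, assuming the
// "pb" import alias for this vendored package:
//
//	func describePlatform(d *pb.Deployable_DeploymentDetails) string {
//		switch d.GetPlatform() {
//		case pb.Deployable_DeploymentDetails_PLATFORM_UNSPECIFIED:
//			return "unspecified"
//		default:
//			return d.GetPlatform().String()
//		}
//	}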
- Arguments string `protobuf:"bytes,2,opt,name=arguments" json:"arguments,omitempty"` + Arguments string `protobuf:"bytes,2,opt,name=arguments" json:"arguments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DockerImage_Layer) Reset() { *m = DockerImage_Layer{} } -func (m *DockerImage_Layer) String() string { return proto.CompactTextString(m) } -func (*DockerImage_Layer) ProtoMessage() {} -func (*DockerImage_Layer) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 0} } +func (m *DockerImage_Layer) Reset() { *m = DockerImage_Layer{} } +func (m *DockerImage_Layer) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Layer) ProtoMessage() {} +func (*DockerImage_Layer) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33, 0} +} +func (m *DockerImage_Layer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Layer.Unmarshal(m, b) +} +func (m *DockerImage_Layer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Layer.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Layer) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Layer.Merge(dst, src) +} +func (m *DockerImage_Layer) XXX_Size() int { + return xxx_messageInfo_DockerImage_Layer.Size(m) +} +func (m *DockerImage_Layer) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Layer.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Layer proto.InternalMessageInfo func (m *DockerImage_Layer) GetDirective() DockerImage_Layer_Directive { if m != nil { @@ -1855,13 +2640,35 @@ type DockerImage_Fingerprint struct { // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) // Only the name of the final blob is kept. // This field can be used as a filter in list requests. 
- V2Name string `protobuf:"bytes,3,opt,name=v2_name,json=v2Name" json:"v2_name,omitempty"` + V2Name string `protobuf:"bytes,3,opt,name=v2_name,json=v2Name" json:"v2_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DockerImage_Fingerprint) Reset() { *m = DockerImage_Fingerprint{} } -func (m *DockerImage_Fingerprint) String() string { return proto.CompactTextString(m) } -func (*DockerImage_Fingerprint) ProtoMessage() {} -func (*DockerImage_Fingerprint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 1} } +func (m *DockerImage_Fingerprint) Reset() { *m = DockerImage_Fingerprint{} } +func (m *DockerImage_Fingerprint) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Fingerprint) ProtoMessage() {} +func (*DockerImage_Fingerprint) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33, 1} +} +func (m *DockerImage_Fingerprint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Fingerprint.Unmarshal(m, b) +} +func (m *DockerImage_Fingerprint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Fingerprint.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Fingerprint) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Fingerprint.Merge(dst, src) +} +func (m *DockerImage_Fingerprint) XXX_Size() int { + return xxx_messageInfo_DockerImage_Fingerprint.Size(m) +} +func (m *DockerImage_Fingerprint) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Fingerprint.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Fingerprint proto.InternalMessageInfo func (m *DockerImage_Fingerprint) GetV1Name() string { if m != nil { @@ -1894,13 +2701,35 @@ type DockerImage_Basis struct { // associated occurrence images. 
ResourceUrl string `protobuf:"bytes,1,opt,name=resource_url,json=resourceUrl" json:"resource_url,omitempty"` // The fingerprint of the base image - Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,2,opt,name=fingerprint" json:"fingerprint,omitempty"` + Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,2,opt,name=fingerprint" json:"fingerprint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DockerImage_Basis) Reset() { *m = DockerImage_Basis{} } -func (m *DockerImage_Basis) String() string { return proto.CompactTextString(m) } -func (*DockerImage_Basis) ProtoMessage() {} -func (*DockerImage_Basis) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 2} } +func (m *DockerImage_Basis) Reset() { *m = DockerImage_Basis{} } +func (m *DockerImage_Basis) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Basis) ProtoMessage() {} +func (*DockerImage_Basis) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33, 2} +} +func (m *DockerImage_Basis) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Basis.Unmarshal(m, b) +} +func (m *DockerImage_Basis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Basis.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Basis) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Basis.Merge(dst, src) +} +func (m *DockerImage_Basis) XXX_Size() int { + return xxx_messageInfo_DockerImage_Basis.Size(m) +} +func (m *DockerImage_Basis) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Basis.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Basis proto.InternalMessageInfo func (m *DockerImage_Basis) GetResourceUrl() string { if m != nil { @@ -1919,7 +2748,7 @@ func (m *DockerImage_Basis) GetFingerprint() *DockerImage_Fingerprint { // Derived describes the derived image portion (Occurrence) of the // DockerImage relationship. This image would be produced from a Dockerfile // with FROM . -type DockerImage_Derived struct { +type DockerImage_DerivedDetails struct { // The fingerprint of the derived image Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,1,opt,name=fingerprint" json:"fingerprint,omitempty"` // Output only. 
The number of layers by which this image differs from @@ -1932,36 +2761,58 @@ type DockerImage_Derived struct { LayerInfo []*DockerImage_Layer `protobuf:"bytes,3,rep,name=layer_info,json=layerInfo" json:"layer_info,omitempty"` // Output only.This contains the base image url for the derived image // Occurrence - BaseResourceUrl string `protobuf:"bytes,4,opt,name=base_resource_url,json=baseResourceUrl" json:"base_resource_url,omitempty"` + BaseResourceUrl string `protobuf:"bytes,4,opt,name=base_resource_url,json=baseResourceUrl" json:"base_resource_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *DockerImage_Derived) Reset() { *m = DockerImage_Derived{} } -func (m *DockerImage_Derived) String() string { return proto.CompactTextString(m) } -func (*DockerImage_Derived) ProtoMessage() {} -func (*DockerImage_Derived) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33, 3} } +func (m *DockerImage_DerivedDetails) Reset() { *m = DockerImage_DerivedDetails{} } +func (m *DockerImage_DerivedDetails) String() string { return proto.CompactTextString(m) } +func (*DockerImage_DerivedDetails) ProtoMessage() {} +func (*DockerImage_DerivedDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{33, 3} +} +func (m *DockerImage_DerivedDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_DerivedDetails.Unmarshal(m, b) +} +func (m *DockerImage_DerivedDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_DerivedDetails.Marshal(b, m, deterministic) +} +func (dst *DockerImage_DerivedDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_DerivedDetails.Merge(dst, src) +} +func (m *DockerImage_DerivedDetails) XXX_Size() int { + return xxx_messageInfo_DockerImage_DerivedDetails.Size(m) +} +func (m *DockerImage_DerivedDetails) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_DerivedDetails.DiscardUnknown(m) +} -func (m *DockerImage_Derived) GetFingerprint() *DockerImage_Fingerprint { +var xxx_messageInfo_DockerImage_DerivedDetails proto.InternalMessageInfo + +func (m *DockerImage_DerivedDetails) GetFingerprint() *DockerImage_Fingerprint { if m != nil { return m.Fingerprint } return nil } -func (m *DockerImage_Derived) GetDistance() uint32 { +func (m *DockerImage_DerivedDetails) GetDistance() uint32 { if m != nil { return m.Distance } return 0 } -func (m *DockerImage_Derived) GetLayerInfo() []*DockerImage_Layer { +func (m *DockerImage_DerivedDetails) GetLayerInfo() []*DockerImage_Layer { if m != nil { return m.LayerInfo } return nil } -func (m *DockerImage_Derived) GetBaseResourceUrl() string { +func (m *DockerImage_DerivedDetails) GetBaseResourceUrl() string { if m != nil { return m.BaseResourceUrl } @@ -1975,13 +2826,35 @@ func (m *DockerImage_Derived) GetBaseResourceUrl() string { // note for a resource indicates that analysis hasn't started. type Discovery struct { // The kind of analysis that is handled by this discovery. 
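// Illustrative sketch (not part of the generated file): DockerImage_Derived is
// now DockerImage_DerivedDetails; the getters keep their names, only the
// receiver type changes. A small example of walking the recovered layers,
// assuming the "pb" import alias for this vendored package:
//
//	func layerDirectives(d *pb.DockerImage_DerivedDetails) []string {
//		out := make([]string, 0, len(d.GetLayerInfo()))
//		for _, layer := range d.GetLayerInfo() {
//			out = append(out, layer.GetDirective().String())
//		}
//		return out
//	}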
- AnalysisKind Note_Kind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,enum=grafeas.v1alpha1.api.Note_Kind" json:"analysis_kind,omitempty"` + AnalysisKind Note_Kind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,enum=grafeas.v1alpha1.api.Note_Kind" json:"analysis_kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Discovery) Reset() { *m = Discovery{} } -func (m *Discovery) String() string { return proto.CompactTextString(m) } -func (*Discovery) ProtoMessage() {} -func (*Discovery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34} } +func (m *Discovery) Reset() { *m = Discovery{} } +func (m *Discovery) String() string { return proto.CompactTextString(m) } +func (*Discovery) ProtoMessage() {} +func (*Discovery) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{34} +} +func (m *Discovery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovery.Unmarshal(m, b) +} +func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovery.Marshal(b, m, deterministic) +} +func (dst *Discovery) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovery.Merge(dst, src) +} +func (m *Discovery) XXX_Size() int { + return xxx_messageInfo_Discovery.Size(m) +} +func (m *Discovery) XXX_DiscardUnknown() { + xxx_messageInfo_Discovery.DiscardUnknown(m) +} + +var xxx_messageInfo_Discovery proto.InternalMessageInfo func (m *Discovery) GetAnalysisKind() Note_Kind { if m != nil { @@ -1991,17 +2864,39 @@ func (m *Discovery) GetAnalysisKind() Note_Kind { } // Provides information about the scan status of a discovered resource. -type Discovery_Discovered struct { +type Discovery_DiscoveredDetails struct { // Output only. An operation that indicates the status of the current scan. 
- Operation *google_longrunning.Operation `protobuf:"bytes,1,opt,name=operation" json:"operation,omitempty"` + Operation *longrunning.Operation `protobuf:"bytes,1,opt,name=operation" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Discovery_Discovered) Reset() { *m = Discovery_Discovered{} } -func (m *Discovery_Discovered) String() string { return proto.CompactTextString(m) } -func (*Discovery_Discovered) ProtoMessage() {} -func (*Discovery_Discovered) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34, 0} } +func (m *Discovery_DiscoveredDetails) Reset() { *m = Discovery_DiscoveredDetails{} } +func (m *Discovery_DiscoveredDetails) String() string { return proto.CompactTextString(m) } +func (*Discovery_DiscoveredDetails) ProtoMessage() {} +func (*Discovery_DiscoveredDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{34, 0} +} +func (m *Discovery_DiscoveredDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovery_DiscoveredDetails.Unmarshal(m, b) +} +func (m *Discovery_DiscoveredDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovery_DiscoveredDetails.Marshal(b, m, deterministic) +} +func (dst *Discovery_DiscoveredDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovery_DiscoveredDetails.Merge(dst, src) +} +func (m *Discovery_DiscoveredDetails) XXX_Size() int { + return xxx_messageInfo_Discovery_DiscoveredDetails.Size(m) +} +func (m *Discovery_DiscoveredDetails) XXX_DiscardUnknown() { + xxx_messageInfo_Discovery_DiscoveredDetails.DiscardUnknown(m) +} -func (m *Discovery_Discovered) GetOperation() *google_longrunning.Operation { +var xxx_messageInfo_Discovery_DiscoveredDetails proto.InternalMessageInfo + +func (m *Discovery_DiscoveredDetails) GetOperation() *longrunning.Operation { if m != nil { return m.Operation } @@ -2012,13 +2907,35 @@ func (m *Discovery_Discovered) GetOperation() *google_longrunning.Operation { // messages to verify integrity of source input to the build. type FileHashes struct { // Collection of file hashes. 
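// Illustrative sketch (not part of the generated file): Discovery_Discovered
// becomes Discovery_DiscoveredDetails, and its Operation field now uses the
// plain "longrunning" import alias. A sketch of checking scan status from it,
// assuming the "pb" import alias for this vendored package:
//
//	func scanDone(d *pb.Discovery_DiscoveredDetails) bool {
//		op := d.GetOperation() // *longrunning.Operation from googleapis/longrunning
//		return op != nil && op.GetDone()
//	}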
- FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash" json:"file_hash,omitempty"` + FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash" json:"file_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FileHashes) Reset() { *m = FileHashes{} } -func (m *FileHashes) String() string { return proto.CompactTextString(m) } -func (*FileHashes) ProtoMessage() {} -func (*FileHashes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{35} } +func (m *FileHashes) Reset() { *m = FileHashes{} } +func (m *FileHashes) String() string { return proto.CompactTextString(m) } +func (*FileHashes) ProtoMessage() {} +func (*FileHashes) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{35} +} +func (m *FileHashes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileHashes.Unmarshal(m, b) +} +func (m *FileHashes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileHashes.Marshal(b, m, deterministic) +} +func (dst *FileHashes) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileHashes.Merge(dst, src) +} +func (m *FileHashes) XXX_Size() int { + return xxx_messageInfo_FileHashes.Size(m) +} +func (m *FileHashes) XXX_DiscardUnknown() { + xxx_messageInfo_FileHashes.DiscardUnknown(m) +} + +var xxx_messageInfo_FileHashes proto.InternalMessageInfo func (m *FileHashes) GetFileHash() []*Hash { if m != nil { @@ -2032,13 +2949,35 @@ type Hash struct { // The type of hash that was performed. Type Hash_HashType `protobuf:"varint,1,opt,name=type,enum=grafeas.v1alpha1.api.Hash_HashType" json:"type,omitempty"` // The hash value. - Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Hash) Reset() { *m = Hash{} } -func (m *Hash) String() string { return proto.CompactTextString(m) } -func (*Hash) ProtoMessage() {} -func (*Hash) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{36} } +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{36} +} +func (m *Hash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Hash.Unmarshal(m, b) +} +func (m *Hash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Hash.Marshal(b, m, deterministic) +} +func (dst *Hash) XXX_Merge(src proto.Message) { + xxx_messageInfo_Hash.Merge(dst, src) +} +func (m *Hash) XXX_Size() int { + return xxx_messageInfo_Hash.Size(m) +} +func (m *Hash) XXX_DiscardUnknown() { + xxx_messageInfo_Hash.DiscardUnknown(m) +} + +var xxx_messageInfo_Hash proto.InternalMessageInfo func (m *Hash) GetType() Hash_HashType { if m != nil { @@ -2079,19 +3018,44 @@ type Note struct { // URLs associated with this note RelatedUrl []*Note_RelatedUrl `protobuf:"bytes,7,rep,name=related_url,json=relatedUrl" json:"related_url,omitempty"` // Time of expiration for this note, null if note does not expire. 
- ExpirationTime *google_protobuf3.Timestamp `protobuf:"bytes,10,opt,name=expiration_time,json=expirationTime" json:"expiration_time,omitempty"` + ExpirationTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=expiration_time,json=expirationTime" json:"expiration_time,omitempty"` // Output only. The time this note was created. This field can be used as a // filter in list requests. - CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,11,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + CreateTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=create_time,json=createTime" json:"create_time,omitempty"` // Output only. The time this note was last updated. This field can be used as // a filter in list requests. - UpdateTime *google_protobuf3.Timestamp `protobuf:"bytes,12,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + UpdateTime *timestamp.Timestamp `protobuf:"bytes,12,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + // The name of the `Operation` in the form + // "projects/{project_id}/operation/{OPERATION_ID}" + OperationName string `protobuf:"bytes,19,opt,name=operation_name,json=operationName" json:"operation_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Note) Reset() { *m = Note{} } -func (m *Note) String() string { return proto.CompactTextString(m) } -func (*Note) ProtoMessage() {} -func (*Note) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37} } +func (m *Note) Reset() { *m = Note{} } +func (m *Note) String() string { return proto.CompactTextString(m) } +func (*Note) ProtoMessage() {} +func (*Note) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{37} +} +func (m *Note) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Note.Unmarshal(m, b) +} +func (m *Note) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Note.Marshal(b, m, deterministic) +} +func (dst *Note) XXX_Merge(src proto.Message) { + xxx_messageInfo_Note.Merge(dst, src) +} +func (m *Note) XXX_Size() int { + return xxx_messageInfo_Note.Size(m) +} +func (m *Note) XXX_DiscardUnknown() { + xxx_messageInfo_Note.DiscardUnknown(m) +} + +var xxx_messageInfo_Note proto.InternalMessageInfo type isNote_NoteType interface { isNote_NoteType() @@ -2207,27 +3171,34 @@ func (m *Note) GetRelatedUrl() []*Note_RelatedUrl { return nil } -func (m *Note) GetExpirationTime() *google_protobuf3.Timestamp { +func (m *Note) GetExpirationTime() *timestamp.Timestamp { if m != nil { return m.ExpirationTime } return nil } -func (m *Note) GetCreateTime() *google_protobuf3.Timestamp { +func (m *Note) GetCreateTime() *timestamp.Timestamp { if m != nil { return m.CreateTime } return nil } -func (m *Note) GetUpdateTime() *google_protobuf3.Timestamp { +func (m *Note) GetUpdateTime() *timestamp.Timestamp { if m != nil { return m.UpdateTime } return nil } +func (m *Note) GetOperationName() string { + if m != nil { + return m.OperationName + } + return "" +} + // XXX_OneofFuncs is for the internal use of the proto package. 
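// Illustrative sketch (not part of the generated file): Note (and Occurrence,
// further down) gain an OperationName field, tag 19, linking the resource to
// the Operation that produced it. A trivial example of reading it, assuming
// the "pb" import alias for this vendored package:
//
//	func noteOperation(n *pb.Note) (string, bool) {
//		name := n.GetOperationName() // "projects/{project_id}/operation/{OPERATION_ID}"
//		return name, name != ""
//	}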
func (*Note) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { return _Note_OneofMarshaler, _Note_OneofUnmarshaler, _Note_OneofSizer, []interface{}{ @@ -2343,32 +3314,32 @@ func _Note_OneofSizer(msg proto.Message) (n int) { switch x := m.NoteType.(type) { case *Note_VulnerabilityType: s := proto.Size(x.VulnerabilityType) - n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Note_BuildType: s := proto.Size(x.BuildType) - n += proto.SizeVarint(8<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Note_BaseImage: s := proto.Size(x.BaseImage) - n += proto.SizeVarint(13<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Note_Package: s := proto.Size(x.Package) - n += proto.SizeVarint(14<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Note_Deployable: s := proto.Size(x.Deployable) - n += proto.SizeVarint(17<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Note_Discovery: s := proto.Size(x.Discovery) - n += proto.SizeVarint(18<<3 | proto.WireBytes) + n += 2 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -2383,13 +3354,35 @@ type Note_RelatedUrl struct { // Specific URL to associate with the note Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` // Label to describe usage of the URL - Label string `protobuf:"bytes,2,opt,name=label" json:"label,omitempty"` + Label string `protobuf:"bytes,2,opt,name=label" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Note_RelatedUrl) Reset() { *m = Note_RelatedUrl{} } -func (m *Note_RelatedUrl) String() string { return proto.CompactTextString(m) } -func (*Note_RelatedUrl) ProtoMessage() {} -func (*Note_RelatedUrl) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37, 0} } +func (m *Note_RelatedUrl) Reset() { *m = Note_RelatedUrl{} } +func (m *Note_RelatedUrl) String() string { return proto.CompactTextString(m) } +func (*Note_RelatedUrl) ProtoMessage() {} +func (*Note_RelatedUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{37, 0} +} +func (m *Note_RelatedUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Note_RelatedUrl.Unmarshal(m, b) +} +func (m *Note_RelatedUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Note_RelatedUrl.Marshal(b, m, deterministic) +} +func (dst *Note_RelatedUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Note_RelatedUrl.Merge(dst, src) +} +func (m *Note_RelatedUrl) XXX_Size() int { + return xxx_messageInfo_Note_RelatedUrl.Size(m) +} +func (m *Note_RelatedUrl) XXX_DiscardUnknown() { + xxx_messageInfo_Note_RelatedUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_Note_RelatedUrl proto.InternalMessageInfo func (m *Note_RelatedUrl) GetUrl() string { if m != nil { @@ -2426,23 +3419,49 @@ type Occurrence struct { // Types that are valid to be assigned to Details: // *Occurrence_VulnerabilityDetails // *Occurrence_BuildDetails - // *Occurrence_DerivedImage - // *Occurrence_Installation - // *Occurrence_Deployment - // *Occurrence_Discovered + // *Occurrence_DerivedImageDetails + // *Occurrence_InstallationDetails + // 
*Occurrence_DeploymentDetails + // *Occurrence_DiscoveredDetails + // *Occurrence_AttestationDetails Details isOccurrence_Details `protobuf_oneof:"details"` // A description of actions that can be taken to remedy the `Note` Remediation string `protobuf:"bytes,5,opt,name=remediation" json:"remediation,omitempty"` // Output only. The time this `Occurrence` was created. - CreateTime *google_protobuf3.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime" json:"create_time,omitempty"` + CreateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime" json:"create_time,omitempty"` // Output only. The time this `Occurrence` was last updated. - UpdateTime *google_protobuf3.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + UpdateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + // The name of the `Operation` in the form + // "projects/{project_id}/operation/{OPERATION_ID}" + OperationName string `protobuf:"bytes,19,opt,name=operation_name,json=operationName" json:"operation_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Occurrence) Reset() { *m = Occurrence{} } -func (m *Occurrence) String() string { return proto.CompactTextString(m) } -func (*Occurrence) ProtoMessage() {} -func (*Occurrence) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{38} } +func (m *Occurrence) Reset() { *m = Occurrence{} } +func (m *Occurrence) String() string { return proto.CompactTextString(m) } +func (*Occurrence) ProtoMessage() {} +func (*Occurrence) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{38} +} +func (m *Occurrence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Occurrence.Unmarshal(m, b) +} +func (m *Occurrence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Occurrence.Marshal(b, m, deterministic) +} +func (dst *Occurrence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Occurrence.Merge(dst, src) +} +func (m *Occurrence) XXX_Size() int { + return xxx_messageInfo_Occurrence.Size(m) +} +func (m *Occurrence) XXX_DiscardUnknown() { + xxx_messageInfo_Occurrence.DiscardUnknown(m) +} + +var xxx_messageInfo_Occurrence proto.InternalMessageInfo type isOccurrence_Details interface { isOccurrence_Details() @@ -2454,25 +3473,29 @@ type Occurrence_VulnerabilityDetails struct { type Occurrence_BuildDetails struct { BuildDetails *BuildDetails `protobuf:"bytes,7,opt,name=build_details,json=buildDetails,oneof"` } -type Occurrence_DerivedImage struct { - DerivedImage *DockerImage_Derived `protobuf:"bytes,11,opt,name=derived_image,json=derivedImage,oneof"` +type Occurrence_DerivedImageDetails struct { + DerivedImageDetails *DockerImage_DerivedDetails `protobuf:"bytes,11,opt,name=derived_image_details,json=derivedImageDetails,oneof"` } -type Occurrence_Installation struct { - Installation *PackageManager_Installation `protobuf:"bytes,12,opt,name=installation,oneof"` +type Occurrence_InstallationDetails struct { + InstallationDetails *PackageManager_InstallationDetails `protobuf:"bytes,12,opt,name=installation_details,json=installationDetails,oneof"` } -type Occurrence_Deployment struct { - Deployment *Deployable_Deployment `protobuf:"bytes,14,opt,name=deployment,oneof"` +type Occurrence_DeploymentDetails struct { + DeploymentDetails *Deployable_DeploymentDetails 
`protobuf:"bytes,14,opt,name=deployment_details,json=deploymentDetails,oneof"` } -type Occurrence_Discovered struct { - Discovered *Discovery_Discovered `protobuf:"bytes,15,opt,name=discovered,oneof"` +type Occurrence_DiscoveredDetails struct { + DiscoveredDetails *Discovery_DiscoveredDetails `protobuf:"bytes,15,opt,name=discovered_details,json=discoveredDetails,oneof"` +} +type Occurrence_AttestationDetails struct { + AttestationDetails *AttestationAuthority_AttestationDetails `protobuf:"bytes,16,opt,name=attestation_details,json=attestationDetails,oneof"` } func (*Occurrence_VulnerabilityDetails) isOccurrence_Details() {} func (*Occurrence_BuildDetails) isOccurrence_Details() {} -func (*Occurrence_DerivedImage) isOccurrence_Details() {} -func (*Occurrence_Installation) isOccurrence_Details() {} -func (*Occurrence_Deployment) isOccurrence_Details() {} -func (*Occurrence_Discovered) isOccurrence_Details() {} +func (*Occurrence_DerivedImageDetails) isOccurrence_Details() {} +func (*Occurrence_InstallationDetails) isOccurrence_Details() {} +func (*Occurrence_DeploymentDetails) isOccurrence_Details() {} +func (*Occurrence_DiscoveredDetails) isOccurrence_Details() {} +func (*Occurrence_AttestationDetails) isOccurrence_Details() {} func (m *Occurrence) GetDetails() isOccurrence_Details { if m != nil { @@ -2523,30 +3546,37 @@ func (m *Occurrence) GetBuildDetails() *BuildDetails { return nil } -func (m *Occurrence) GetDerivedImage() *DockerImage_Derived { - if x, ok := m.GetDetails().(*Occurrence_DerivedImage); ok { - return x.DerivedImage +func (m *Occurrence) GetDerivedImageDetails() *DockerImage_DerivedDetails { + if x, ok := m.GetDetails().(*Occurrence_DerivedImageDetails); ok { + return x.DerivedImageDetails } return nil } -func (m *Occurrence) GetInstallation() *PackageManager_Installation { - if x, ok := m.GetDetails().(*Occurrence_Installation); ok { - return x.Installation +func (m *Occurrence) GetInstallationDetails() *PackageManager_InstallationDetails { + if x, ok := m.GetDetails().(*Occurrence_InstallationDetails); ok { + return x.InstallationDetails } return nil } -func (m *Occurrence) GetDeployment() *Deployable_Deployment { - if x, ok := m.GetDetails().(*Occurrence_Deployment); ok { - return x.Deployment +func (m *Occurrence) GetDeploymentDetails() *Deployable_DeploymentDetails { + if x, ok := m.GetDetails().(*Occurrence_DeploymentDetails); ok { + return x.DeploymentDetails } return nil } -func (m *Occurrence) GetDiscovered() *Discovery_Discovered { - if x, ok := m.GetDetails().(*Occurrence_Discovered); ok { - return x.Discovered +func (m *Occurrence) GetDiscoveredDetails() *Discovery_DiscoveredDetails { + if x, ok := m.GetDetails().(*Occurrence_DiscoveredDetails); ok { + return x.DiscoveredDetails + } + return nil +} + +func (m *Occurrence) GetAttestationDetails() *AttestationAuthority_AttestationDetails { + if x, ok := m.GetDetails().(*Occurrence_AttestationDetails); ok { + return x.AttestationDetails } return nil } @@ -2558,29 +3588,37 @@ func (m *Occurrence) GetRemediation() string { return "" } -func (m *Occurrence) GetCreateTime() *google_protobuf3.Timestamp { +func (m *Occurrence) GetCreateTime() *timestamp.Timestamp { if m != nil { return m.CreateTime } return nil } -func (m *Occurrence) GetUpdateTime() *google_protobuf3.Timestamp { +func (m *Occurrence) GetUpdateTime() *timestamp.Timestamp { if m != nil { return m.UpdateTime } return nil } +func (m *Occurrence) GetOperationName() string { + if m != nil { + return m.OperationName + } + return "" +} + // XXX_OneofFuncs is 
for the internal use of the proto package. func (*Occurrence) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { return _Occurrence_OneofMarshaler, _Occurrence_OneofUnmarshaler, _Occurrence_OneofSizer, []interface{}{ (*Occurrence_VulnerabilityDetails)(nil), (*Occurrence_BuildDetails)(nil), - (*Occurrence_DerivedImage)(nil), - (*Occurrence_Installation)(nil), - (*Occurrence_Deployment)(nil), - (*Occurrence_Discovered)(nil), + (*Occurrence_DerivedImageDetails)(nil), + (*Occurrence_InstallationDetails)(nil), + (*Occurrence_DeploymentDetails)(nil), + (*Occurrence_DiscoveredDetails)(nil), + (*Occurrence_AttestationDetails)(nil), } } @@ -2598,24 +3636,29 @@ func _Occurrence_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { if err := b.EncodeMessage(x.BuildDetails); err != nil { return err } - case *Occurrence_DerivedImage: + case *Occurrence_DerivedImageDetails: b.EncodeVarint(11<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.DerivedImage); err != nil { + if err := b.EncodeMessage(x.DerivedImageDetails); err != nil { return err } - case *Occurrence_Installation: + case *Occurrence_InstallationDetails: b.EncodeVarint(12<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Installation); err != nil { + if err := b.EncodeMessage(x.InstallationDetails); err != nil { return err } - case *Occurrence_Deployment: + case *Occurrence_DeploymentDetails: b.EncodeVarint(14<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Deployment); err != nil { + if err := b.EncodeMessage(x.DeploymentDetails); err != nil { return err } - case *Occurrence_Discovered: + case *Occurrence_DiscoveredDetails: b.EncodeVarint(15<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Discovered); err != nil { + if err := b.EncodeMessage(x.DiscoveredDetails); err != nil { + return err + } + case *Occurrence_AttestationDetails: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AttestationDetails); err != nil { return err } case nil: @@ -2644,37 +3687,45 @@ func _Occurrence_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buf err := b.DecodeMessage(msg) m.Details = &Occurrence_BuildDetails{msg} return true, err - case 11: // details.derived_image + case 11: // details.derived_image_details if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } - msg := new(DockerImage_Derived) + msg := new(DockerImage_DerivedDetails) err := b.DecodeMessage(msg) - m.Details = &Occurrence_DerivedImage{msg} + m.Details = &Occurrence_DerivedImageDetails{msg} return true, err - case 12: // details.installation + case 12: // details.installation_details if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } - msg := new(PackageManager_Installation) + msg := new(PackageManager_InstallationDetails) err := b.DecodeMessage(msg) - m.Details = &Occurrence_Installation{msg} + m.Details = &Occurrence_InstallationDetails{msg} return true, err - case 14: // details.deployment + case 14: // details.deployment_details if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } - msg := new(Deployable_Deployment) + msg := new(Deployable_DeploymentDetails) err := b.DecodeMessage(msg) - m.Details = &Occurrence_Deployment{msg} + m.Details = &Occurrence_DeploymentDetails{msg} return true, err - case 15: // details.discovered + case 15: // details.discovered_details if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } - msg := 
new(Discovery_Discovered) + msg := new(Discovery_DiscoveredDetails) err := b.DecodeMessage(msg) - m.Details = &Occurrence_Discovered{msg} + m.Details = &Occurrence_DiscoveredDetails{msg} + return true, err + case 16: // details.attestation_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AttestationAuthority_AttestationDetails) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_AttestationDetails{msg} return true, err default: return false, nil @@ -2687,32 +3738,37 @@ func _Occurrence_OneofSizer(msg proto.Message) (n int) { switch x := m.Details.(type) { case *Occurrence_VulnerabilityDetails: s := proto.Size(x.VulnerabilityDetails) - n += proto.SizeVarint(8<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Occurrence_BuildDetails: s := proto.Size(x.BuildDetails) - n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s - case *Occurrence_DerivedImage: - s := proto.Size(x.DerivedImage) - n += proto.SizeVarint(11<<3 | proto.WireBytes) + case *Occurrence_DerivedImageDetails: + s := proto.Size(x.DerivedImageDetails) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s - case *Occurrence_Installation: - s := proto.Size(x.Installation) - n += proto.SizeVarint(12<<3 | proto.WireBytes) + case *Occurrence_InstallationDetails: + s := proto.Size(x.InstallationDetails) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s - case *Occurrence_Deployment: - s := proto.Size(x.Deployment) - n += proto.SizeVarint(14<<3 | proto.WireBytes) + case *Occurrence_DeploymentDetails: + s := proto.Size(x.DeploymentDetails) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s - case *Occurrence_Discovered: - s := proto.Size(x.Discovered) - n += proto.SizeVarint(15<<3 | proto.WireBytes) + case *Occurrence_DiscoveredDetails: + s := proto.Size(x.DiscoveredDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_AttestationDetails: + s := proto.Size(x.AttestationDetails) + n += 2 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -2724,12 +3780,34 @@ func _Occurrence_OneofSizer(msg proto.Message) (n int) { // PackageManager provides metadata about available / installed packages. 
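
// ---------------------------------------------------------------------------
// Editor's sketch — not part of the generated file or of this patch. The
// Occurrence.Details oneof wrappers above were renamed with a *Details suffix
// and gained an attestation case, so caller code that type-switches on the
// oneof has to use the new names. describeDetails is a hypothetical helper;
// the pb import path again assumes the vendored layout used by this repository.
package main

import (
	"fmt"

	pb "github.com/grafeas/grafeas/v1alpha1/proto"
)

// describeDetails reports which details case is set on an Occurrence, using
// the renamed oneof wrapper types from this revision.
func describeDetails(o *pb.Occurrence) string {
	switch d := o.GetDetails().(type) {
	case *pb.Occurrence_VulnerabilityDetails:
		return fmt.Sprintf("vulnerability: %v", d.VulnerabilityDetails)
	case *pb.Occurrence_BuildDetails:
		return fmt.Sprintf("build: %v", d.BuildDetails)
	case *pb.Occurrence_DerivedImageDetails: // was Occurrence_DerivedImage
		return fmt.Sprintf("derived image: %v", d.DerivedImageDetails)
	case *pb.Occurrence_InstallationDetails: // was Occurrence_Installation
		return fmt.Sprintf("installation: %v", d.InstallationDetails)
	case *pb.Occurrence_DeploymentDetails: // was Occurrence_Deployment
		return fmt.Sprintf("deployment: %v", d.DeploymentDetails)
	case *pb.Occurrence_DiscoveredDetails: // was Occurrence_Discovered
		return fmt.Sprintf("discovery: %v", d.DiscoveredDetails)
	case *pb.Occurrence_AttestationDetails: // new in this revision
		return fmt.Sprintf("attestation: %v", d.AttestationDetails)
	default:
		return "no details set"
	}
}

func main() {
	fmt.Println(describeDetails(&pb.Occurrence{}))
}
// ---------------------------------------------------------------------------
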
type PackageManager struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PackageManager) Reset() { *m = PackageManager{} } -func (m *PackageManager) String() string { return proto.CompactTextString(m) } -func (*PackageManager) ProtoMessage() {} -func (*PackageManager) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39} } +func (m *PackageManager) Reset() { *m = PackageManager{} } +func (m *PackageManager) String() string { return proto.CompactTextString(m) } +func (*PackageManager) ProtoMessage() {} +func (*PackageManager) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39} +} +func (m *PackageManager) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager.Unmarshal(m, b) +} +func (m *PackageManager) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager.Marshal(b, m, deterministic) +} +func (dst *PackageManager) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager.Merge(dst, src) +} +func (m *PackageManager) XXX_Size() int { + return xxx_messageInfo_PackageManager.Size(m) +} +func (m *PackageManager) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager proto.InternalMessageInfo // This represents a particular channel of distribution for a given package. // e.g. Debian's jessie-backports dpkg mirror @@ -2748,13 +3826,35 @@ type PackageManager_Distribution struct { // The distribution channel-specific homepage for this package. Url string `protobuf:"bytes,6,opt,name=url" json:"url,omitempty"` // The distribution channel-specific description of this package. - Description string `protobuf:"bytes,7,opt,name=description" json:"description,omitempty"` + Description string `protobuf:"bytes,7,opt,name=description" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PackageManager_Distribution) Reset() { *m = PackageManager_Distribution{} } -func (m *PackageManager_Distribution) String() string { return proto.CompactTextString(m) } -func (*PackageManager_Distribution) ProtoMessage() {} -func (*PackageManager_Distribution) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 0} } +func (m *PackageManager_Distribution) Reset() { *m = PackageManager_Distribution{} } +func (m *PackageManager_Distribution) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Distribution) ProtoMessage() {} +func (*PackageManager_Distribution) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39, 0} +} +func (m *PackageManager_Distribution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Distribution.Unmarshal(m, b) +} +func (m *PackageManager_Distribution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Distribution.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Distribution) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Distribution.Merge(dst, src) +} +func (m *PackageManager_Distribution) XXX_Size() int { + return xxx_messageInfo_PackageManager_Distribution.Size(m) +} +func (m *PackageManager_Distribution) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Distribution.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Distribution proto.InternalMessageInfo func (m 
*PackageManager_Distribution) GetCpeUri() string { if m != nil { @@ -2808,13 +3908,35 @@ type PackageManager_Location struct { // The version installed at this location. Version *VulnerabilityType_Version `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"` // The path from which we gathered that this package/version is installed. - Path string `protobuf:"bytes,3,opt,name=path" json:"path,omitempty"` + Path string `protobuf:"bytes,3,opt,name=path" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PackageManager_Location) Reset() { *m = PackageManager_Location{} } -func (m *PackageManager_Location) String() string { return proto.CompactTextString(m) } -func (*PackageManager_Location) ProtoMessage() {} -func (*PackageManager_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 1} } +func (m *PackageManager_Location) Reset() { *m = PackageManager_Location{} } +func (m *PackageManager_Location) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Location) ProtoMessage() {} +func (*PackageManager_Location) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39, 1} +} +func (m *PackageManager_Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Location.Unmarshal(m, b) +} +func (m *PackageManager_Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Location.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Location.Merge(dst, src) +} +func (m *PackageManager_Location) XXX_Size() int { + return xxx_messageInfo_PackageManager_Location.Size(m) +} +func (m *PackageManager_Location) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Location proto.InternalMessageInfo func (m *PackageManager_Location) GetCpeUri() string { if m != nil { @@ -2844,13 +3966,35 @@ type PackageManager_Package struct { // The name of the package. Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // The various channels by which a package is distributed. 
- Distribution []*PackageManager_Distribution `protobuf:"bytes,10,rep,name=distribution" json:"distribution,omitempty"` + Distribution []*PackageManager_Distribution `protobuf:"bytes,10,rep,name=distribution" json:"distribution,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PackageManager_Package) Reset() { *m = PackageManager_Package{} } -func (m *PackageManager_Package) String() string { return proto.CompactTextString(m) } -func (*PackageManager_Package) ProtoMessage() {} -func (*PackageManager_Package) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 2} } +func (m *PackageManager_Package) Reset() { *m = PackageManager_Package{} } +func (m *PackageManager_Package) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Package) ProtoMessage() {} +func (*PackageManager_Package) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39, 2} +} +func (m *PackageManager_Package) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Package.Unmarshal(m, b) +} +func (m *PackageManager_Package) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Package.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Package) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Package.Merge(dst, src) +} +func (m *PackageManager_Package) XXX_Size() int { + return xxx_messageInfo_PackageManager_Package.Size(m) +} +func (m *PackageManager_Package) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Package.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Package proto.InternalMessageInfo func (m *PackageManager_Package) GetName() string { if m != nil { @@ -2868,27 +4012,49 @@ func (m *PackageManager_Package) GetDistribution() []*PackageManager_Distributio // This represents how a particular software package may be installed on // a system. -type PackageManager_Installation struct { +type PackageManager_InstallationDetails struct { // Output only. The name of the installed package. Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // All of the places within the filesystem versions of this package // have been found. 
- Location []*PackageManager_Location `protobuf:"bytes,2,rep,name=location" json:"location,omitempty"` + Location []*PackageManager_Location `protobuf:"bytes,2,rep,name=location" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PackageManager_Installation) Reset() { *m = PackageManager_Installation{} } -func (m *PackageManager_Installation) String() string { return proto.CompactTextString(m) } -func (*PackageManager_Installation) ProtoMessage() {} -func (*PackageManager_Installation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39, 3} } +func (m *PackageManager_InstallationDetails) Reset() { *m = PackageManager_InstallationDetails{} } +func (m *PackageManager_InstallationDetails) String() string { return proto.CompactTextString(m) } +func (*PackageManager_InstallationDetails) ProtoMessage() {} +func (*PackageManager_InstallationDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{39, 3} +} +func (m *PackageManager_InstallationDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_InstallationDetails.Unmarshal(m, b) +} +func (m *PackageManager_InstallationDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_InstallationDetails.Marshal(b, m, deterministic) +} +func (dst *PackageManager_InstallationDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_InstallationDetails.Merge(dst, src) +} +func (m *PackageManager_InstallationDetails) XXX_Size() int { + return xxx_messageInfo_PackageManager_InstallationDetails.Size(m) +} +func (m *PackageManager_InstallationDetails) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_InstallationDetails.DiscardUnknown(m) +} -func (m *PackageManager_Installation) GetName() string { +var xxx_messageInfo_PackageManager_InstallationDetails proto.InternalMessageInfo + +func (m *PackageManager_InstallationDetails) GetName() string { if m != nil { return m.Name } return "" } -func (m *PackageManager_Installation) GetLocation() []*PackageManager_Location { +func (m *PackageManager_InstallationDetails) GetLocation() []*PackageManager_Location { if m != nil { return m.Location } @@ -2925,13 +4091,35 @@ type PgpSignedAttestation struct { // // Types that are valid to be assigned to KeyId: // *PgpSignedAttestation_PgpKeyId - KeyId isPgpSignedAttestation_KeyId `protobuf_oneof:"key_id"` + KeyId isPgpSignedAttestation_KeyId `protobuf_oneof:"key_id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *PgpSignedAttestation) Reset() { *m = PgpSignedAttestation{} } -func (m *PgpSignedAttestation) String() string { return proto.CompactTextString(m) } -func (*PgpSignedAttestation) ProtoMessage() {} -func (*PgpSignedAttestation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{40} } +func (m *PgpSignedAttestation) Reset() { *m = PgpSignedAttestation{} } +func (m *PgpSignedAttestation) String() string { return proto.CompactTextString(m) } +func (*PgpSignedAttestation) ProtoMessage() {} +func (*PgpSignedAttestation) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{40} +} +func (m *PgpSignedAttestation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PgpSignedAttestation.Unmarshal(m, b) +} +func (m *PgpSignedAttestation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_PgpSignedAttestation.Marshal(b, m, deterministic) +} +func (dst *PgpSignedAttestation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PgpSignedAttestation.Merge(dst, src) +} +func (m *PgpSignedAttestation) XXX_Size() int { + return xxx_messageInfo_PgpSignedAttestation.Size(m) +} +func (m *PgpSignedAttestation) XXX_DiscardUnknown() { + xxx_messageInfo_PgpSignedAttestation.DiscardUnknown(m) +} + +var xxx_messageInfo_PgpSignedAttestation proto.InternalMessageInfo type isPgpSignedAttestation_KeyId interface { isPgpSignedAttestation_KeyId() @@ -3012,7 +4200,7 @@ func _PgpSignedAttestation_OneofSizer(msg proto.Message) (n int) { // key_id switch x := m.KeyId.(type) { case *PgpSignedAttestation_PgpKeyId: - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.PgpKeyId))) n += len(x.PgpKeyId) case nil: @@ -3048,13 +4236,35 @@ type Source struct { // these locations, in the case where the source repository had multiple // remotes or submodules. This list will not include the context specified in // the context field. - AdditionalContexts []*SourceContext `protobuf:"bytes,8,rep,name=additional_contexts,json=additionalContexts" json:"additional_contexts,omitempty"` + AdditionalContexts []*SourceContext `protobuf:"bytes,8,rep,name=additional_contexts,json=additionalContexts" json:"additional_contexts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Source) Reset() { *m = Source{} } -func (m *Source) String() string { return proto.CompactTextString(m) } -func (*Source) ProtoMessage() {} -func (*Source) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{41} } +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{41} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo type isSource_Source interface { isSource_Source() @@ -3178,12 +4388,12 @@ func _Source_OneofSizer(msg proto.Message) (n int) { switch x := m.Source.(type) { case *Source_StorageSource: s := proto.Size(x.StorageSource) - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *Source_RepoSource: s := proto.Size(x.RepoSource) - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -3207,13 +4417,35 @@ type RepoSource struct { // *RepoSource_BranchName // *RepoSource_TagName // *RepoSource_CommitSha - Revision isRepoSource_Revision `protobuf_oneof:"revision"` + Revision isRepoSource_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *RepoSource) Reset() { *m = RepoSource{} } -func (m *RepoSource) String() string { return proto.CompactTextString(m) 
} -func (*RepoSource) ProtoMessage() {} -func (*RepoSource) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{42} } +func (m *RepoSource) Reset() { *m = RepoSource{} } +func (m *RepoSource) String() string { return proto.CompactTextString(m) } +func (*RepoSource) ProtoMessage() {} +func (*RepoSource) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{42} +} +func (m *RepoSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoSource.Unmarshal(m, b) +} +func (m *RepoSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoSource.Marshal(b, m, deterministic) +} +func (dst *RepoSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoSource.Merge(dst, src) +} +func (m *RepoSource) XXX_Size() int { + return xxx_messageInfo_RepoSource.Size(m) +} +func (m *RepoSource) XXX_DiscardUnknown() { + xxx_messageInfo_RepoSource.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoSource proto.InternalMessageInfo type isRepoSource_Revision interface { isRepoSource_Revision() @@ -3338,15 +4570,15 @@ func _RepoSource_OneofSizer(msg proto.Message) (n int) { // revision switch x := m.Revision.(type) { case *RepoSource_BranchName: - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.BranchName))) n += len(x.BranchName) case *RepoSource_TagName: - n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.TagName))) n += len(x.TagName) case *RepoSource_CommitSha: - n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.CommitSha))) n += len(x.CommitSha) case nil: @@ -3366,13 +4598,35 @@ type StorageSource struct { // Google Cloud Storage object containing source. Object string `protobuf:"bytes,2,opt,name=object" json:"object,omitempty"` // Google Cloud Storage generation for the object. 
- Generation int64 `protobuf:"varint,3,opt,name=generation" json:"generation,omitempty"` + Generation int64 `protobuf:"varint,3,opt,name=generation" json:"generation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *StorageSource) Reset() { *m = StorageSource{} } -func (m *StorageSource) String() string { return proto.CompactTextString(m) } -func (*StorageSource) ProtoMessage() {} -func (*StorageSource) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{43} } +func (m *StorageSource) Reset() { *m = StorageSource{} } +func (m *StorageSource) String() string { return proto.CompactTextString(m) } +func (*StorageSource) ProtoMessage() {} +func (*StorageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{43} +} +func (m *StorageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StorageSource.Unmarshal(m, b) +} +func (m *StorageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StorageSource.Marshal(b, m, deterministic) +} +func (dst *StorageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageSource.Merge(dst, src) +} +func (m *StorageSource) XXX_Size() int { + return xxx_messageInfo_StorageSource.Size(m) +} +func (m *StorageSource) XXX_DiscardUnknown() { + xxx_messageInfo_StorageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_StorageSource proto.InternalMessageInfo func (m *StorageSource) GetBucket() string { if m != nil { @@ -3404,13 +4658,35 @@ type VulnerabilityType struct { // All information about the package to specifically identify this // vulnerability. One entry per (version range and cpe_uri) the // package vulnerability has manifested in. 
- Details []*VulnerabilityType_Detail `protobuf:"bytes,4,rep,name=details" json:"details,omitempty"` + Details []*VulnerabilityType_Detail `protobuf:"bytes,4,rep,name=details" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *VulnerabilityType) Reset() { *m = VulnerabilityType{} } -func (m *VulnerabilityType) String() string { return proto.CompactTextString(m) } -func (*VulnerabilityType) ProtoMessage() {} -func (*VulnerabilityType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44} } +func (m *VulnerabilityType) Reset() { *m = VulnerabilityType{} } +func (m *VulnerabilityType) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType) ProtoMessage() {} +func (*VulnerabilityType) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44} +} +func (m *VulnerabilityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType.Unmarshal(m, b) +} +func (m *VulnerabilityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType.Merge(dst, src) +} +func (m *VulnerabilityType) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType.Size(m) +} +func (m *VulnerabilityType) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType proto.InternalMessageInfo func (m *VulnerabilityType) GetCvssScore() float32 { if m != nil { @@ -3447,13 +4723,35 @@ type VulnerabilityType_Version struct { Revision string `protobuf:"bytes,3,opt,name=revision" json:"revision,omitempty"` // Distinguish between sentinel MIN/MAX versions and normal versions. // If kind is not NORMAL, then the other fields are ignored. 
- Kind VulnerabilityType_Version_VersionKind `protobuf:"varint,5,opt,name=kind,enum=grafeas.v1alpha1.api.VulnerabilityType_Version_VersionKind" json:"kind,omitempty"` + Kind VulnerabilityType_Version_VersionKind `protobuf:"varint,5,opt,name=kind,enum=grafeas.v1alpha1.api.VulnerabilityType_Version_VersionKind" json:"kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *VulnerabilityType_Version) Reset() { *m = VulnerabilityType_Version{} } -func (m *VulnerabilityType_Version) String() string { return proto.CompactTextString(m) } -func (*VulnerabilityType_Version) ProtoMessage() {} -func (*VulnerabilityType_Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44, 0} } +func (m *VulnerabilityType_Version) Reset() { *m = VulnerabilityType_Version{} } +func (m *VulnerabilityType_Version) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Version) ProtoMessage() {} +func (*VulnerabilityType_Version) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 0} +} +func (m *VulnerabilityType_Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_Version.Unmarshal(m, b) +} +func (m *VulnerabilityType_Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_Version.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_Version.Merge(dst, src) +} +func (m *VulnerabilityType_Version) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_Version.Size(m) +} +func (m *VulnerabilityType_Version) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_Version proto.InternalMessageInfo func (m *VulnerabilityType_Version) GetEpoch() int32 { if m != nil { @@ -3508,13 +4806,35 @@ type VulnerabilityType_Detail struct { FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,5,opt,name=fixed_location,json=fixedLocation" json:"fixed_location,omitempty"` // The type of package; whether native or non native(ruby gems, // node.js packages etc) - PackageType string `protobuf:"bytes,10,opt,name=package_type,json=packageType" json:"package_type,omitempty"` + PackageType string `protobuf:"bytes,10,opt,name=package_type,json=packageType" json:"package_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *VulnerabilityType_Detail) Reset() { *m = VulnerabilityType_Detail{} } -func (m *VulnerabilityType_Detail) String() string { return proto.CompactTextString(m) } -func (*VulnerabilityType_Detail) ProtoMessage() {} -func (*VulnerabilityType_Detail) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44, 1} } +func (m *VulnerabilityType_Detail) Reset() { *m = VulnerabilityType_Detail{} } +func (m *VulnerabilityType_Detail) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Detail) ProtoMessage() {} +func (*VulnerabilityType_Detail) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 1} +} +func (m *VulnerabilityType_Detail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_Detail.Unmarshal(m, b) +} +func (m *VulnerabilityType_Detail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_VulnerabilityType_Detail.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_Detail) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_Detail.Merge(dst, src) +} +func (m *VulnerabilityType_Detail) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_Detail.Size(m) +} +func (m *VulnerabilityType_Detail) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_Detail.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_Detail proto.InternalMessageInfo func (m *VulnerabilityType_Detail) GetCpeUri() string { if m != nil { @@ -3586,7 +4906,10 @@ type VulnerabilityType_VulnerabilityDetails struct { CvssScore float32 `protobuf:"fixed32,5,opt,name=cvss_score,json=cvssScore" json:"cvss_score,omitempty"` // The set of affected locations and their fixes (if available) within // the associated resource. - PackageIssue []*VulnerabilityType_PackageIssue `protobuf:"bytes,6,rep,name=package_issue,json=packageIssue" json:"package_issue,omitempty"` + PackageIssue []*VulnerabilityType_PackageIssue `protobuf:"bytes,6,rep,name=package_issue,json=packageIssue" json:"package_issue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *VulnerabilityType_VulnerabilityDetails) Reset() { @@ -3595,8 +4918,25 @@ func (m *VulnerabilityType_VulnerabilityDetails) Reset() { func (m *VulnerabilityType_VulnerabilityDetails) String() string { return proto.CompactTextString(m) } func (*VulnerabilityType_VulnerabilityDetails) ProtoMessage() {} func (*VulnerabilityType_VulnerabilityDetails) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{44, 2} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 2} } +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Unmarshal(m, b) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_VulnerabilityDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Merge(dst, src) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Size(m) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_VulnerabilityDetails proto.InternalMessageInfo func (m *VulnerabilityType_VulnerabilityDetails) GetType() string { if m != nil { @@ -3634,15 +4974,35 @@ type VulnerabilityType_PackageIssue struct { // The location of the available fix for vulnerability. FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,2,opt,name=fixed_location,json=fixedLocation" json:"fixed_location,omitempty"` // The severity (eg: distro assigned severity) for this vulnerability. 
- SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName" json:"severity_name,omitempty"` + SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName" json:"severity_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *VulnerabilityType_PackageIssue) Reset() { *m = VulnerabilityType_PackageIssue{} } func (m *VulnerabilityType_PackageIssue) String() string { return proto.CompactTextString(m) } func (*VulnerabilityType_PackageIssue) ProtoMessage() {} func (*VulnerabilityType_PackageIssue) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{44, 3} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 3} } +func (m *VulnerabilityType_PackageIssue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Unmarshal(m, b) +} +func (m *VulnerabilityType_PackageIssue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_PackageIssue) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_PackageIssue.Merge(dst, src) +} +func (m *VulnerabilityType_PackageIssue) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Size(m) +} +func (m *VulnerabilityType_PackageIssue) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_PackageIssue.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_PackageIssue proto.InternalMessageInfo func (m *VulnerabilityType_PackageIssue) GetAffectedLocation() *VulnerabilityType_VulnerabilityLocation { if m != nil { @@ -3675,7 +5035,10 @@ type VulnerabilityType_VulnerabilityLocation struct { Package string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` // The version of the package being described. // This field can be used as a filter in list requests. 
- Version *VulnerabilityType_Version `protobuf:"bytes,4,opt,name=version" json:"version,omitempty"` + Version *VulnerabilityType_Version `protobuf:"bytes,4,opt,name=version" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *VulnerabilityType_VulnerabilityLocation) Reset() { @@ -3684,8 +5047,25 @@ func (m *VulnerabilityType_VulnerabilityLocation) Reset() { func (m *VulnerabilityType_VulnerabilityLocation) String() string { return proto.CompactTextString(m) } func (*VulnerabilityType_VulnerabilityLocation) ProtoMessage() {} func (*VulnerabilityType_VulnerabilityLocation) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{44, 4} + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{44, 4} } +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Unmarshal(m, b) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_VulnerabilityLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Merge(dst, src) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Size(m) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_VulnerabilityLocation proto.InternalMessageInfo func (m *VulnerabilityType_VulnerabilityLocation) GetCpeUri() string { if m != nil { @@ -3719,13 +5099,35 @@ type SourceContext struct { // *SourceContext_Git Context isSourceContext_Context `protobuf_oneof:"context"` // Labels with user defined metadata. 
- Labels map[string]string `protobuf:"bytes,4,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Labels map[string]string `protobuf:"bytes,4,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SourceContext) Reset() { *m = SourceContext{} } -func (m *SourceContext) String() string { return proto.CompactTextString(m) } -func (*SourceContext) ProtoMessage() {} -func (*SourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{45} } +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{45} +} +func (m *SourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceContext.Unmarshal(m, b) +} +func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceContext.Marshal(b, m, deterministic) +} +func (dst *SourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceContext.Merge(dst, src) +} +func (m *SourceContext) XXX_Size() int { + return xxx_messageInfo_SourceContext.Size(m) +} +func (m *SourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_SourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceContext proto.InternalMessageInfo type isSourceContext_Context interface { isSourceContext_Context() @@ -3853,17 +5255,17 @@ func _SourceContext_OneofSizer(msg proto.Message) (n int) { switch x := m.Context.(type) { case *SourceContext_CloudRepo: s := proto.Size(x.CloudRepo) - n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *SourceContext_Gerrit: s := proto.Size(x.Gerrit) - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case *SourceContext_Git: s := proto.Size(x.Git) - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -3878,13 +5280,35 @@ type AliasContext struct { // The alias kind. Kind AliasContext_Kind `protobuf:"varint,1,opt,name=kind,enum=grafeas.v1alpha1.api.AliasContext_Kind" json:"kind,omitempty"` // The alias name. 
- Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *AliasContext) Reset() { *m = AliasContext{} } -func (m *AliasContext) String() string { return proto.CompactTextString(m) } -func (*AliasContext) ProtoMessage() {} -func (*AliasContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{46} } +func (m *AliasContext) Reset() { *m = AliasContext{} } +func (m *AliasContext) String() string { return proto.CompactTextString(m) } +func (*AliasContext) ProtoMessage() {} +func (*AliasContext) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{46} +} +func (m *AliasContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AliasContext.Unmarshal(m, b) +} +func (m *AliasContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AliasContext.Marshal(b, m, deterministic) +} +func (dst *AliasContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_AliasContext.Merge(dst, src) +} +func (m *AliasContext) XXX_Size() int { + return xxx_messageInfo_AliasContext.Size(m) +} +func (m *AliasContext) XXX_DiscardUnknown() { + xxx_messageInfo_AliasContext.DiscardUnknown(m) +} + +var xxx_messageInfo_AliasContext proto.InternalMessageInfo func (m *AliasContext) GetKind() AliasContext_Kind { if m != nil { @@ -3911,13 +5335,35 @@ type CloudRepoSourceContext struct { // Types that are valid to be assigned to Revision: // *CloudRepoSourceContext_RevisionId // *CloudRepoSourceContext_AliasContext - Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` + Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } -func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } -func (*CloudRepoSourceContext) ProtoMessage() {} -func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{47} } +func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } +func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudRepoSourceContext) ProtoMessage() {} +func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{47} +} +func (m *CloudRepoSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudRepoSourceContext.Unmarshal(m, b) +} +func (m *CloudRepoSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudRepoSourceContext.Marshal(b, m, deterministic) +} +func (dst *CloudRepoSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudRepoSourceContext.Merge(dst, src) +} +func (m *CloudRepoSourceContext) XXX_Size() int { + return xxx_messageInfo_CloudRepoSourceContext.Size(m) +} +func (m *CloudRepoSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_CloudRepoSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudRepoSourceContext proto.InternalMessageInfo type isCloudRepoSourceContext_Revision interface { isCloudRepoSourceContext_Revision() @@ -4016,12 +5462,12 @@ func _CloudRepoSourceContext_OneofSizer(msg proto.Message) (n int) { // revision switch x := 
m.Revision.(type) { case *CloudRepoSourceContext_RevisionId: - n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.RevisionId))) n += len(x.RevisionId) case *CloudRepoSourceContext_AliasContext: s := proto.Size(x.AliasContext) - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -4045,13 +5491,35 @@ type GerritSourceContext struct { // Types that are valid to be assigned to Revision: // *GerritSourceContext_RevisionId // *GerritSourceContext_AliasContext - Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` + Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } -func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } -func (*GerritSourceContext) ProtoMessage() {} -func (*GerritSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{48} } +func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } +func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } +func (*GerritSourceContext) ProtoMessage() {} +func (*GerritSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{48} +} +func (m *GerritSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GerritSourceContext.Unmarshal(m, b) +} +func (m *GerritSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GerritSourceContext.Marshal(b, m, deterministic) +} +func (dst *GerritSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GerritSourceContext.Merge(dst, src) +} +func (m *GerritSourceContext) XXX_Size() int { + return xxx_messageInfo_GerritSourceContext.Size(m) +} +func (m *GerritSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GerritSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GerritSourceContext proto.InternalMessageInfo type isGerritSourceContext_Revision interface { isGerritSourceContext_Revision() @@ -4157,12 +5625,12 @@ func _GerritSourceContext_OneofSizer(msg proto.Message) (n int) { // revision switch x := m.Revision.(type) { case *GerritSourceContext_RevisionId: - n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(len(x.RevisionId))) n += len(x.RevisionId) case *GerritSourceContext_AliasContext: s := proto.Size(x.AliasContext) - n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: @@ -4179,13 +5647,35 @@ type GitSourceContext struct { Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` // Required. // Git commit hash. 
-	RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId" json:"revision_id,omitempty"`
+	RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId" json:"revision_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
}

-func (m *GitSourceContext) Reset() { *m = GitSourceContext{} }
-func (m *GitSourceContext) String() string { return proto.CompactTextString(m) }
-func (*GitSourceContext) ProtoMessage() {}
-func (*GitSourceContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{49} }
+func (m *GitSourceContext) Reset() { *m = GitSourceContext{} }
+func (m *GitSourceContext) String() string { return proto.CompactTextString(m) }
+func (*GitSourceContext) ProtoMessage() {}
+func (*GitSourceContext) Descriptor() ([]byte, []int) {
+	return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{49}
+}
+func (m *GitSourceContext) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GitSourceContext.Unmarshal(m, b)
+}
+func (m *GitSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GitSourceContext.Marshal(b, m, deterministic)
+}
+func (dst *GitSourceContext) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GitSourceContext.Merge(dst, src)
+}
+func (m *GitSourceContext) XXX_Size() int {
+	return xxx_messageInfo_GitSourceContext.Size(m)
+}
+func (m *GitSourceContext) XXX_DiscardUnknown() {
+	xxx_messageInfo_GitSourceContext.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GitSourceContext proto.InternalMessageInfo

func (m *GitSourceContext) GetUrl() string {
	if m != nil {
@@ -4209,13 +5699,35 @@ type RepoId struct {
	// Types that are valid to be assigned to Id:
	// *RepoId_ProjectRepoId
	// *RepoId_Uid
-	Id isRepoId_Id `protobuf_oneof:"id"`
+	Id isRepoId_Id `protobuf_oneof:"id"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
}

-func (m *RepoId) Reset() { *m = RepoId{} }
-func (m *RepoId) String() string { return proto.CompactTextString(m) }
-func (*RepoId) ProtoMessage() {}
-func (*RepoId) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{50} }
+func (m *RepoId) Reset() { *m = RepoId{} }
+func (m *RepoId) String() string { return proto.CompactTextString(m) }
+func (*RepoId) ProtoMessage() {}
+func (*RepoId) Descriptor() ([]byte, []int) {
+	return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{50}
+}
+func (m *RepoId) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_RepoId.Unmarshal(m, b)
+}
+func (m *RepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_RepoId.Marshal(b, m, deterministic)
+}
+func (dst *RepoId) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_RepoId.Merge(dst, src)
+}
+func (m *RepoId) XXX_Size() int {
+	return xxx_messageInfo_RepoId.Size(m)
+}
+func (m *RepoId) XXX_DiscardUnknown() {
+	xxx_messageInfo_RepoId.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_RepoId proto.InternalMessageInfo

type isRepoId_Id interface {
	isRepoId_Id()
@@ -4308,11 +5820,11 @@ func _RepoId_OneofSizer(msg proto.Message) (n int) {
	switch x := m.Id.(type) {
	case *RepoId_ProjectRepoId:
		s := proto.Size(x.ProjectRepoId)
-		n += proto.SizeVarint(1<<3 | proto.WireBytes)
+		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *RepoId_Uid:
-		n += proto.SizeVarint(2<<3 | proto.WireBytes)
+		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.Uid)))
		n += len(x.Uid)
	case nil:
@@ -4328,13 +5840,35 @@ type ProjectRepoId struct {
	// The ID of the project.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// The name of the repo. Leave empty for the default repo.
-	RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName" json:"repo_name,omitempty"`
+	RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName" json:"repo_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
}

-func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} }
-func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) }
-func (*ProjectRepoId) ProtoMessage() {}
-func (*ProjectRepoId) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{51} }
+func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} }
+func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) }
+func (*ProjectRepoId) ProtoMessage() {}
+func (*ProjectRepoId) Descriptor() ([]byte, []int) {
+	return fileDescriptor_grafeas_f4518a1c0d4387f8, []int{51}
+}
+func (m *ProjectRepoId) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ProjectRepoId.Unmarshal(m, b)
+}
+func (m *ProjectRepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ProjectRepoId.Marshal(b, m, deterministic)
+}
+func (dst *ProjectRepoId) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ProjectRepoId.Merge(dst, src)
+}
+func (m *ProjectRepoId) XXX_Size() int {
+	return xxx_messageInfo_ProjectRepoId.Size(m)
+}
+func (m *ProjectRepoId) XXX_DiscardUnknown() {
+	xxx_messageInfo_ProjectRepoId.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ProjectRepoId proto.InternalMessageInfo

func (m *ProjectRepoId) GetProjectId() string {
	if m != nil {
@@ -4379,21 +5913,22 @@ func init() {
	proto.RegisterType((*Artifact)(nil), "grafeas.v1alpha1.api.Artifact")
	proto.RegisterType((*AttestationAuthority)(nil), "grafeas.v1alpha1.api.AttestationAuthority")
	proto.RegisterType((*AttestationAuthority_AttestationAuthorityHint)(nil), "grafeas.v1alpha1.api.AttestationAuthority.AttestationAuthorityHint")
-	proto.RegisterType((*AttestationAuthority_Attestation)(nil), "grafeas.v1alpha1.api.AttestationAuthority.Attestation")
+	proto.RegisterType((*AttestationAuthority_AttestationDetails)(nil), "grafeas.v1alpha1.api.AttestationAuthority.AttestationDetails")
	proto.RegisterType((*BuildDetails)(nil), "grafeas.v1alpha1.api.BuildDetails")
	proto.RegisterType((*BuildProvenance)(nil), "grafeas.v1alpha1.api.BuildProvenance")
+	proto.RegisterMapType((map[string]string)(nil), "grafeas.v1alpha1.api.BuildProvenance.BuildOptionsEntry")
	proto.RegisterType((*BuildSignature)(nil), "grafeas.v1alpha1.api.BuildSignature")
	proto.RegisterType((*BuildType)(nil), "grafeas.v1alpha1.api.BuildType")
	proto.RegisterType((*Command)(nil), "grafeas.v1alpha1.api.Command")
	proto.RegisterType((*Deployable)(nil), "grafeas.v1alpha1.api.Deployable")
-	proto.RegisterType((*Deployable_Deployment)(nil), "grafeas.v1alpha1.api.Deployable.Deployment")
+	proto.RegisterType((*Deployable_DeploymentDetails)(nil), "grafeas.v1alpha1.api.Deployable.DeploymentDetails")
	proto.RegisterType((*DockerImage)(nil), "grafeas.v1alpha1.api.DockerImage")
	proto.RegisterType((*DockerImage_Layer)(nil), "grafeas.v1alpha1.api.DockerImage.Layer")
	proto.RegisterType((*DockerImage_Fingerprint)(nil), "grafeas.v1alpha1.api.DockerImage.Fingerprint")
	proto.RegisterType((*DockerImage_Basis)(nil), "grafeas.v1alpha1.api.DockerImage.Basis")
-	proto.RegisterType((*DockerImage_Derived)(nil), "grafeas.v1alpha1.api.DockerImage.Derived")
+	proto.RegisterType((*DockerImage_DerivedDetails)(nil), "grafeas.v1alpha1.api.DockerImage.DerivedDetails")
	proto.RegisterType((*Discovery)(nil), "grafeas.v1alpha1.api.Discovery")
-	proto.RegisterType((*Discovery_Discovered)(nil), "grafeas.v1alpha1.api.Discovery.Discovered")
+	proto.RegisterType((*Discovery_DiscoveredDetails)(nil), "grafeas.v1alpha1.api.Discovery.DiscoveredDetails")
	proto.RegisterType((*FileHashes)(nil), "grafeas.v1alpha1.api.FileHashes")
	proto.RegisterType((*Hash)(nil), "grafeas.v1alpha1.api.Hash")
	proto.RegisterType((*Note)(nil), "grafeas.v1alpha1.api.Note")
@@ -4403,9 +5938,10 @@ func init() {
	proto.RegisterType((*PackageManager_Distribution)(nil), "grafeas.v1alpha1.api.PackageManager.Distribution")
	proto.RegisterType((*PackageManager_Location)(nil), "grafeas.v1alpha1.api.PackageManager.Location")
	proto.RegisterType((*PackageManager_Package)(nil), "grafeas.v1alpha1.api.PackageManager.Package")
-	proto.RegisterType((*PackageManager_Installation)(nil), "grafeas.v1alpha1.api.PackageManager.Installation")
+	proto.RegisterType((*PackageManager_InstallationDetails)(nil), "grafeas.v1alpha1.api.PackageManager.InstallationDetails")
	proto.RegisterType((*PgpSignedAttestation)(nil), "grafeas.v1alpha1.api.PgpSignedAttestation")
	proto.RegisterType((*Source)(nil), "grafeas.v1alpha1.api.Source")
+	proto.RegisterMapType((map[string]*FileHashes)(nil), "grafeas.v1alpha1.api.Source.FileHashesEntry")
	proto.RegisterType((*RepoSource)(nil), "grafeas.v1alpha1.api.RepoSource")
	proto.RegisterType((*StorageSource)(nil), "grafeas.v1alpha1.api.StorageSource")
	proto.RegisterType((*VulnerabilityType)(nil), "grafeas.v1alpha1.api.VulnerabilityType")
@@ -4415,6 +5951,7 @@ func init() {
	proto.RegisterType((*VulnerabilityType_PackageIssue)(nil), "grafeas.v1alpha1.api.VulnerabilityType.PackageIssue")
	proto.RegisterType((*VulnerabilityType_VulnerabilityLocation)(nil), "grafeas.v1alpha1.api.VulnerabilityType.VulnerabilityLocation")
	proto.RegisterType((*SourceContext)(nil), "grafeas.v1alpha1.api.SourceContext")
+	proto.RegisterMapType((map[string]string)(nil), "grafeas.v1alpha1.api.SourceContext.LabelsEntry")
	proto.RegisterType((*AliasContext)(nil), "grafeas.v1alpha1.api.AliasContext")
	proto.RegisterType((*CloudRepoSourceContext)(nil), "grafeas.v1alpha1.api.CloudRepoSourceContext")
	proto.RegisterType((*GerritSourceContext)(nil), "grafeas.v1alpha1.api.GerritSourceContext")
@@ -4422,7 +5959,7 @@ func init() {
	proto.RegisterType((*RepoId)(nil), "grafeas.v1alpha1.api.RepoId")
	proto.RegisterType((*ProjectRepoId)(nil), "grafeas.v1alpha1.api.ProjectRepoId")
	proto.RegisterEnum("grafeas.v1alpha1.api.BuildSignature_KeyType", BuildSignature_KeyType_name, BuildSignature_KeyType_value)
-	proto.RegisterEnum("grafeas.v1alpha1.api.Deployable_Deployment_Platform", Deployable_Deployment_Platform_name, Deployable_Deployment_Platform_value)
+	proto.RegisterEnum("grafeas.v1alpha1.api.Deployable_DeploymentDetails_Platform", Deployable_DeploymentDetails_Platform_name, Deployable_DeploymentDetails_Platform_value)
	proto.RegisterEnum("grafeas.v1alpha1.api.DockerImage_Layer_Directive", DockerImage_Layer_Directive_name, DockerImage_Layer_Directive_value)
	proto.RegisterEnum("grafeas.v1alpha1.api.Hash_HashType", Hash_HashType_name, Hash_HashType_value)
	proto.RegisterEnum("grafeas.v1alpha1.api.Note_Kind", Note_Kind_name, Note_Kind_value)
@@ -4450,7 +5987,7 @@ type GrafeasClient interface {
	ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error)
	// Deletes the given `Occurrence` from the system. Use this when
	// an `Occurrence` is no longer applicable for the given resource.
-	DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error)
+	DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Creates a new `Occurrence`. Use this method to create `Occurrences`
	// for a resource.
	CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error)
@@ -4459,17 +5996,17 @@ type GrafeasClient interface {
	// Gets the `Note` attached to the given `Occurrence`.
	GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error)
	// Creates a new `Operation`.
-	CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error)
+	CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error)
	// Updates an existing operation returns an error if operation
	// does not exist. The only valid operations are to update mark the done bit
	// change the result.
-	UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error)
+	UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error)
	// Returns the requested `Note`.
	GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error)
	// Lists all `Notes` for a given project.
	ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error)
	// Deletes the given `Note` from the system.
-	DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error)
+	DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Creates a new `Note`.
	CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error)
	// Updates an existing `Note`.
@@ -4506,8 +6043,8 @@ func (c *grafeasClient) ListOccurrences(ctx context.Context, in *ListOccurrences
	return out, nil
}

-func (c *grafeasClient) DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) {
-	out := new(google_protobuf.Empty)
+func (c *grafeasClient) DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+	out := new(empty.Empty)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/DeleteOccurrence", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -4542,8 +6079,8 @@ func (c *grafeasClient) GetOccurrenceNote(ctx context.Context, in *GetOccurrence
	return out, nil
}

-func (c *grafeasClient) CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) {
-	out := new(google_longrunning.Operation)
+func (c *grafeasClient) CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) {
+	out := new(longrunning.Operation)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/CreateOperation", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -4551,8 +6088,8 @@ func (c *grafeasClient) CreateOperation(ctx context.Context, in *CreateOperation
	return out, nil
}

-func (c *grafeasClient) UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) {
-	out := new(google_longrunning.Operation)
+func (c *grafeasClient) UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) {
+	out := new(longrunning.Operation)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/UpdateOperation", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -4578,8 +6115,8 @@ func (c *grafeasClient) ListNotes(ctx context.Context, in *ListNotesRequest, opt
	return out, nil
}

-func (c *grafeasClient) DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) {
-	out := new(google_protobuf.Empty)
+func (c *grafeasClient) DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+	out := new(empty.Empty)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.Grafeas/DeleteNote", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -4623,7 +6160,7 @@ type GrafeasServer interface {
	ListOccurrences(context.Context, *ListOccurrencesRequest) (*ListOccurrencesResponse, error)
	// Deletes the given `Occurrence` from the system. Use this when
	// an `Occurrence` is no longer applicable for the given resource.
-	DeleteOccurrence(context.Context, *DeleteOccurrenceRequest) (*google_protobuf.Empty, error)
+	DeleteOccurrence(context.Context, *DeleteOccurrenceRequest) (*empty.Empty, error)
	// Creates a new `Occurrence`. Use this method to create `Occurrences`
	// for a resource.
	CreateOccurrence(context.Context, *CreateOccurrenceRequest) (*Occurrence, error)
@@ -4632,17 +6169,17 @@ type GrafeasServer interface {
	// Gets the `Note` attached to the given `Occurrence`.
	GetOccurrenceNote(context.Context, *GetOccurrenceNoteRequest) (*Note, error)
	// Creates a new `Operation`.
-	CreateOperation(context.Context, *CreateOperationRequest) (*google_longrunning.Operation, error)
+	CreateOperation(context.Context, *CreateOperationRequest) (*longrunning.Operation, error)
	// Updates an existing operation returns an error if operation
	// does not exist. The only valid operations are to update mark the done bit
	// change the result.
-	UpdateOperation(context.Context, *UpdateOperationRequest) (*google_longrunning.Operation, error)
+	UpdateOperation(context.Context, *UpdateOperationRequest) (*longrunning.Operation, error)
	// Returns the requested `Note`.
	GetNote(context.Context, *GetNoteRequest) (*Note, error)
	// Lists all `Notes` for a given project.
	ListNotes(context.Context, *ListNotesRequest) (*ListNotesResponse, error)
	// Deletes the given `Note` from the system.
-	DeleteNote(context.Context, *DeleteNoteRequest) (*google_protobuf.Empty, error)
+	DeleteNote(context.Context, *DeleteNoteRequest) (*empty.Empty, error)
	// Creates a new `Note`.
	CreateNote(context.Context, *CreateNoteRequest) (*Note, error)
	// Updates an existing `Note`.
@@ -4978,13 +6515,13 @@ var _Grafeas_serviceDesc = grpc.ServiceDesc{

type GrafeasProjectsClient interface {
	// Creates a new `Project`.
-	CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error)
+	CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Returns the requested `Project`.
	GetProject(ctx context.Context, in *GetProjectRequest, opts ...grpc.CallOption) (*Project, error)
	// Lists `Projects`
	ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error)
	// Deletes the given `Project` from the system.
-	DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error)
+	DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*empty.Empty, error)
}

type grafeasProjectsClient struct {
@@ -4995,8 +6532,8 @@ func NewGrafeasProjectsClient(cc *grpc.ClientConn) GrafeasProjectsClient {
	return &grafeasProjectsClient{cc}
}

-func (c *grafeasProjectsClient) CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) {
-	out := new(google_protobuf.Empty)
+func (c *grafeasProjectsClient) CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+	out := new(empty.Empty)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/CreateProject", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -5022,8 +6559,8 @@ func (c *grafeasProjectsClient) ListProjects(ctx context.Context, in *ListProjec
	return out, nil
}

-func (c *grafeasProjectsClient) DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*google_protobuf.Empty, error) {
-	out := new(google_protobuf.Empty)
+func (c *grafeasProjectsClient) DeleteProject(ctx context.Context, in *DeleteProjectRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+	out := new(empty.Empty)
	err := grpc.Invoke(ctx, "/grafeas.v1alpha1.api.GrafeasProjects/DeleteProject", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
@@ -5035,13 +6572,13 @@ func (c *grafeasProjectsClient) DeleteProject(ctx context.Context, in *DeletePro

type GrafeasProjectsServer interface {
	// Creates a new `Project`.
-	CreateProject(context.Context, *CreateProjectRequest) (*google_protobuf.Empty, error)
+	CreateProject(context.Context, *CreateProjectRequest) (*empty.Empty, error)
	// Returns the requested `Project`.
	GetProject(context.Context, *GetProjectRequest) (*Project, error)
	// Lists `Projects`
	ListProjects(context.Context, *ListProjectsRequest) (*ListProjectsResponse, error)
	// Deletes the given `Project` from the system.
- DeleteProject(context.Context, *DeleteProjectRequest) (*google_protobuf.Empty, error) + DeleteProject(context.Context, *DeleteProjectRequest) (*empty.Empty, error) } func RegisterGrafeasProjectsServer(s *grpc.Server, srv GrafeasProjectsServer) { @@ -5145,296 +6682,305 @@ var _GrafeasProjects_serviceDesc = grpc.ServiceDesc{ Metadata: "v1alpha1/proto/grafeas.proto", } -func init() { proto.RegisterFile("v1alpha1/proto/grafeas.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 4608 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x3b, 0x5d, 0x6f, 0xe3, 0xd8, - 0x75, 0xa6, 0xbe, 0x75, 0x24, 0xdb, 0xf4, 0x1d, 0xcf, 0x8c, 0x57, 0xb3, 0x93, 0x99, 0x70, 0x76, - 0x33, 0xb3, 0xde, 0xac, 0xbc, 0xe3, 0xfd, 0xca, 0xee, 0x76, 0xba, 0x91, 0x25, 0x8e, 0xc5, 0x58, - 0x1f, 0x06, 0x25, 0xcf, 0xce, 0xb4, 0x45, 0x59, 0x5a, 0xbc, 0x96, 0x19, 0x4b, 0xa4, 0x42, 0x52, - 0xce, 0x78, 0x83, 0x0d, 0xda, 0x60, 0x83, 0x22, 0x40, 0x81, 0x06, 0x48, 0x03, 0xf4, 0xa1, 0x0f, - 0x6d, 0x1f, 0xfa, 0x50, 0xf4, 0xad, 0x28, 0x50, 0xa0, 0xe8, 0x5b, 0x81, 0xa0, 0x8f, 0x41, 0xd1, - 0x7f, 0x50, 0xb4, 0x68, 0x0b, 0x14, 0x45, 0x5f, 0xfa, 0xd8, 0xe2, 0x7e, 0xf0, 0x43, 0x32, 0x25, - 0x73, 0x76, 0x12, 0xe4, 0xc5, 0xd6, 0x3d, 0x3c, 0xe7, 0xdc, 0x73, 0xcf, 0xf7, 0xbd, 0xe4, 0x85, - 0x57, 0xcf, 0x1f, 0xea, 0xa3, 0xc9, 0xa9, 0xfe, 0x70, 0x67, 0xe2, 0xd8, 0x9e, 0xbd, 0x33, 0x74, - 0xf4, 0x13, 0xac, 0xbb, 0x55, 0x3a, 0x42, 0x9b, 0xfe, 0xd0, 0xc7, 0xaa, 0xea, 0x13, 0xb3, 0x72, - 0x6b, 0x68, 0xdb, 0xc3, 0x11, 0x66, 0x14, 0xc7, 0xd3, 0x93, 0x1d, 0x3c, 0x9e, 0x78, 0x17, 0x8c, - 0xa4, 0xf2, 0x2a, 0x7f, 0xa8, 0x4f, 0xcc, 0x1d, 0xdd, 0xb2, 0x6c, 0x4f, 0xf7, 0x4c, 0xdb, 0xe2, - 0x0c, 0x2b, 0x77, 0xe7, 0x49, 0x4f, 0x4c, 0x3c, 0x32, 0xb4, 0xb1, 0xee, 0x9e, 0x71, 0x8c, 0x3b, - 0xf3, 0x18, 0x9e, 0x39, 0xc6, 0xae, 0xa7, 0x8f, 0x27, 0x1c, 0xe1, 0x1e, 0x47, 0x18, 0xd9, 0xd6, - 0xd0, 0x99, 0x5a, 0x96, 0x69, 0x0d, 0x77, 0xec, 0x09, 0x76, 0xa2, 0xf3, 0x48, 0xdb, 0xb0, 0x59, - 0x77, 0xb0, 0xee, 0xe1, 0x43, 0xc7, 0xfe, 0x36, 0x1e, 0x78, 0x2a, 0xfe, 0xce, 0x14, 0xbb, 0x1e, - 0x42, 0x90, 0xb1, 0xf4, 0x31, 0xde, 0x12, 0xee, 0x0a, 0x0f, 0x8a, 0x2a, 0xfd, 0x2d, 0xdd, 0x87, - 0x8d, 0x7d, 0xec, 0x25, 0x40, 0x34, 0xe1, 0x5a, 0xcb, 0x74, 0x7d, 0x4c, 0xd7, 0x47, 0xbd, 0x01, - 0xb9, 0x13, 0x73, 0xe4, 0x61, 0x87, 0x23, 0xf3, 0x11, 0xba, 0x05, 0xc5, 0x89, 0x3e, 0xc4, 0x9a, - 0x6b, 0x7e, 0x86, 0xb7, 0x52, 0x77, 0x85, 0x07, 0x59, 0xb5, 0x40, 0x00, 0x3d, 0xf3, 0x33, 0x8c, - 0x6e, 0x03, 0xd0, 0x87, 0x9e, 0x7d, 0x86, 0xad, 0xad, 0x34, 0x25, 0xa4, 0xe8, 0x7d, 0x02, 0x20, - 0xf2, 0x37, 0xf0, 0x08, 0x27, 0x92, 0x7f, 0x1b, 0x36, 0xf7, 0xb1, 0xd7, 0x1d, 0x0c, 0xa6, 0x8e, - 0x83, 0xad, 0x01, 0x5e, 0x86, 0xfb, 0x85, 0x00, 0x37, 0xc8, 0x1a, 0x42, 0xec, 0xe8, 0x32, 0x26, - 0xba, 0x83, 0x2d, 0x6f, 0x2b, 0xcb, 0x96, 0xc1, 0x46, 0x91, 0xe5, 0xa5, 0x16, 0x2f, 0x2f, 0xbd, - 0x74, 0x79, 0x99, 0xf9, 0xe5, 0xbd, 0x05, 0x37, 0xd9, 0xf2, 0x92, 0x49, 0xed, 0xc2, 0x4d, 0x66, - 0xcd, 0xcb, 0xe8, 0xa1, 0xd4, 0xe9, 0x19, 0xa9, 0xbf, 0x09, 0x60, 0x07, 0xc8, 0x54, 0xf2, 0xd2, - 0xee, 0xdd, 0x6a, 0x9c, 0x3b, 0x57, 0x23, 0x4c, 0x23, 0x34, 0xd2, 0x5f, 0x09, 0x70, 0xf3, 0x68, - 0x62, 0xe8, 0x09, 0x85, 0x7c, 0xf9, 0x19, 0xd1, 0xc7, 0x50, 0x9a, 0xd2, 0x09, 0x69, 0x3c, 0xd0, - 0x05, 0x95, 0x76, 0x2b, 0x55, 0xe6, 0xef, 0x55, 0x3f, 0x20, 0xaa, 0x8f, 0x49, 0xc8, 0xb4, 0x75, - 0xf7, 0x4c, 0x05, 0x86, 0x4e, 0x7e, 0x4b, 0xaf, 0xc1, 0xda, 0x3e, 0xf6, 0x3a, 0xb6, 0xb7, 0x54, - 0x93, 0x55, 0xd8, 0x9a, 0xf1, 0x95, 0xab, 0xf0, 0xbf, 0x0f, 0x22, 0x71, 
0x17, 0x82, 0xf6, 0x2b, - 0x71, 0x94, 0xfb, 0xb0, 0xc1, 0x1c, 0xe5, 0x2a, 0x41, 0x3d, 0xd8, 0x60, 0x2e, 0x12, 0x45, 0x0c, - 0x25, 0xcd, 0xcc, 0x48, 0x7a, 0x13, 0xf2, 0x96, 0xed, 0x61, 0xcd, 0x34, 0x7c, 0x51, 0xc9, 0x50, - 0x31, 0x50, 0x15, 0x32, 0xe4, 0x57, 0xa8, 0xfa, 0x38, 0xeb, 0xd1, 0x19, 0x28, 0x9e, 0xf4, 0x47, - 0x02, 0x6c, 0x30, 0x1f, 0xb9, 0x42, 0xbe, 0x80, 0x73, 0x2a, 0x19, 0xe7, 0x97, 0xf3, 0x85, 0x2f, - 0x04, 0xa8, 0xf8, 0x66, 0x8b, 0x89, 0xf4, 0x38, 0xf9, 0x7e, 0x19, 0xc6, 0xbb, 0x80, 0xcd, 0xd9, - 0x7c, 0xe9, 0x4e, 0x6c, 0xcb, 0xc5, 0xe8, 0x43, 0x28, 0x4c, 0x38, 0x6c, 0x4b, 0xb8, 0x9b, 0x7e, - 0x50, 0xda, 0xbd, 0x1d, 0xaf, 0x0f, 0x3f, 0xf9, 0x05, 0xe8, 0xe8, 0x6b, 0xb0, 0x6e, 0xe1, 0xe7, - 0x9e, 0x16, 0x99, 0x96, 0xc9, 0xbb, 0x4a, 0xc0, 0x87, 0xc1, 0xd4, 0x3f, 0x12, 0xe0, 0x56, 0xac, - 0x06, 0xb8, 0x08, 0x7b, 0x50, 0x0a, 0x03, 0xcf, 0x97, 0xe2, 0xea, 0x68, 0x8d, 0x12, 0x25, 0x96, - 0x65, 0x0c, 0x1b, 0x91, 0x18, 0xe2, 0x02, 0xbc, 0x0d, 0x59, 0x62, 0x67, 0x7f, 0xea, 0x65, 0x0e, - 0xc1, 0x10, 0x13, 0x4f, 0xf7, 0x43, 0x01, 0x6e, 0x5e, 0x4a, 0xf1, 0xbf, 0x82, 0x65, 0x7f, 0xce, - 0x2b, 0x4d, 0x50, 0x9a, 0x03, 0x29, 0x5e, 0x83, 0x59, 0x54, 0xee, 0x88, 0xb3, 0x40, 0xf4, 0x08, - 0x20, 0x2c, 0xeb, 0x5b, 0x29, 0xdf, 0x4f, 0x58, 0x00, 0x44, 0x8a, 0x7f, 0x35, 0x98, 0x41, 0x8d, - 0x10, 0x48, 0x26, 0xdc, 0xe0, 0xd9, 0x3b, 0x78, 0xbc, 0xc4, 0xfd, 0x3f, 0x86, 0x62, 0x40, 0xcb, - 0x83, 0xed, 0x8a, 0xb9, 0x42, 0x7c, 0xe9, 0xc7, 0x02, 0xdc, 0xe0, 0xf5, 0x69, 0x7e, 0xae, 0x30, - 0x03, 0x09, 0x33, 0x19, 0xe8, 0xab, 0x50, 0x0e, 0xe8, 0xc3, 0x34, 0x54, 0x0a, 0x60, 0x8a, 0xf1, - 0x72, 0x22, 0xdd, 0x86, 0x3c, 0x0f, 0x9e, 0xd8, 0x6c, 0xf9, 0xfb, 0x02, 0x6c, 0x04, 0x74, 0x6d, - 0xec, 0xe9, 0x86, 0xee, 0xe9, 0x24, 0xe7, 0x0c, 0xe8, 0x32, 0x34, 0xd2, 0x73, 0x51, 0x82, 0xb8, - 0x9c, 0xd3, 0xf7, 0x1b, 0x32, 0x15, 0x18, 0x3a, 0x01, 0xa0, 0xf7, 0xa0, 0x80, 0x2d, 0x83, 0x51, - 0xa6, 0xae, 0xa4, 0xcc, 0x63, 0xcb, 0x20, 0x23, 0xe9, 0x77, 0xa0, 0x50, 0x73, 0x3c, 0xf3, 0x44, - 0x8f, 0x97, 0x14, 0x55, 0xa0, 0x30, 0x38, 0xc5, 0x83, 0x33, 0x77, 0x3a, 0xe6, 0x4a, 0x0a, 0xc6, - 0x68, 0x0d, 0x52, 0xa6, 0xc1, 0xeb, 0x7e, 0xca, 0x34, 0xd0, 0x26, 0x64, 0x09, 0x8d, 0xbb, 0x95, - 0xb9, 0x9b, 0x7e, 0x50, 0x54, 0xd9, 0x40, 0xfa, 0xbb, 0x14, 0x6c, 0xd6, 0x3c, 0x8f, 0x4c, 0x4c, - 0x56, 0x5b, 0x9b, 0x7a, 0xa7, 0xb6, 0x63, 0x7a, 0x17, 0xe8, 0x53, 0xc8, 0x9c, 0x9a, 0xdc, 0x32, - 0xa5, 0xdd, 0x7a, 0x7c, 0x14, 0xc4, 0x51, 0xc6, 0x02, 0x9b, 0xa6, 0xe5, 0xa9, 0x94, 0x61, 0xe5, - 0x5b, 0xb0, 0xb5, 0x08, 0x03, 0x55, 0xe1, 0xda, 0xe9, 0x74, 0xac, 0x5b, 0x9a, 0x83, 0x75, 0x43, - 0x3f, 0x1e, 0x61, 0x2d, 0xb2, 0xe4, 0x0d, 0xfa, 0x48, 0xe5, 0x4f, 0x3a, 0xfa, 0x18, 0x57, 0xbe, - 0x0f, 0xa5, 0x08, 0x2f, 0x74, 0x0c, 0x37, 0x26, 0xc3, 0x89, 0xe6, 0x9a, 0x43, 0x0b, 0x1b, 0x9a, - 0x1e, 0x3e, 0xe1, 0xab, 0xd8, 0x5e, 0x90, 0x48, 0x87, 0x93, 0x1e, 0x25, 0x89, 0xf0, 0x6a, 0xae, - 0xa8, 0x9b, 0x93, 0x18, 0xf8, 0x5e, 0x09, 0x8a, 0x84, 0xbf, 0xee, 0x4d, 0x1d, 0x2c, 0xfd, 0xae, - 0x00, 0xe5, 0xbd, 0xa9, 0x39, 0x32, 0x1a, 0xd8, 0xd3, 0xcd, 0x91, 0x8b, 0x64, 0x80, 0x89, 0x63, - 0x9f, 0x63, 0x4b, 0x27, 0x6d, 0x0e, 0x9b, 0xf5, 0xf5, 0xf8, 0x59, 0x29, 0xdd, 0x61, 0x80, 0xac, - 0x46, 0x08, 0xd1, 0x1b, 0x20, 0x86, 0x23, 0xed, 0xf8, 0x82, 0xa4, 0x42, 0x66, 0xdf, 0xf5, 0x10, - 0xbe, 0x47, 0xc0, 0xd2, 0xbf, 0x66, 0x61, 0x7d, 0x8e, 0x15, 0x37, 0xbd, 0x10, 0x98, 0xfe, 0x36, - 0x95, 0x8a, 0xf8, 0x7b, 0x18, 0x4d, 0x45, 0x0e, 0x51, 0x0c, 0x52, 0x71, 0x06, 0xf6, 0x78, 0xac, - 0x5b, 0x86, 0xbb, 0x95, 0x5d, 0x56, 0x71, 0xea, 0x0c, 0x4b, 0x0d, 0xd0, 0xd1, 0x3e, 0xac, 0x1f, - 
0x4f, 0xcd, 0x91, 0xa7, 0xe9, 0xdc, 0x4d, 0xdd, 0xad, 0x1c, 0xe5, 0xf0, 0x95, 0x05, 0x0e, 0xc3, - 0xd1, 0xd4, 0x35, 0x4a, 0xe6, 0x0f, 0xdd, 0xf9, 0xe8, 0xca, 0xbf, 0x50, 0x74, 0x7d, 0x08, 0xe0, - 0x7a, 0xba, 0xe3, 0x31, 0xda, 0xc2, 0x95, 0xb4, 0x45, 0x8a, 0x4d, 0x49, 0x3f, 0x86, 0xd2, 0x89, - 0x69, 0x99, 0xee, 0x29, 0xa3, 0x2d, 0x5e, 0x3d, 0x2f, 0x43, 0xa7, 0xc4, 0x5b, 0x90, 0xa7, 0x52, - 0xd8, 0xce, 0x56, 0x89, 0x2a, 0xd5, 0x1f, 0xa2, 0x3b, 0x50, 0x1a, 0xd9, 0x43, 0x57, 0x3b, 0x9e, - 0x0e, 0xce, 0xb0, 0xb7, 0xb5, 0x4a, 0x9f, 0x02, 0x01, 0xed, 0x51, 0x08, 0x52, 0x60, 0xc3, 0xb5, - 0xa7, 0xce, 0x00, 0x6b, 0x11, 0x7f, 0x59, 0xa3, 0xb3, 0xbf, 0x1a, 0xaf, 0xba, 0x1e, 0x45, 0x57, - 0x45, 0x46, 0x16, 0xb1, 0xf6, 0x6d, 0x00, 0xcf, 0x31, 0x87, 0x43, 0xec, 0x10, 0xeb, 0xae, 0x33, - 0xeb, 0x72, 0x88, 0x62, 0xa0, 0xdf, 0x82, 0x55, 0xa2, 0x6b, 0x43, 0xb3, 0x27, 0xac, 0x58, 0x88, - 0xd4, 0x40, 0x1f, 0x24, 0xf2, 0x4a, 0x36, 0xee, 0x32, 0x4a, 0xd9, 0xf2, 0x9c, 0x0b, 0xb5, 0x7c, - 0x1c, 0x01, 0xa1, 0xfb, 0xcc, 0x01, 0x0c, 0xec, 0x68, 0xe7, 0xd8, 0x71, 0x49, 0xac, 0x6d, 0x50, - 0x09, 0xd6, 0x38, 0xf8, 0x09, 0x83, 0x56, 0x3e, 0x81, 0x8d, 0x4b, 0xbc, 0x90, 0x08, 0xe9, 0x33, - 0x7c, 0xc1, 0x3d, 0x95, 0xfc, 0x24, 0x59, 0xea, 0x5c, 0x1f, 0x4d, 0x31, 0xf7, 0x52, 0x36, 0xf8, - 0x28, 0xf5, 0x0d, 0x41, 0xfa, 0x5f, 0x01, 0xd6, 0x28, 0x87, 0x9e, 0x1f, 0x7e, 0xd4, 0xaf, 0xa7, - 0xc7, 0x23, 0x73, 0xa0, 0x85, 0x5c, 0x8a, 0x0c, 0x72, 0x80, 0x2f, 0xd0, 0xab, 0x91, 0x50, 0xf5, - 0xbd, 0x3e, 0x00, 0xa0, 0xeb, 0x90, 0x3b, 0xc3, 0x17, 0x5a, 0x90, 0x23, 0xb3, 0x67, 0xf8, 0x42, - 0x31, 0xd0, 0x3e, 0x14, 0x08, 0xd8, 0xbb, 0x98, 0x60, 0xda, 0xb3, 0xad, 0xed, 0x7e, 0x7d, 0x89, - 0xa6, 0x02, 0x59, 0xaa, 0x07, 0xf8, 0xa2, 0x7f, 0x31, 0xc1, 0x6a, 0xfe, 0x8c, 0xfd, 0x90, 0x9a, - 0x90, 0xe7, 0x30, 0xb4, 0x05, 0x9b, 0x07, 0xf2, 0x33, 0xad, 0xff, 0xec, 0x50, 0xd6, 0x8e, 0x3a, - 0xbd, 0x43, 0xb9, 0xae, 0x3c, 0x56, 0xe4, 0x86, 0xb8, 0x82, 0xae, 0xc3, 0xc6, 0xe1, 0xfe, 0xa1, - 0x56, 0xeb, 0xd5, 0x15, 0x45, 0xab, 0xa9, 0xed, 0xae, 0x2a, 0x37, 0x44, 0x01, 0x95, 0xa1, 0x70, - 0x78, 0xa0, 0x3c, 0xd5, 0x0e, 0xe5, 0xb6, 0x98, 0x92, 0x9e, 0x43, 0x91, 0x4e, 0x46, 0x79, 0xc5, - 0x28, 0x5c, 0x88, 0x53, 0x38, 0xda, 0x9b, 0x5f, 0x7d, 0x69, 0xf7, 0xb5, 0x24, 0x2b, 0x51, 0x67, - 0xf3, 0x5b, 0x9e, 0x07, 0x7d, 0x6c, 0xfd, 0x11, 0x21, 0x8d, 0xad, 0x73, 0xda, 0x7e, 0x14, 0x55, - 0xf2, 0x93, 0x60, 0xe9, 0xce, 0xd0, 0xdd, 0x4a, 0x53, 0x10, 0xfd, 0x4d, 0xb0, 0x0c, 0xd3, 0xe1, - 0x1d, 0x30, 0xf9, 0xc9, 0x13, 0x54, 0x36, 0x48, 0x50, 0xaf, 0x40, 0xe1, 0xbb, 0xba, 0xe9, 0x69, - 0x27, 0xb6, 0x43, 0xf3, 0x47, 0x51, 0xcd, 0x93, 0xf1, 0x63, 0xdb, 0x91, 0x7e, 0x96, 0x06, 0x68, - 0xe0, 0xc9, 0xc8, 0xbe, 0x20, 0x59, 0x9f, 0xb4, 0x06, 0x0e, 0xe6, 0x91, 0x33, 0x75, 0x4c, 0xda, - 0xa4, 0x15, 0xd5, 0x92, 0x0f, 0x3b, 0x72, 0xcc, 0xca, 0x4f, 0x03, 0x8a, 0x31, 0x69, 0x26, 0x6e, - 0x03, 0x4c, 0x5d, 0xec, 0x68, 0x78, 0xac, 0x9b, 0x23, 0xdf, 0x49, 0x08, 0x44, 0x26, 0x00, 0x92, - 0x00, 0x0c, 0x8a, 0x9c, 0xb4, 0x38, 0x03, 0x43, 0xa7, 0x09, 0xe0, 0x13, 0x58, 0x9d, 0x5a, 0x51, - 0xf2, 0xf4, 0x95, 0xe4, 0x65, 0x9f, 0x80, 0x32, 0xb8, 0x01, 0xb9, 0x81, 0x6d, 0x9d, 0x98, 0x43, - 0x9a, 0xb5, 0x8a, 0x2a, 0x1f, 0x91, 0xcc, 0xa2, 0x1b, 0x86, 0x83, 0x5d, 0x97, 0x6b, 0xc9, 0x1f, - 0x5e, 0x52, 0x40, 0xee, 0x92, 0x02, 0xd0, 0x21, 0x14, 0x26, 0x23, 0xdd, 0x3b, 0xb1, 0x9d, 0x31, - 0x4d, 0xa4, 0x6b, 0xbb, 0xef, 0xc6, 0x1b, 0x3e, 0xd4, 0x6b, 0x35, 0x54, 0x58, 0xf5, 0x90, 0xd3, - 0xaa, 0x01, 0x17, 0xa9, 0x0e, 0x05, 0x1f, 0x4a, 0x9c, 0xf9, 0xb0, 0x55, 0xeb, 0x3f, 0xee, 0xaa, - 0xed, 0x39, 0x67, 0xce, 
0x43, 0x7a, 0xff, 0x40, 0x16, 0x05, 0x54, 0x80, 0xcc, 0xe3, 0x96, 0xfc, - 0x54, 0x4c, 0x21, 0x80, 0x5c, 0xfd, 0xa8, 0xd7, 0xef, 0xb6, 0xc5, 0xb4, 0xf4, 0xf7, 0x39, 0x28, - 0x35, 0xec, 0xc1, 0x19, 0x76, 0x94, 0xb1, 0x3e, 0xc4, 0x95, 0x7f, 0x4f, 0x41, 0xb6, 0xa5, 0x5f, - 0x60, 0x07, 0x75, 0xa1, 0x68, 0x98, 0x0e, 0x1e, 0x78, 0xe6, 0x39, 0xf3, 0xaf, 0xb5, 0xdd, 0x87, - 0x0b, 0x24, 0x0e, 0xe9, 0xab, 0x94, 0xb6, 0xda, 0xf0, 0x09, 0xd5, 0x90, 0x07, 0x89, 0x7c, 0xdd, - 0x19, 0x4e, 0xc9, 0x72, 0xfc, 0xc2, 0x19, 0x02, 0xa4, 0xff, 0x16, 0xa0, 0x18, 0x90, 0xa1, 0x57, - 0xe0, 0x7a, 0x43, 0x51, 0xe5, 0x7a, 0x5f, 0x79, 0x32, 0x1f, 0x9d, 0x6b, 0x00, 0xed, 0x9a, 0xd2, - 0xe9, 0xd7, 0x94, 0x8e, 0xac, 0x8a, 0x02, 0x59, 0xa0, 0x7a, 0xd4, 0x11, 0x53, 0xe4, 0x47, 0xbd, - 0xdd, 0x10, 0xd3, 0xa8, 0x08, 0xd9, 0x56, 0x6d, 0x4f, 0x6e, 0x89, 0x19, 0xb2, 0x54, 0xf9, 0xe9, - 0x61, 0xb7, 0x27, 0x8b, 0x59, 0xf2, 0x5c, 0xee, 0x3c, 0x11, 0x73, 0xe4, 0x47, 0xad, 0xd1, 0x10, - 0xf3, 0x44, 0x25, 0xf5, 0xee, 0xe1, 0x33, 0xb1, 0x40, 0x98, 0xca, 0x9d, 0xbe, 0xfa, 0xec, 0xb0, - 0xab, 0x74, 0xfa, 0x62, 0x91, 0xd0, 0x3d, 0xe9, 0xb6, 0x8e, 0xda, 0xb2, 0x08, 0x04, 0xeb, 0xa8, - 0x27, 0xab, 0x62, 0x09, 0x95, 0x20, 0xff, 0x69, 0x57, 0x3d, 0x68, 0x28, 0xaa, 0x58, 0xa6, 0x5c, - 0xd4, 0x7d, 0x71, 0x95, 0x40, 0xbb, 0x9d, 0xbd, 0x23, 0xa5, 0xd5, 0x10, 0xd7, 0x08, 0xa3, 0x5e, - 0xbf, 0x7b, 0xd8, 0x53, 0xf6, 0x3b, 0xb5, 0x96, 0xb8, 0x8e, 0xd6, 0xa1, 0xd4, 0x94, 0x6b, 0xad, - 0x7e, 0xb3, 0xde, 0x94, 0xeb, 0x07, 0xa2, 0x48, 0x84, 0xeb, 0x35, 0xe5, 0x56, 0x4b, 0xdc, 0xa8, - 0x3c, 0x85, 0xd2, 0x63, 0xd3, 0x1a, 0x62, 0x67, 0xe2, 0x98, 0x6c, 0x8b, 0x7f, 0xfe, 0x30, 0xda, - 0x5b, 0xe5, 0xce, 0x1f, 0x92, 0x86, 0x8a, 0x3e, 0xd8, 0xd5, 0x8e, 0x47, 0xf6, 0x31, 0x0f, 0xea, - 0xdc, 0xf9, 0xee, 0xde, 0xc8, 0x3e, 0xe6, 0x0f, 0x28, 0x05, 0x3f, 0x4a, 0x3a, 0xdf, 0xa5, 0x2d, - 0xd8, 0xf7, 0x20, 0xbb, 0xa7, 0xbb, 0xe6, 0xbc, 0x63, 0xfa, 0x91, 0x16, 0x71, 0xcc, 0x11, 0xea, - 0xd2, 0x62, 0xeb, 0x4b, 0xc1, 0x63, 0xed, 0xad, 0xab, 0x2d, 0x1d, 0x11, 0x5d, 0x8d, 0x72, 0xa8, - 0xfc, 0x87, 0x00, 0xf9, 0x06, 0x76, 0xcc, 0x73, 0x6c, 0xcc, 0x33, 0x17, 0x5e, 0x96, 0x39, 0x69, - 0xae, 0x0d, 0xd3, 0xf5, 0x74, 0xff, 0xc0, 0x6a, 0x55, 0x0d, 0xc6, 0xe8, 0x31, 0xc0, 0x88, 0xb8, - 0x9f, 0x66, 0x5a, 0x27, 0x36, 0x4d, 0x76, 0xa5, 0xdd, 0xfb, 0x09, 0x5d, 0x56, 0x2d, 0x52, 0x52, - 0xc5, 0x3a, 0xb1, 0xd1, 0x36, 0x6c, 0x1c, 0xeb, 0x2e, 0xd6, 0x66, 0x34, 0xc7, 0x12, 0xe5, 0x3a, - 0x79, 0xa0, 0x86, 0xda, 0x93, 0xfe, 0x84, 0xba, 0xad, 0x3b, 0xb0, 0xcf, 0xb1, 0x73, 0x81, 0x1a, - 0xb0, 0xaa, 0x5b, 0xfa, 0xe8, 0xc2, 0x35, 0x5d, 0xed, 0xcc, 0xb4, 0x0c, 0x1e, 0x37, 0x77, 0x16, - 0x6f, 0x95, 0xab, 0x07, 0xa6, 0x65, 0xa8, 0x65, 0x9f, 0x8a, 0x8c, 0x2a, 0x0a, 0x80, 0xcf, 0x12, - 0xcf, 0x6d, 0xaa, 0x84, 0x17, 0xdc, 0x54, 0xc9, 0x00, 0x8f, 0xcd, 0x11, 0x6e, 0xea, 0xee, 0x29, - 0x76, 0xd1, 0x07, 0x50, 0x3c, 0x31, 0x47, 0x58, 0x3b, 0xd5, 0xdd, 0xd3, 0xe5, 0xbb, 0x78, 0x42, - 0xa0, 0x16, 0x4e, 0x38, 0xa9, 0xf4, 0x5d, 0xc8, 0x90, 0xff, 0xe8, 0x03, 0xc8, 0xd0, 0x1a, 0xcc, - 0x96, 0x75, 0x6f, 0x31, 0x2d, 0xfd, 0x43, 0x4b, 0x2f, 0x25, 0x98, 0xed, 0x20, 0xca, 0xbc, 0x83, - 0x90, 0xee, 0x42, 0xc1, 0xc7, 0x23, 0x51, 0xd6, 0xe9, 0x76, 0x64, 0x71, 0x85, 0xc4, 0x5e, 0xaf, - 0x59, 0xdb, 0x7d, 0xef, 0x7d, 0x51, 0x90, 0xfe, 0x96, 0x80, 0x6d, 0x0f, 0xc7, 0x16, 0xba, 0x37, - 0x61, 0xc3, 0x3d, 0xb5, 0x1d, 0x4f, 0x33, 0xb0, 0x3b, 0x70, 0xcc, 0x49, 0xb0, 0xed, 0x2c, 0xaa, - 0x22, 0x7d, 0xd0, 0x08, 0xe1, 0xa4, 0x7b, 0x27, 0xda, 0x9a, 0xc1, 0xe5, 0x36, 0x25, 0xf0, 0x28, - 0xea, 0x3b, 0x90, 0xa1, 0xc6, 0x2b, 0x26, 0x33, 
0x1e, 0x45, 0x46, 0x4f, 0x01, 0x9d, 0x4f, 0x47, - 0x16, 0x76, 0xf4, 0x63, 0x73, 0x64, 0x7a, 0xbc, 0x59, 0xc9, 0x51, 0x7b, 0x2d, 0x70, 0xc2, 0x27, - 0x51, 0x7c, 0xa2, 0x84, 0xe6, 0x8a, 0xba, 0x71, 0x3e, 0x0f, 0x44, 0xdf, 0x04, 0x60, 0xbd, 0x22, - 0xe5, 0xc8, 0x1a, 0xe9, 0x3b, 0x4b, 0x9a, 0x06, 0xce, 0xa9, 0x78, 0x1c, 0xb4, 0x27, 0x4d, 0x00, - 0xea, 0xd0, 0x26, 0xf1, 0x77, 0xda, 0xf7, 0x26, 0x0a, 0x0c, 0x9a, 0x42, 0x28, 0x27, 0xdd, 0xc5, - 0x14, 0x84, 0x9a, 0x90, 0x9f, 0xe8, 0x83, 0x33, 0xc2, 0x86, 0xf5, 0xc5, 0x0b, 0xfa, 0xb0, 0x43, - 0x86, 0xd4, 0xd6, 0x2d, 0x7d, 0x88, 0x1d, 0x7f, 0xd8, 0x5c, 0x51, 0x7d, 0x72, 0xb4, 0x07, 0xbc, - 0x66, 0x93, 0x4a, 0x47, 0xdb, 0xd3, 0x85, 0xc7, 0x3a, 0x61, 0x45, 0x6c, 0xae, 0xa8, 0x11, 0x2a, - 0xf4, 0x09, 0x29, 0x51, 0x3c, 0xf6, 0xb6, 0xd0, 0x32, 0xc5, 0x04, 0x21, 0x4a, 0x96, 0x13, 0xd0, - 0xa0, 0xc7, 0x50, 0x72, 0xf0, 0x48, 0xf7, 0xb0, 0x41, 0x63, 0x3c, 0x4f, 0x43, 0xe2, 0xf5, 0x25, - 0x06, 0x57, 0x19, 0xf6, 0x91, 0x33, 0x52, 0xc1, 0x09, 0x7e, 0xa3, 0x3a, 0xac, 0xe3, 0xe7, 0x13, - 0x93, 0x1f, 0x8e, 0xd0, 0xa6, 0x03, 0xae, 0x6c, 0x3a, 0xd6, 0x42, 0x12, 0x7f, 0xd7, 0x13, 0xdd, - 0x6d, 0x95, 0x5e, 0x68, 0xb7, 0x15, 0x1e, 0xbe, 0x52, 0xe2, 0xf2, 0xd5, 0xc4, 0x0c, 0x9d, 0x00, - 0x2a, 0xef, 0x02, 0x84, 0x0b, 0x23, 0x9d, 0x61, 0x58, 0x2a, 0xc8, 0x4f, 0x12, 0xbd, 0x23, 0xfd, - 0x18, 0x8f, 0xfc, 0xfe, 0x9f, 0x0e, 0xa4, 0x3f, 0x14, 0x20, 0x43, 0x02, 0x00, 0x6d, 0x82, 0x78, - 0xa0, 0x74, 0x1a, 0x73, 0x75, 0xfa, 0x15, 0xb8, 0x7e, 0x58, 0xab, 0x1f, 0xd4, 0xf6, 0x65, 0xed, - 0xc9, 0x51, 0xab, 0x23, 0xab, 0xb5, 0x3d, 0xa5, 0xa5, 0xf4, 0x9f, 0x89, 0x29, 0xb4, 0x01, 0xab, - 0xb4, 0x5e, 0x6a, 0x0d, 0xb9, 0x5f, 0x53, 0x5a, 0x3d, 0x31, 0x4d, 0xea, 0xa4, 0xd2, 0x26, 0xb8, - 0x7b, 0xb5, 0x9e, 0xd2, 0x13, 0x33, 0xe8, 0x1a, 0xac, 0xfb, 0xe4, 0xed, 0x5a, 0xa7, 0xb6, 0x2f, - 0xab, 0x62, 0x96, 0x54, 0xd7, 0x86, 0x7c, 0xd8, 0xea, 0x3e, 0xab, 0xed, 0xb5, 0x64, 0x31, 0x87, - 0x56, 0xa1, 0xd8, 0x50, 0x7a, 0xf5, 0xee, 0x13, 0x59, 0x7d, 0x26, 0xe6, 0xf7, 0x4a, 0x50, 0xa4, - 0x87, 0xe4, 0x24, 0x50, 0xa4, 0x7f, 0xc8, 0x01, 0x84, 0x07, 0x82, 0xb1, 0x09, 0x64, 0xbe, 0x3a, - 0xa6, 0x2e, 0x57, 0xc7, 0x5b, 0x9c, 0x65, 0xa4, 0xc8, 0x16, 0x08, 0x80, 0x16, 0x66, 0x3f, 0x51, - 0xe4, 0x5e, 0x24, 0x51, 0xb8, 0x70, 0x7d, 0x36, 0x51, 0x18, 0xec, 0x98, 0x82, 0x47, 0xf6, 0xaf, - 0x25, 0xcc, 0x15, 0xb3, 0x10, 0x7e, 0xd4, 0xd1, 0x5c, 0x51, 0x37, 0xcf, 0x63, 0xe0, 0x48, 0xf1, - 0xf7, 0x9b, 0xfe, 0x64, 0x6c, 0x2f, 0x2f, 0x2d, 0x49, 0x23, 0x21, 0x4b, 0xb6, 0xb9, 0xf4, 0x59, - 0x1d, 0xc2, 0xaa, 0xc1, 0xaa, 0x3b, 0xcf, 0x27, 0xcc, 0x51, 0xdf, 0xb8, 0x3a, 0x9f, 0xf0, 0xa6, - 0x80, 0x70, 0xe4, 0x1c, 0x58, 0x52, 0xf9, 0x14, 0xca, 0xa6, 0xe5, 0x7a, 0xfa, 0x68, 0xc4, 0x8a, - 0x1c, 0x73, 0xde, 0x87, 0x89, 0x32, 0x8b, 0x12, 0x21, 0x24, 0x8c, 0xa3, 0x8c, 0x50, 0xdb, 0xcf, - 0x31, 0xa4, 0xc5, 0xe4, 0x09, 0xeb, 0xcd, 0x17, 0xe8, 0xba, 0xc3, 0x74, 0x43, 0x37, 0x2d, 0x2d, - 0x00, 0x23, 0xa8, 0xcb, 0x74, 0x4f, 0xbf, 0xf0, 0xf4, 0x2a, 0xc8, 0x37, 0xd5, 0xb0, 0x92, 0x53, - 0x6e, 0x61, 0x5d, 0xbf, 0x4b, 0x72, 0xcf, 0x18, 0x1b, 0x26, 0x5b, 0x74, 0xd6, 0xf7, 0xbd, 0x00, - 0x34, 0x9f, 0x10, 0x8a, 0x2f, 0x93, 0x10, 0xe0, 0x45, 0x12, 0xc2, 0x5e, 0x11, 0xf2, 0xdc, 0x51, - 0xa4, 0x7f, 0xcb, 0xc2, 0xda, 0xac, 0xce, 0x2b, 0x7f, 0x99, 0x82, 0x72, 0xc3, 0x74, 0x3d, 0xc7, - 0x3c, 0x9e, 0x52, 0x41, 0x6f, 0x42, 0x7e, 0x30, 0xf1, 0xb7, 0x7e, 0x6c, 0xc7, 0x34, 0xa1, 0x9b, - 0x9e, 0x23, 0x28, 0xeb, 0xce, 0xe0, 0xd4, 0xf4, 0xf0, 0x20, 0xd8, 0xf1, 0xae, 0x25, 0xb4, 0x6c, - 0x2d, 0x42, 0xa8, 0xce, 0xb0, 0x41, 0x4f, 0x60, 0x8d, 0x64, 0x2b, 0xd7, 
0x0b, 0x76, 0xdb, 0x6c, - 0x8b, 0xb7, 0x93, 0x38, 0x76, 0x18, 0x99, 0xba, 0xca, 0xd8, 0xf8, 0xbb, 0xf3, 0xaf, 0x00, 0x8c, - 0x75, 0xd3, 0xf2, 0x74, 0xd3, 0xc2, 0xfe, 0xd6, 0x38, 0x02, 0xf1, 0x33, 0x63, 0x2e, 0xcc, 0x8c, - 0x77, 0xc9, 0x46, 0x35, 0x6c, 0x28, 0xf2, 0xcc, 0x88, 0x11, 0x50, 0xe5, 0x07, 0x02, 0x14, 0x5a, - 0xf6, 0x40, 0x5f, 0xae, 0x28, 0x05, 0xf2, 0xfe, 0x52, 0x52, 0x5f, 0x6e, 0x29, 0x3e, 0x3d, 0x49, - 0x74, 0x13, 0xdd, 0x3b, 0xe5, 0xc9, 0x8a, 0xfe, 0xae, 0x78, 0x90, 0xe7, 0xda, 0x8d, 0xcd, 0x83, - 0x47, 0x50, 0x36, 0x22, 0xf6, 0xdc, 0x02, 0x5a, 0x07, 0x93, 0x99, 0x29, 0xea, 0x08, 0xea, 0x0c, - 0x9b, 0xca, 0x18, 0xca, 0xd1, 0xf0, 0x8c, 0x9d, 0x5a, 0x81, 0xc2, 0x88, 0x6b, 0x87, 0xbf, 0x30, - 0x79, 0x2b, 0xd1, 0xb4, 0xbe, 0x4a, 0xd5, 0x80, 0x5c, 0xfa, 0x75, 0x28, 0x47, 0x7d, 0x06, 0xbd, - 0x0a, 0x5b, 0x35, 0xb5, 0xde, 0x54, 0xfa, 0x72, 0xbd, 0x7f, 0xa4, 0xca, 0x97, 0xf7, 0xc5, 0x4f, - 0xbf, 0xf1, 0x3e, 0xdb, 0x3f, 0x3e, 0x7d, 0xff, 0x5d, 0x31, 0x25, 0xfd, 0x9f, 0x00, 0x9b, 0x71, - 0xa7, 0xce, 0xb3, 0x47, 0x56, 0xc2, 0xfc, 0x91, 0xd5, 0x33, 0x28, 0x0f, 0x6c, 0xcb, 0xc3, 0x96, - 0xc7, 0x1a, 0xb4, 0x34, 0xf5, 0xf1, 0xf7, 0x93, 0x9f, 0x6a, 0x57, 0xeb, 0x8c, 0x9c, 0xb6, 0xcb, - 0xa5, 0x41, 0x38, 0x20, 0xfe, 0x38, 0x19, 0x4e, 0x34, 0x7e, 0x22, 0x46, 0xab, 0x53, 0x73, 0x45, - 0x2d, 0x4c, 0x86, 0x93, 0x03, 0x7c, 0xa1, 0x18, 0x52, 0x03, 0x4a, 0x11, 0x5a, 0xb2, 0xe0, 0x7a, - 0xb7, 0xd3, 0x97, 0x3b, 0xfd, 0xb8, 0x53, 0xad, 0x9b, 0x70, 0xad, 0xa7, 0xb4, 0x0f, 0x5b, 0xb2, - 0x46, 0x36, 0xa7, 0x4a, 0x67, 0x5f, 0xfb, 0x56, 0xaf, 0xdb, 0x11, 0x85, 0xbd, 0x82, 0x7f, 0xe6, - 0x26, 0xfd, 0x2c, 0x03, 0x39, 0x76, 0xa2, 0x89, 0x5a, 0xb0, 0xe6, 0x7a, 0xb6, 0x43, 0xdf, 0xa3, - 0x52, 0x08, 0xdf, 0x7a, 0x2c, 0xe8, 0xf9, 0x7b, 0x0c, 0x97, 0x11, 0x37, 0x57, 0xd4, 0x55, 0x37, - 0x0a, 0x40, 0x75, 0x92, 0xeb, 0x26, 0xb6, 0xcf, 0x6a, 0xe9, 0x97, 0x06, 0x2a, 0x9e, 0xd8, 0x01, - 0x1f, 0x70, 0x82, 0x11, 0xfa, 0x4d, 0xb8, 0xe9, 0x1f, 0x68, 0x6b, 0x73, 0xb2, 0x65, 0x12, 0xcb, - 0xa6, 0x5e, 0xf7, 0x79, 0xcc, 0x80, 0x51, 0x9b, 0x6c, 0x54, 0xf9, 0xd6, 0x08, 0xbb, 0x7c, 0xf3, - 0xf8, 0xf5, 0x65, 0x87, 0xbe, 0xd5, 0x70, 0x63, 0xc5, 0xce, 0x60, 0xe1, 0x24, 0xdc, 0x69, 0x3d, - 0x82, 0x3c, 0x35, 0xe4, 0x73, 0x8f, 0x57, 0xda, 0x7b, 0xcb, 0x58, 0xd5, 0x19, 0xaa, 0xea, 0xd3, - 0xa0, 0x3e, 0x5c, 0xd3, 0x0d, 0xc3, 0x24, 0xde, 0xa1, 0x8f, 0x34, 0x0e, 0x25, 0x1d, 0x42, 0x3a, - 0x29, 0x2b, 0x14, 0xd2, 0x73, 0x90, 0x5b, 0xd1, 0x60, 0x7d, 0x4e, 0xe6, 0x98, 0xb3, 0xde, 0xf7, - 0xa3, 0x3b, 0xb5, 0x85, 0x46, 0x0a, 0xf9, 0x44, 0x4e, 0x83, 0x89, 0x27, 0x31, 0x83, 0x48, 0x7f, - 0x23, 0x90, 0x96, 0x32, 0x30, 0xdd, 0xec, 0xbb, 0x0e, 0x61, 0xfe, 0x5d, 0xc7, 0x2d, 0x28, 0x52, - 0xf7, 0xa0, 0xd9, 0x81, 0xbf, 0x32, 0x23, 0x80, 0x0e, 0x6b, 0xd2, 0x4a, 0xc7, 0x8e, 0x6e, 0x0d, - 0x4e, 0x23, 0x3d, 0x18, 0xf1, 0x0c, 0x06, 0xa4, 0x28, 0xb7, 0xa0, 0xe0, 0xe9, 0x43, 0xf6, 0x3c, - 0xc3, 0x9f, 0xe7, 0x3d, 0x7d, 0x48, 0x1f, 0xde, 0x01, 0x18, 0xd8, 0xe3, 0xb1, 0xe9, 0x69, 0xee, - 0xa9, 0xce, 0xca, 0x2c, 0xd9, 0x04, 0x30, 0x58, 0xef, 0x54, 0xdf, 0x03, 0x28, 0x38, 0xf8, 0xdc, - 0x24, 0xc9, 0x53, 0xd2, 0x60, 0x75, 0xd6, 0x2f, 0x6e, 0x40, 0x8e, 0xbf, 0x2e, 0xe0, 0x09, 0x9b, - 0x8d, 0x08, 0xdc, 0x3e, 0x26, 0xe2, 0xfb, 0x1f, 0x1f, 0xb0, 0x11, 0x09, 0xd9, 0x21, 0xb6, 0xa2, - 0xef, 0x40, 0xd3, 0x6a, 0x04, 0x22, 0xfd, 0x67, 0x19, 0x36, 0x2e, 0x25, 0x71, 0xa2, 0x9f, 0xc1, - 0xb9, 0xeb, 0x6a, 0xee, 0xc0, 0xe6, 0x55, 0x32, 0xa5, 0x16, 0x09, 0xa4, 0x47, 0x00, 0xa8, 0x05, - 0x05, 0x17, 0x9f, 0x63, 0xc7, 0xf4, 0x2e, 0x78, 0x7a, 0x79, 0x3b, 0x69, 0x79, 0xe8, 0x71, 0x3a, - 
0x35, 0xe0, 0x40, 0xf6, 0x70, 0x7e, 0x17, 0x98, 0xa1, 0x0e, 0x55, 0x4d, 0xca, 0x8c, 0xb5, 0x80, - 0xaa, 0x4f, 0x5e, 0xf9, 0x67, 0x01, 0xf2, 0x7e, 0xed, 0xdc, 0x84, 0x2c, 0x9e, 0xd8, 0x83, 0x53, - 0xaa, 0xa7, 0xac, 0xca, 0x06, 0x41, 0xca, 0x4f, 0xcd, 0xbe, 0x1f, 0xf5, 0xf5, 0xed, 0x77, 0xd4, - 0xfe, 0x18, 0x75, 0x79, 0x47, 0x9d, 0xa5, 0xab, 0xfc, 0xf8, 0x05, 0x8b, 0xa0, 0xff, 0x3f, 0xec, - 0xb6, 0xa5, 0x77, 0xa0, 0x14, 0x01, 0x22, 0x80, 0x5c, 0xa7, 0xab, 0xb6, 0x6b, 0x2d, 0x71, 0x05, - 0x95, 0x20, 0xdf, 0x56, 0x3a, 0x4a, 0xfb, 0xa8, 0x2d, 0x0a, 0x74, 0x50, 0x7b, 0x4a, 0x07, 0xa9, - 0xca, 0xcf, 0xd3, 0x90, 0x63, 0x6b, 0x5d, 0x5c, 0xb1, 0xb7, 0xc2, 0x9d, 0x30, 0x3b, 0x25, 0x0e, - 0x76, 0xb6, 0x3a, 0x6c, 0x8e, 0x4d, 0x4b, 0xd3, 0x4f, 0x4e, 0xf0, 0x80, 0xec, 0x2c, 0xfd, 0xc2, - 0x9e, 0xfb, 0x72, 0x85, 0x1d, 0x8d, 0x4d, 0xab, 0xc6, 0x79, 0xf9, 0xca, 0x26, 0x53, 0xe8, 0xcf, - 0x2f, 0x4f, 0x91, 0xff, 0xb2, 0x53, 0xe8, 0xcf, 0xe7, 0xa7, 0xb8, 0x07, 0xab, 0xbe, 0xc7, 0x44, - 0x02, 0x4b, 0x2d, 0xfb, 0x40, 0x1a, 0x5b, 0x73, 0xed, 0x4f, 0xf1, 0x52, 0xfb, 0x83, 0x0c, 0x58, - 0x3b, 0x31, 0x9f, 0x63, 0x43, 0x0b, 0xaa, 0x7c, 0x96, 0xca, 0xf8, 0xe8, 0x4b, 0x6d, 0x73, 0x82, - 0xaa, 0xbf, 0x4a, 0x99, 0x06, 0x7d, 0xd5, 0x57, 0xa1, 0xcc, 0xb5, 0xcf, 0x6a, 0x30, 0x30, 0x41, - 0x38, 0x8c, 0xf0, 0xa9, 0xfc, 0x8f, 0x00, 0x9b, 0x71, 0x5b, 0x26, 0xe2, 0xa2, 0x41, 0xdd, 0x2e, - 0xf2, 0xe3, 0xaa, 0x68, 0xc0, 0x65, 0x5e, 0x3a, 0xe0, 0x66, 0xa3, 0x3b, 0x3b, 0x1f, 0xdd, 0xcf, - 0x60, 0xd5, 0x17, 0xde, 0x74, 0xdd, 0x29, 0xe6, 0x2f, 0x6b, 0xdf, 0x4d, 0x3a, 0x23, 0xef, 0x8c, - 0x14, 0x42, 0xab, 0xfa, 0x7a, 0xa0, 0xa3, 0xca, 0x0f, 0x53, 0x50, 0x8e, 0x3e, 0x46, 0xdf, 0x86, - 0x8d, 0xc0, 0x69, 0x02, 0x8b, 0x08, 0xbf, 0x08, 0x8b, 0x88, 0x3e, 0xdf, 0xc0, 0x28, 0x97, 0x4d, - 0x9f, 0xfa, 0x25, 0x98, 0xfe, 0x92, 0x9f, 0xa6, 0x2f, 0xfb, 0x69, 0xe5, 0xa7, 0x02, 0x5c, 0x8f, - 0xe5, 0x96, 0x28, 0xbe, 0x53, 0xb3, 0xf1, 0x1d, 0xe9, 0xd5, 0x33, 0x2f, 0xd7, 0xab, 0x4b, 0xbf, - 0x0d, 0x05, 0xdf, 0x5f, 0xd0, 0x16, 0x6c, 0xf6, 0xe4, 0x27, 0xb2, 0xaa, 0xf4, 0x9f, 0xcd, 0x75, - 0x6e, 0x7e, 0xa2, 0xaa, 0xb5, 0x58, 0xbb, 0xda, 0xea, 0x7e, 0xca, 0xde, 0xe2, 0xb4, 0xe5, 0x86, - 0x72, 0xd4, 0x16, 0xd3, 0xa8, 0x00, 0x99, 0xa6, 0xb2, 0xdf, 0x14, 0x33, 0xa8, 0x0c, 0x85, 0xba, - 0xaa, 0xf4, 0x95, 0x7a, 0xad, 0x25, 0x66, 0xa5, 0xff, 0x4a, 0xc1, 0xea, 0x4c, 0x5f, 0x40, 0xb6, - 0xc4, 0x83, 0x91, 0x3d, 0x35, 0x34, 0x52, 0x5f, 0xb9, 0xe5, 0x17, 0xb4, 0x39, 0x75, 0x82, 0x17, - 0x16, 0x71, 0xce, 0x81, 0xd6, 0x4e, 0xff, 0x09, 0xaa, 0x43, 0x6e, 0x88, 0x1d, 0xc7, 0xf4, 0xdf, - 0x1b, 0x2c, 0x38, 0x05, 0xd8, 0xa7, 0x38, 0xf3, 0x7c, 0x38, 0x29, 0xfa, 0x08, 0xd2, 0x43, 0xd3, - 0xe3, 0x7b, 0xb8, 0xaf, 0x2d, 0xe0, 0x70, 0x99, 0x9c, 0x10, 0xa1, 0x7d, 0xc8, 0xd1, 0xd3, 0x28, - 0xbf, 0x96, 0xed, 0x24, 0x68, 0x8e, 0xaa, 0x2d, 0x4a, 0xc1, 0xba, 0x36, 0x4e, 0x5e, 0xf9, 0x10, - 0x4a, 0x11, 0xf0, 0x8b, 0xbc, 0x04, 0x27, 0xbb, 0x65, 0xde, 0xa2, 0x49, 0x7f, 0x2a, 0x40, 0xb9, - 0x36, 0x32, 0x75, 0xd7, 0xd7, 0xf7, 0xc7, 0xbc, 0xa0, 0xb1, 0x13, 0xf3, 0x05, 0x87, 0xae, 0x51, - 0x8a, 0xe8, 0x51, 0x51, 0x4c, 0xf5, 0x94, 0x3e, 0x59, 0x7a, 0xe8, 0x56, 0x84, 0xec, 0x63, 0xe5, - 0x29, 0x7d, 0x5d, 0x4d, 0xbc, 0xa6, 0xfb, 0x84, 0x1e, 0x94, 0xa5, 0x08, 0xbc, 0xdb, 0x6f, 0xca, - 0xaa, 0x98, 0x91, 0xfe, 0x51, 0x80, 0x1b, 0xf1, 0xa6, 0x45, 0xef, 0x41, 0x9e, 0xf6, 0x61, 0xbc, - 0x47, 0x5b, 0xf8, 0xd5, 0x03, 0xa1, 0x54, 0x0c, 0x35, 0xe7, 0xd0, 0xff, 0xa4, 0x43, 0xf3, 0x0b, - 0x78, 0x74, 0x9f, 0x02, 0x3e, 0x50, 0x31, 0x90, 0x02, 0xab, 0x3a, 0x59, 0xa4, 0xdf, 0xcb, 0x72, - 0x63, 0x4b, 0x57, 0xeb, 
0xa3, 0xb9, 0xa2, 0x96, 0xf5, 0xc8, 0x78, 0xa6, 0x5d, 0xfb, 0xb9, 0x00, - 0xd7, 0x62, 0x7c, 0x0b, 0xbd, 0x02, 0x85, 0x53, 0xdb, 0xf5, 0x22, 0x61, 0x9d, 0x27, 0x63, 0x12, - 0xd7, 0xaf, 0xc3, 0x1a, 0x73, 0x3b, 0x8d, 0xf7, 0x9f, 0xfe, 0xa7, 0x80, 0x0c, 0xea, 0x7f, 0x82, - 0x36, 0xb7, 0xa6, 0x74, 0x92, 0x35, 0x65, 0x7e, 0x21, 0x6b, 0x92, 0x41, 0x9c, 0x77, 0xf6, 0x98, - 0x23, 0xd9, 0x3b, 0x31, 0x3a, 0x8f, 0x4a, 0x27, 0x7d, 0x07, 0x72, 0xcc, 0x4c, 0xa8, 0x0d, 0xeb, - 0x7e, 0xf3, 0x3d, 0x6b, 0xdd, 0x7b, 0xcb, 0x3f, 0x61, 0xa5, 0xd4, 0x64, 0x2f, 0x37, 0x89, 0x02, - 0x10, 0x82, 0xf4, 0x34, 0x62, 0x65, 0x32, 0xd8, 0xcb, 0x40, 0xca, 0x34, 0xa4, 0x03, 0x58, 0x9d, - 0xa1, 0x7d, 0x99, 0xb6, 0x7f, 0xf7, 0x2f, 0x44, 0xc8, 0xef, 0x33, 0xf1, 0xd0, 0x8f, 0x05, 0x58, - 0x9d, 0xf9, 0x06, 0x1c, 0x6d, 0x2f, 0xca, 0x33, 0x97, 0x2f, 0x15, 0x54, 0xae, 0xfc, 0x58, 0x54, - 0x7a, 0xf8, 0x83, 0x7f, 0xfa, 0x97, 0x9f, 0xa4, 0xde, 0x44, 0x6f, 0xec, 0x04, 0x57, 0x4b, 0xbe, - 0x47, 0x84, 0x7a, 0xe4, 0x7f, 0xc4, 0xbb, 0xb3, 0xbd, 0x13, 0xf9, 0x9e, 0x74, 0x67, 0xfb, 0x73, - 0xf4, 0x67, 0x02, 0xac, 0xcf, 0x7d, 0xb2, 0x8a, 0x16, 0xe4, 0xd1, 0xf8, 0xcb, 0x0b, 0x95, 0xb7, - 0x12, 0x62, 0xb3, 0x2f, 0x50, 0x63, 0x65, 0x64, 0x5f, 0x66, 0x46, 0xa4, 0xfc, 0x3c, 0x2a, 0x26, - 0xfa, 0x03, 0x01, 0xc4, 0xf9, 0x3b, 0x0b, 0x68, 0xd1, 0xcb, 0xd7, 0xf8, 0xbb, 0x0d, 0x95, 0x1b, - 0x97, 0x4e, 0x0c, 0xe5, 0xf1, 0xc4, 0xbb, 0xf0, 0xc5, 0xd9, 0x7e, 0x01, 0x95, 0xfd, 0xb9, 0x00, - 0xe2, 0xfc, 0x9d, 0x88, 0x45, 0xe2, 0x2c, 0xb8, 0x3b, 0x91, 0xc0, 0x96, 0x8f, 0xa8, 0x60, 0x1f, - 0x48, 0xc9, 0xf5, 0xf4, 0x51, 0xf4, 0x42, 0x03, 0x11, 0x72, 0xfe, 0x0a, 0xc5, 0x22, 0x21, 0x17, - 0x5c, 0xb5, 0x48, 0x2e, 0xe4, 0x6e, 0x72, 0xed, 0xcd, 0x08, 0xf9, 0xc7, 0x02, 0xbd, 0xff, 0x33, - 0x7b, 0x27, 0x02, 0x55, 0x13, 0xc4, 0x44, 0xe4, 0x9b, 0xff, 0xca, 0x92, 0x0f, 0xb8, 0xa5, 0x0f, - 0xa8, 0x80, 0x0f, 0xd1, 0x4e, 0x62, 0x01, 0x77, 0xd8, 0x27, 0xdf, 0x3f, 0x11, 0x60, 0x7d, 0xee, - 0xc3, 0xe2, 0x45, 0x71, 0x11, 0xff, 0xfd, 0x71, 0x65, 0xf9, 0xcb, 0x6d, 0xe9, 0x1d, 0x2a, 0xd9, - 0x5b, 0xd2, 0x83, 0x2b, 0xec, 0x1b, 0x7c, 0x56, 0xfd, 0x91, 0xb0, 0x4d, 0xa5, 0x9a, 0xfb, 0xb4, - 0x7a, 0x91, 0x54, 0xf1, 0x5f, 0x60, 0x27, 0x94, 0x6a, 0xf7, 0xc1, 0x32, 0x7d, 0x05, 0x22, 0x11, - 0x7b, 0x0a, 0xdb, 0xe8, 0x39, 0xe4, 0xf9, 0xfd, 0x17, 0xf4, 0xda, 0x42, 0xdb, 0x25, 0xb5, 0xd8, - 0x9b, 0x54, 0x82, 0xd7, 0xd1, 0xbd, 0x25, 0x12, 0x50, 0x13, 0x91, 0x50, 0xfc, 0x91, 0x00, 0xc5, - 0xe0, 0x03, 0x7f, 0xf4, 0xb5, 0xc5, 0x99, 0x28, 0x7a, 0x8b, 0xa6, 0x72, 0xff, 0x4a, 0x3c, 0x9e, - 0xab, 0xe2, 0x64, 0x89, 0xb1, 0x11, 0xf3, 0x98, 0xcf, 0x00, 0xc2, 0xfb, 0x32, 0xe8, 0xfe, 0xb2, - 0xf4, 0x14, 0xd5, 0xc5, 0xa2, 0xc4, 0xc4, 0xe7, 0xde, 0x4e, 0xa4, 0x87, 0x2f, 0x04, 0x80, 0xf0, - 0x0e, 0xce, 0xa2, 0xc9, 0x2f, 0xdd, 0xd2, 0x59, 0x6a, 0x08, 0x9e, 0x19, 0xa5, 0x24, 0x8b, 0xff, - 0x88, 0xdd, 0x9c, 0x21, 0x62, 0x84, 0x77, 0x72, 0x16, 0x89, 0x71, 0xe9, 0xd6, 0x4e, 0x12, 0x31, - 0x76, 0x93, 0xe8, 0x81, 0x8b, 0xf1, 0xd7, 0x02, 0xbb, 0x2d, 0x38, 0x77, 0x03, 0x05, 0xbd, 0xbd, - 0xdc, 0xee, 0x31, 0xb5, 0xed, 0xe1, 0x0b, 0x50, 0x70, 0x9f, 0x49, 0x92, 0x71, 0x7c, 0x79, 0xa3, - 0xa9, 0x67, 0xf7, 0x8b, 0x0c, 0xac, 0xf3, 0x46, 0xc1, 0xbf, 0xb6, 0x83, 0x9e, 0xc3, 0xea, 0xcc, - 0x5d, 0xca, 0x45, 0xfd, 0x42, 0xdc, 0x85, 0xcb, 0x85, 0x9e, 0x75, 0x8f, 0x4a, 0x78, 0x5b, 0xba, - 0xb5, 0x58, 0xc2, 0xcf, 0xd1, 0x05, 0x40, 0x78, 0x33, 0x73, 0x91, 0x25, 0x2f, 0xdd, 0xdd, 0xac, - 0x2c, 0xbf, 0x4d, 0xe4, 0x4f, 0x8d, 0x96, 0x4e, 0xfd, 0x7b, 0x02, 0x94, 0xa3, 0x97, 0x97, 0xd0, - 0x1b, 0x8b, 0xad, 0x30, 0x77, 0x21, 0xb4, 0xb2, 
0x9d, 0x04, 0x95, 0x5b, 0xaa, 0x42, 0x85, 0xd9, - 0x44, 0x68, 0x27, 0x7a, 0x11, 0x37, 0x50, 0xfc, 0xcc, 0x25, 0xd0, 0x45, 0x8a, 0x8f, 0xbb, 0x29, - 0x7a, 0x95, 0xe2, 0xb7, 0x97, 0xad, 0x7e, 0xaf, 0xf8, 0x1b, 0x79, 0x3e, 0xd3, 0x71, 0x8e, 0xd2, - 0xbf, 0xf3, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf4, 0xa4, 0xcb, 0x85, 0x29, 0x3c, 0x00, 0x00, +func init() { + proto.RegisterFile("v1alpha1/proto/grafeas.proto", fileDescriptor_grafeas_f4518a1c0d4387f8) +} + +var fileDescriptor_grafeas_f4518a1c0d4387f8 = []byte{ + // 4716 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x7b, 0xdd, 0x6f, 0x23, 0xd7, + 0x75, 0xb8, 0x86, 0xa4, 0xf8, 0x71, 0x48, 0x49, 0xa3, 0xbb, 0xda, 0x5d, 0x99, 0xeb, 0xcd, 0x6e, + 0x66, 0xed, 0xac, 0xad, 0xc4, 0x94, 0x57, 0x8e, 0xbd, 0xf6, 0xfa, 0xe7, 0x9f, 0x43, 0x91, 0x5c, + 0x71, 0x22, 0x7e, 0x08, 0x43, 0x6a, 0xbd, 0xdb, 0x16, 0x9d, 0x8e, 0x38, 0x57, 0xd4, 0x44, 0xe4, + 0x0c, 0x33, 0x33, 0x54, 0x56, 0x0e, 0x5c, 0xb4, 0x81, 0x8d, 0x22, 0x40, 0x1f, 0x02, 0xa4, 0x05, + 0xfa, 0xd8, 0x36, 0x40, 0x1f, 0x82, 0xbe, 0x15, 0xf9, 0x0b, 0x5a, 0xa0, 0xe8, 0x63, 0x50, 0xe4, + 0x3f, 0x28, 0x0a, 0x14, 0x05, 0x82, 0xb6, 0x2f, 0x05, 0xfa, 0xd2, 0xe2, 0x7e, 0xcd, 0x0c, 0xc9, + 0x21, 0xc5, 0xf5, 0xc6, 0xc8, 0x8b, 0xc4, 0x7b, 0xe6, 0x9c, 0x73, 0xcf, 0x3d, 0xdf, 0xf7, 0xce, + 0x5c, 0x78, 0xf5, 0xe2, 0x81, 0x31, 0x18, 0x9d, 0x19, 0x0f, 0x76, 0x47, 0xae, 0xe3, 0x3b, 0xbb, + 0x7d, 0xd7, 0x38, 0xc5, 0x86, 0x57, 0xa2, 0x23, 0xb4, 0x25, 0x86, 0x02, 0xab, 0x64, 0x8c, 0xac, + 0xe2, 0xad, 0xbe, 0xe3, 0xf4, 0x07, 0x98, 0x51, 0x9c, 0x8c, 0x4f, 0x77, 0xf1, 0x70, 0xe4, 0x5f, + 0x32, 0x92, 0xe2, 0xab, 0xfc, 0xa1, 0x31, 0xb2, 0x76, 0x0d, 0xdb, 0x76, 0x7c, 0xc3, 0xb7, 0x1c, + 0x9b, 0x33, 0x2c, 0xde, 0x9d, 0x26, 0x3d, 0xb5, 0xf0, 0xc0, 0xd4, 0x87, 0x86, 0x77, 0xce, 0x31, + 0xee, 0x4c, 0x63, 0xf8, 0xd6, 0x10, 0x7b, 0xbe, 0x31, 0x1c, 0x71, 0x84, 0x7b, 0x1c, 0x61, 0xe0, + 0xd8, 0x7d, 0x77, 0x6c, 0xdb, 0x96, 0xdd, 0xdf, 0x75, 0x46, 0xd8, 0x8d, 0xce, 0xa3, 0xb4, 0x61, + 0xab, 0xe2, 0x62, 0xc3, 0xc7, 0x47, 0xae, 0xf3, 0x3d, 0xdc, 0xf3, 0x35, 0xfc, 0xfd, 0x31, 0xf6, + 0x7c, 0xf4, 0x10, 0x32, 0x23, 0x06, 0xd9, 0x96, 0xee, 0x4a, 0x6f, 0xe4, 0xf7, 0x6e, 0x97, 0xe2, + 0x96, 0x58, 0x12, 0x64, 0x02, 0x5b, 0xb9, 0x0f, 0x9b, 0x07, 0xd8, 0x9f, 0xe2, 0x86, 0x20, 0x65, + 0x1b, 0x43, 0x4c, 0x59, 0xe5, 0x34, 0xfa, 0x5b, 0xb1, 0xe0, 0x5a, 0xc3, 0xf2, 0x04, 0xa6, 0x27, + 0x50, 0x6f, 0x40, 0xfa, 0xd4, 0x1a, 0xf8, 0xd8, 0xe5, 0xc8, 0x7c, 0x84, 0x6e, 0x41, 0x6e, 0x64, + 0xf4, 0xb1, 0xee, 0x59, 0x9f, 0xe2, 0xed, 0xc4, 0x5d, 0xe9, 0x8d, 0x55, 0x2d, 0x4b, 0x00, 0x1d, + 0xeb, 0x53, 0x8c, 0x6e, 0x03, 0xd0, 0x87, 0xbe, 0x73, 0x8e, 0xed, 0xed, 0x24, 0x25, 0xa4, 0xe8, + 0x5d, 0x02, 0x50, 0x76, 0x60, 0xab, 0x8a, 0x07, 0x78, 0x66, 0x91, 0x71, 0x62, 0xed, 0xc0, 0xd6, + 0x01, 0xf6, 0xdb, 0xbd, 0xde, 0xd8, 0x75, 0xb1, 0xdd, 0xc3, 0x8b, 0x70, 0x3f, 0x97, 0xe0, 0x06, + 0x59, 0x43, 0x88, 0x1d, 0x5d, 0xc6, 0xc8, 0x70, 0xb1, 0xed, 0x6f, 0xaf, 0xb2, 0x65, 0xb0, 0x51, + 0x64, 0x79, 0x89, 0xf9, 0xcb, 0x4b, 0x2e, 0x5c, 0x5e, 0x6a, 0x7a, 0x79, 0x6f, 0xc1, 0x4d, 0xb6, + 0xbc, 0xe5, 0xa4, 0xf6, 0xe0, 0x26, 0x33, 0xf9, 0x2c, 0x7a, 0x28, 0x75, 0x72, 0x42, 0xea, 0xef, + 0x00, 0x38, 0x01, 0x32, 0x95, 0x3c, 0xbf, 0x77, 0x37, 0xde, 0x21, 0x22, 0x4c, 0x23, 0x34, 0xca, + 0xdf, 0x4a, 0x70, 0xf3, 0x78, 0x64, 0x1a, 0x4b, 0x0a, 0xf9, 0xf2, 0x33, 0xa2, 0x0f, 0x21, 0x3f, + 0xa6, 0x13, 0xd2, 0xa0, 0xa1, 0x0b, 0xca, 0xef, 0x15, 0x4b, 0x2c, 0x28, 0x4a, 0x22, 0x6a, 0x4a, + 0x8f, 0x49, 0x5c, 0x35, 0x0d, 0xef, 0x5c, 0x03, 
0x86, 0x4e, 0x7e, 0x2b, 0xaf, 0xc1, 0xfa, 0x01, + 0xf6, 0x5b, 0x8e, 0xbf, 0x50, 0x93, 0x25, 0xd8, 0x9e, 0xf0, 0x95, 0xab, 0xf0, 0xff, 0x10, 0x64, + 0xe2, 0x2e, 0x04, 0xed, 0xb7, 0xe2, 0x28, 0xf7, 0x61, 0x93, 0x39, 0xca, 0x55, 0x82, 0xfa, 0xb0, + 0xc9, 0x5c, 0x24, 0x8a, 0x18, 0x4a, 0x9a, 0x9a, 0x90, 0xf4, 0x26, 0x64, 0x6c, 0xc7, 0xc7, 0xba, + 0x65, 0x0a, 0x51, 0xc9, 0x50, 0x35, 0x51, 0x09, 0x52, 0xe4, 0x57, 0xa8, 0xfa, 0x38, 0xeb, 0xd1, + 0x19, 0x28, 0x9e, 0xf2, 0x67, 0x12, 0x6c, 0x32, 0x1f, 0xb9, 0x42, 0xbe, 0x80, 0x73, 0x62, 0x39, + 0xce, 0x2f, 0xe7, 0x0b, 0x9f, 0x4b, 0x50, 0x14, 0x66, 0x8b, 0x89, 0xf4, 0x38, 0xf9, 0xbe, 0x0a, + 0xe3, 0x5d, 0xc2, 0xd6, 0x64, 0xbe, 0xf4, 0x46, 0x8e, 0xed, 0x61, 0xf4, 0x01, 0x64, 0x79, 0xee, + 0xf5, 0xb6, 0xa5, 0xbb, 0xc9, 0xab, 0x53, 0x75, 0x80, 0x8e, 0xbe, 0x01, 0x1b, 0x36, 0x7e, 0xee, + 0xeb, 0x91, 0x69, 0x99, 0xbc, 0x6b, 0x04, 0x7c, 0x14, 0x4c, 0xfd, 0x63, 0x09, 0x6e, 0xc5, 0x6a, + 0x80, 0x8b, 0xb0, 0x0f, 0xf9, 0x30, 0xf0, 0x84, 0x14, 0x57, 0x47, 0x6b, 0x94, 0x68, 0x69, 0x59, + 0x86, 0xb0, 0x19, 0x89, 0x21, 0x2e, 0xc0, 0xdb, 0xb0, 0x4a, 0xec, 0x2c, 0xa6, 0x5e, 0xe4, 0x10, + 0x0c, 0x71, 0xe9, 0xe9, 0xbe, 0x90, 0xe0, 0xe6, 0x4c, 0x8a, 0xff, 0x2d, 0x2c, 0xfb, 0x33, 0x5e, + 0x69, 0x82, 0xfa, 0x1d, 0x48, 0xf1, 0x1a, 0x4c, 0xa2, 0x72, 0x47, 0x9c, 0x04, 0xa2, 0x8f, 0x00, + 0xc2, 0xda, 0xbf, 0x9d, 0x10, 0x7e, 0xc2, 0x02, 0x20, 0xd2, 0x21, 0x94, 0x82, 0x19, 0xb4, 0x08, + 0x81, 0x62, 0xc1, 0x0d, 0x9e, 0xbd, 0x83, 0xc7, 0x0b, 0xdc, 0xff, 0x43, 0xc8, 0x05, 0xb4, 0x3c, + 0xd8, 0xae, 0x98, 0x2b, 0xc4, 0x57, 0x7e, 0x22, 0xc1, 0x0d, 0x5e, 0x9f, 0xa6, 0xe7, 0x0a, 0x33, + 0x90, 0x34, 0x91, 0x81, 0xbe, 0x0e, 0x85, 0x80, 0x3e, 0x4c, 0x43, 0xf9, 0x00, 0xa6, 0x9a, 0x2f, + 0x27, 0xd2, 0x6d, 0xc8, 0xf0, 0xe0, 0x89, 0xcd, 0x96, 0x7f, 0x22, 0xc1, 0x66, 0x40, 0xd7, 0xc4, + 0xbe, 0x61, 0x1a, 0xbe, 0x41, 0x72, 0x4e, 0x8f, 0x2e, 0x43, 0x27, 0x8d, 0x19, 0xef, 0xa2, 0x66, + 0x73, 0x4e, 0x57, 0x74, 0x6d, 0x1a, 0x30, 0x74, 0x02, 0x40, 0xef, 0x42, 0x16, 0xdb, 0x26, 0xa3, + 0x4c, 0x5c, 0x49, 0x99, 0xc1, 0xb6, 0x49, 0x46, 0xca, 0x1f, 0x40, 0xb6, 0xec, 0xfa, 0xd6, 0xa9, + 0x11, 0x2f, 0x29, 0x2a, 0x42, 0xb6, 0x77, 0x86, 0x7b, 0xe7, 0xde, 0x78, 0xc8, 0x95, 0x14, 0x8c, + 0xd1, 0x3a, 0x24, 0x2c, 0x93, 0xd7, 0xfd, 0x84, 0x65, 0xa2, 0x2d, 0x58, 0x25, 0x34, 0xde, 0x76, + 0xea, 0x6e, 0xf2, 0x8d, 0x9c, 0xc6, 0x06, 0xca, 0xdf, 0x27, 0x60, 0xab, 0xec, 0xfb, 0x64, 0x62, + 0xb2, 0xda, 0xf2, 0xd8, 0x3f, 0x73, 0x5c, 0xcb, 0xbf, 0x44, 0x9f, 0x40, 0xea, 0xcc, 0xb2, 0x45, + 0xb7, 0x58, 0x89, 0x8f, 0x82, 0x38, 0xca, 0x58, 0x60, 0xdd, 0xb2, 0x7d, 0x8d, 0x32, 0x2c, 0x7e, + 0x17, 0xb6, 0xe7, 0x61, 0xa0, 0x12, 0x5c, 0x3b, 0x1b, 0x0f, 0x0d, 0x5b, 0x77, 0xb1, 0x61, 0x1a, + 0x27, 0x03, 0xac, 0x47, 0x96, 0xbc, 0x49, 0x1f, 0x69, 0xfc, 0x49, 0xcb, 0x18, 0xe2, 0xe2, 0x17, + 0x12, 0xa0, 0x08, 0xb3, 0x2a, 0xf6, 0x0d, 0x6b, 0xe0, 0xa1, 0x13, 0xb8, 0x31, 0xea, 0x8f, 0x74, + 0xcf, 0xea, 0xdb, 0xd8, 0xd4, 0x8d, 0x10, 0x81, 0xaf, 0x66, 0x67, 0x4e, 0x42, 0xed, 0x8f, 0x3a, + 0x94, 0x24, 0xc2, 0xb2, 0xbe, 0xa2, 0x6d, 0x8d, 0x62, 0xe0, 0xfb, 0x79, 0xc8, 0x11, 0xfe, 0x86, + 0x3f, 0x76, 0xb1, 0xf2, 0x47, 0x12, 0x14, 0xf6, 0xc7, 0xd6, 0xc0, 0x14, 0x12, 0xd4, 0x00, 0x46, + 0xae, 0x73, 0x81, 0x6d, 0x83, 0xb4, 0x3b, 0x6c, 0xd6, 0xd7, 0xe3, 0x67, 0xa5, 0x74, 0x47, 0x01, + 0xb2, 0x16, 0x21, 0x44, 0x6f, 0x82, 0x1c, 0x8e, 0xf4, 0x93, 0x4b, 0x92, 0x12, 0x99, 0x9d, 0x37, + 0x42, 0xf8, 0x3e, 0x01, 0x2b, 0xff, 0xba, 0x0a, 0x1b, 0x53, 0xac, 0xb8, 0x0b, 0x48, 0x81, 0x0b, + 0xdc, 0xa6, 0x52, 0x11, 0xbf, 0x0f, 0xa3, 0x2a, 0xc7, 0x21, 0xaa, 0x49, 
0x2a, 0x4f, 0xcf, 0x19, + 0x0e, 0x0d, 0xdb, 0xf4, 0xb6, 0x57, 0x17, 0x55, 0x9e, 0x0a, 0xc3, 0xd2, 0x02, 0x74, 0x74, 0x00, + 0x1b, 0x27, 0x63, 0x6b, 0xe0, 0xeb, 0x06, 0x77, 0x57, 0x6f, 0x3b, 0x4d, 0x39, 0x7c, 0x6d, 0x8e, + 0xe3, 0x70, 0x34, 0x6d, 0x9d, 0x92, 0x89, 0xa1, 0x37, 0x1d, 0x65, 0x99, 0x17, 0x8a, 0xb2, 0x0f, + 0x00, 0x3c, 0xdf, 0x70, 0x7d, 0x46, 0x9b, 0xbd, 0x92, 0x36, 0x47, 0xb1, 0x29, 0xe9, 0x87, 0x90, + 0x3f, 0xb5, 0x6c, 0xcb, 0x3b, 0x63, 0xb4, 0xb9, 0xab, 0xe7, 0x65, 0xe8, 0x94, 0x78, 0x1b, 0x32, + 0x54, 0x0a, 0xc7, 0xdd, 0xce, 0x53, 0xa5, 0x8a, 0x21, 0xba, 0x03, 0xf9, 0x81, 0xd3, 0xf7, 0xf4, + 0x93, 0x71, 0xef, 0x1c, 0xfb, 0xdb, 0x6b, 0xf4, 0x29, 0x10, 0xd0, 0x3e, 0x85, 0x20, 0x15, 0x36, + 0x3d, 0x67, 0xec, 0xf6, 0xb0, 0x1e, 0xf1, 0x97, 0x75, 0x3a, 0xfb, 0xab, 0xf1, 0xaa, 0xeb, 0x50, + 0x74, 0x4d, 0x66, 0x64, 0x11, 0x6b, 0xdf, 0x06, 0xf0, 0x5d, 0xab, 0xdf, 0xc7, 0x2e, 0xb1, 0xee, + 0x06, 0xb3, 0x2e, 0x87, 0xa8, 0x26, 0xfa, 0x3d, 0x58, 0x23, 0xba, 0x36, 0x75, 0x67, 0xc4, 0x8a, + 0x86, 0x4c, 0x0d, 0xf4, 0x70, 0x29, 0xaf, 0x64, 0xe3, 0x36, 0xa3, 0xac, 0xd9, 0xbe, 0x7b, 0xa9, + 0x15, 0x4e, 0x22, 0x20, 0x74, 0x9f, 0x39, 0x80, 0x89, 0x5d, 0xfd, 0x02, 0xbb, 0x1e, 0x89, 0xb5, + 0x4d, 0x2a, 0xc1, 0x3a, 0x07, 0x3f, 0x61, 0xd0, 0xe2, 0xc7, 0xb0, 0x39, 0xc3, 0x0b, 0xc9, 0x90, + 0x3c, 0xc7, 0x97, 0xdc, 0x53, 0xc9, 0x4f, 0x92, 0xad, 0x2e, 0x8c, 0xc1, 0x18, 0x73, 0x2f, 0x65, + 0x83, 0x47, 0x89, 0xf7, 0x25, 0xe5, 0xbf, 0x25, 0x58, 0xa7, 0x1c, 0x3a, 0x22, 0xfc, 0xa8, 0x5f, + 0x8f, 0x4f, 0x06, 0x56, 0x4f, 0x0f, 0xb9, 0xe4, 0x18, 0xe4, 0x10, 0x5f, 0xa2, 0x57, 0x23, 0xa1, + 0x2a, 0xbc, 0x3e, 0x00, 0xa0, 0xeb, 0x90, 0x3e, 0xc7, 0x97, 0x7a, 0x90, 0x2b, 0x57, 0xcf, 0xf1, + 0xa5, 0x6a, 0xa2, 0x03, 0xc8, 0x12, 0xb0, 0x7f, 0x39, 0xc2, 0xb4, 0x77, 0x5b, 0xdf, 0xfb, 0xd6, + 0x02, 0x4d, 0x05, 0xb2, 0x94, 0x0e, 0xf1, 0x65, 0xf7, 0x72, 0x84, 0xb5, 0xcc, 0x39, 0xfb, 0xa1, + 0xd4, 0x21, 0xc3, 0x61, 0x68, 0x1b, 0xb6, 0x0e, 0x6b, 0xcf, 0xf4, 0xee, 0xb3, 0xa3, 0x9a, 0x7e, + 0xdc, 0xea, 0x1c, 0xd5, 0x2a, 0xea, 0x63, 0xb5, 0x56, 0x95, 0x57, 0xd0, 0x75, 0xd8, 0x3c, 0x3a, + 0x38, 0xd2, 0xcb, 0x9d, 0x8a, 0xaa, 0xea, 0x65, 0xad, 0xd9, 0xd6, 0x6a, 0x55, 0x59, 0x42, 0x05, + 0xc8, 0x1e, 0x1d, 0xaa, 0x4f, 0xf5, 0xa3, 0x5a, 0x53, 0x4e, 0x28, 0xcf, 0x21, 0x47, 0x27, 0xa3, + 0xbc, 0x62, 0x14, 0x2e, 0xc5, 0x29, 0x1c, 0xed, 0x4f, 0xaf, 0x3e, 0xbf, 0xf7, 0xda, 0x32, 0x2b, + 0xd1, 0x26, 0xf3, 0x5b, 0x86, 0x07, 0x7d, 0x6c, 0x1d, 0x92, 0x21, 0x89, 0xed, 0x0b, 0xda, 0x86, + 0xe4, 0x34, 0xf2, 0x93, 0x60, 0x19, 0x6e, 0xdf, 0xdb, 0x4e, 0x52, 0x10, 0xfd, 0x4d, 0xb0, 0x4c, + 0xcb, 0xe5, 0x9d, 0x30, 0xf9, 0xc9, 0x13, 0xd4, 0x6a, 0x90, 0xa0, 0x5e, 0x81, 0xec, 0x0f, 0x0c, + 0xcb, 0xd7, 0x4f, 0x1d, 0x97, 0xe6, 0x8f, 0x9c, 0x96, 0x21, 0xe3, 0xc7, 0x8e, 0xab, 0xfc, 0x2a, + 0x09, 0x50, 0xc5, 0xa3, 0x81, 0x73, 0x49, 0xb2, 0x3f, 0x69, 0x11, 0x5c, 0xcc, 0x23, 0x67, 0xec, + 0x5a, 0xb4, 0x59, 0xcb, 0x69, 0x79, 0x01, 0x3b, 0x76, 0xad, 0xe2, 0xcf, 0x92, 0x64, 0x7b, 0x44, + 0x28, 0x86, 0xd8, 0xf6, 0x45, 0x66, 0xbe, 0x0d, 0x30, 0xf6, 0xb0, 0xab, 0xe3, 0xa1, 0x61, 0x0d, + 0x84, 0xaf, 0x10, 0x48, 0x8d, 0x00, 0x48, 0x1e, 0x30, 0x29, 0xcd, 0xb2, 0xb5, 0x1a, 0x18, 0x3a, + 0xcd, 0x03, 0x1f, 0xc3, 0xda, 0xd8, 0x8e, 0x92, 0x27, 0xaf, 0x24, 0x2f, 0x08, 0x02, 0xca, 0xe0, + 0x06, 0xa4, 0x7b, 0x8e, 0x7d, 0x6a, 0xf5, 0x69, 0xf2, 0xca, 0x69, 0x7c, 0x44, 0x12, 0x8c, 0x61, + 0x9a, 0x2e, 0xf6, 0x3c, 0xae, 0x2c, 0x31, 0x9c, 0xd1, 0x43, 0x7a, 0x46, 0x0f, 0xe8, 0x13, 0xc8, + 0x8e, 0x06, 0x86, 0x7f, 0xea, 0xb8, 0x43, 0x9a, 0x4f, 0xd7, 0xf7, 0x3e, 0x8c, 0xb7, 0x7f, 0xa8, + 
0xde, 0xd2, 0x8c, 0xde, 0x4a, 0x47, 0x9c, 0x85, 0x16, 0x30, 0x53, 0x2a, 0x90, 0x15, 0x50, 0xe2, + 0xda, 0x47, 0x8d, 0x72, 0xf7, 0x71, 0x5b, 0x6b, 0x4e, 0xb9, 0x76, 0x06, 0x92, 0x07, 0x87, 0x35, + 0x59, 0x42, 0x59, 0x48, 0x3d, 0x6e, 0xd4, 0x9e, 0xca, 0x09, 0x04, 0x90, 0xae, 0x1c, 0x77, 0xba, + 0xed, 0xa6, 0x9c, 0x54, 0xfe, 0x21, 0x0d, 0xf9, 0xaa, 0xd3, 0x3b, 0xc7, 0xae, 0x3a, 0x34, 0xfa, + 0xb8, 0xf8, 0x6f, 0x09, 0x58, 0x6d, 0x18, 0x97, 0xd8, 0x45, 0x6d, 0xc8, 0x99, 0x96, 0x8b, 0x7b, + 0xbe, 0x75, 0xc1, 0xbc, 0x6d, 0x7d, 0xef, 0xc1, 0x1c, 0xc1, 0x43, 0xfa, 0x12, 0xa5, 0x2d, 0x55, + 0x05, 0xa1, 0x16, 0xf2, 0x20, 0x79, 0xc0, 0x70, 0xfb, 0x63, 0xb2, 0x2a, 0x51, 0x46, 0x43, 0x80, + 0xf2, 0x1f, 0x12, 0xe4, 0x02, 0x32, 0xf4, 0x0a, 0x5c, 0xaf, 0xaa, 0x5a, 0xad, 0xd2, 0x55, 0x9f, + 0x4c, 0xc7, 0xea, 0x3a, 0x40, 0xb3, 0xac, 0xb6, 0xba, 0x65, 0xb5, 0x55, 0xd3, 0x64, 0x89, 0x2c, + 0x50, 0x3b, 0x6e, 0xc9, 0x09, 0xf2, 0xa3, 0xd2, 0xac, 0xca, 0x49, 0x94, 0x83, 0xd5, 0x46, 0x79, + 0xbf, 0xd6, 0x90, 0x53, 0x64, 0xa9, 0xb5, 0xa7, 0x47, 0xed, 0x4e, 0x4d, 0x5e, 0x25, 0xcf, 0x6b, + 0xad, 0x27, 0x72, 0x9a, 0xfc, 0x28, 0x57, 0xab, 0x72, 0x86, 0xa8, 0xa4, 0xd2, 0x3e, 0x7a, 0x26, + 0x67, 0x09, 0xd3, 0x5a, 0xab, 0xab, 0x3d, 0x3b, 0x6a, 0xab, 0xad, 0xae, 0x9c, 0x23, 0x74, 0x4f, + 0xda, 0x8d, 0xe3, 0x66, 0x4d, 0x06, 0x82, 0x75, 0xdc, 0xa9, 0x69, 0x72, 0x1e, 0xe5, 0x21, 0xf3, + 0x49, 0x5b, 0x3b, 0xac, 0xaa, 0x9a, 0x5c, 0xa0, 0x5c, 0xb4, 0x03, 0x79, 0x8d, 0x40, 0xdb, 0xad, + 0xfd, 0x63, 0xb5, 0x51, 0x95, 0xd7, 0x09, 0xa3, 0x4e, 0xb7, 0x7d, 0xd4, 0x51, 0x0f, 0x5a, 0xe5, + 0x86, 0xbc, 0x81, 0x36, 0x20, 0x5f, 0xaf, 0x95, 0x1b, 0xdd, 0x7a, 0xa5, 0x5e, 0xab, 0x1c, 0xca, + 0x32, 0x11, 0xae, 0x53, 0xaf, 0x35, 0x1a, 0xf2, 0x66, 0xf1, 0x29, 0xe4, 0x1f, 0x5b, 0x76, 0x1f, + 0xbb, 0x23, 0xd7, 0x62, 0x1b, 0xff, 0x8b, 0x07, 0xd1, 0x8e, 0x2b, 0x7d, 0xf1, 0x80, 0xb4, 0x59, + 0xf4, 0xc1, 0x9e, 0x7e, 0x32, 0x70, 0x4e, 0x78, 0x88, 0xa7, 0x2f, 0xf6, 0xf6, 0x07, 0xce, 0x09, + 0x7f, 0x40, 0x29, 0xf8, 0x01, 0xd3, 0xc5, 0x1e, 0x6d, 0xcc, 0x7e, 0x08, 0xab, 0xfb, 0x86, 0x67, + 0x4d, 0xfb, 0xa7, 0x08, 0xb8, 0x88, 0x7f, 0x0e, 0x50, 0x9b, 0x96, 0x5e, 0x21, 0x05, 0x0f, 0xb9, + 0xb7, 0xae, 0xb6, 0x74, 0x44, 0x74, 0x2d, 0xca, 0xa1, 0xf8, 0x9f, 0x12, 0xac, 0x57, 0xb1, 0x6b, + 0x5d, 0xe0, 0xa0, 0x1f, 0x9b, 0x9a, 0x43, 0x7a, 0xd9, 0x39, 0x48, 0xe7, 0x6d, 0x5a, 0x9e, 0x6f, + 0x88, 0xd3, 0xac, 0x35, 0x2d, 0x18, 0xa3, 0xc7, 0x00, 0x03, 0xe2, 0x85, 0xba, 0x65, 0x9f, 0x3a, + 0x34, 0x03, 0xe6, 0xf7, 0xee, 0x2f, 0xe9, 0xb9, 0x5a, 0x8e, 0x92, 0xaa, 0xf6, 0xa9, 0x83, 0x76, + 0x60, 0xf3, 0xc4, 0xf0, 0xb0, 0x3e, 0xa1, 0x40, 0x96, 0x3d, 0x37, 0xc8, 0x03, 0x2d, 0x54, 0xa2, + 0xf2, 0x33, 0xea, 0xbd, 0x5e, 0xcf, 0xb9, 0xc0, 0xee, 0x25, 0xaa, 0xc2, 0x9a, 0x61, 0x1b, 0x83, + 0x4b, 0xcf, 0xf2, 0xf4, 0x73, 0xcb, 0x36, 0x79, 0xf8, 0xdc, 0x99, 0xbf, 0x8f, 0x2e, 0x1d, 0x5a, + 0xb6, 0xa9, 0x15, 0x04, 0x15, 0x19, 0x15, 0x8f, 0x60, 0x53, 0xb0, 0x0c, 0x35, 0x39, 0xb1, 0xf1, + 0x92, 0x5e, 0x70, 0xe3, 0x55, 0x03, 0x78, 0x6c, 0x0d, 0x70, 0xdd, 0xf0, 0xce, 0xb0, 0x87, 0x1e, + 0x42, 0xee, 0xd4, 0x1a, 0x60, 0xfd, 0xcc, 0xf0, 0xce, 0x16, 0xef, 0xf4, 0x09, 0x81, 0x96, 0x3d, + 0xe5, 0xa4, 0xca, 0x0f, 0x20, 0x45, 0xfe, 0xa3, 0x87, 0x90, 0xa2, 0xf5, 0x99, 0xad, 0xee, 0xde, + 0x7c, 0x5a, 0xfa, 0x87, 0x96, 0x65, 0x4a, 0x30, 0xd9, 0x5d, 0x14, 0x78, 0x77, 0xa1, 0xdc, 0x85, + 0xac, 0xc0, 0x23, 0x31, 0xd7, 0x6a, 0xb7, 0x6a, 0xf2, 0x0a, 0x89, 0xc4, 0x4e, 0xbd, 0xbc, 0xf7, + 0xee, 0x7b, 0xb2, 0xa4, 0xfc, 0x0f, 0x01, 0x3b, 0x3e, 0x8e, 0x2d, 0x82, 0xdf, 0x84, 0x4d, 0xef, + 0xcc, 0x71, 0x7d, 0xdd, 
0xc4, 0x5e, 0xcf, 0xb5, 0x46, 0xc1, 0xd6, 0x34, 0xa7, 0xc9, 0xf4, 0x41, + 0x35, 0x84, 0x93, 0xce, 0x9e, 0x68, 0x6b, 0x02, 0x97, 0x9b, 0x96, 0xc0, 0xa3, 0xa8, 0xef, 0x40, + 0x8a, 0xda, 0x30, 0xb7, 0x9c, 0x0d, 0x29, 0x32, 0x7a, 0x0a, 0xe8, 0x62, 0x3c, 0xb0, 0xb1, 0x6b, + 0x9c, 0x58, 0x03, 0xcb, 0xe7, 0x8d, 0x4c, 0x9a, 0xda, 0x6b, 0x8e, 0x2f, 0x3e, 0x89, 0xe2, 0x13, + 0x25, 0xd4, 0x57, 0xb4, 0xcd, 0x8b, 0x69, 0x20, 0xfa, 0x0e, 0x00, 0xeb, 0x23, 0x29, 0x47, 0xd6, + 0x64, 0xdf, 0x59, 0xd0, 0x50, 0x70, 0x4e, 0xb9, 0x93, 0xa0, 0x75, 0xa9, 0x03, 0x50, 0xbf, 0xb6, + 0x88, 0xdb, 0xd3, 0x9e, 0x78, 0xa9, 0xf8, 0xa0, 0x09, 0x85, 0x72, 0x32, 0x3c, 0x4c, 0x41, 0xa8, + 0x0e, 0x99, 0x91, 0xd1, 0x3b, 0x27, 0x6c, 0x58, 0xcf, 0x3c, 0xa7, 0x47, 0x3b, 0x62, 0x48, 0x4d, + 0xc3, 0x36, 0xfa, 0xd8, 0x15, 0xc3, 0xfa, 0x8a, 0x26, 0xc8, 0xd1, 0x3e, 0xf0, 0x42, 0x4e, 0xca, + 0x1f, 0x6d, 0x5d, 0xe7, 0x1e, 0xfd, 0x84, 0x65, 0xb2, 0xbe, 0xa2, 0x45, 0xa8, 0xd0, 0xc7, 0xa4, + 0x60, 0xf1, 0x10, 0xdc, 0x46, 0x8b, 0x14, 0x13, 0x44, 0x2a, 0x59, 0x4e, 0x40, 0x83, 0x1e, 0x43, + 0xde, 0xc5, 0x03, 0xc3, 0xc7, 0x26, 0x0d, 0xf5, 0x0c, 0x0d, 0x89, 0xd7, 0x17, 0x18, 0x5c, 0x63, + 0xd8, 0xc7, 0xee, 0x40, 0x03, 0x37, 0xf8, 0x8d, 0x2a, 0xb0, 0x81, 0x9f, 0x8f, 0x2c, 0x7e, 0x80, + 0x42, 0x3b, 0x11, 0xb8, 0xb2, 0x13, 0x59, 0x0f, 0x49, 0xc4, 0x8e, 0x28, 0xba, 0x13, 0xcb, 0xbf, + 0xd0, 0x4e, 0x2c, 0x3c, 0xa0, 0xa5, 0xc4, 0x85, 0xab, 0x89, 0x19, 0x3a, 0x25, 0x7e, 0x1d, 0xd6, + 0xc3, 0xe3, 0x1f, 0x1a, 0x66, 0xd7, 0xd8, 0x11, 0x58, 0x00, 0xa5, 0x35, 0xe6, 0xdb, 0x00, 0xe1, + 0xfa, 0x49, 0x73, 0x19, 0xd6, 0x17, 0xf2, 0x93, 0x04, 0xf9, 0xc0, 0x38, 0xc1, 0x03, 0xb1, 0x85, + 0xa0, 0x03, 0xe5, 0xe7, 0x12, 0xa4, 0x48, 0x9c, 0xa0, 0x2d, 0x90, 0x0f, 0xd5, 0x56, 0x75, 0xaa, + 0xb8, 0xbf, 0x02, 0xd7, 0x8f, 0xca, 0x95, 0xc3, 0xf2, 0x41, 0x4d, 0x7f, 0x72, 0xdc, 0x68, 0xd5, + 0xb4, 0xf2, 0xbe, 0xda, 0x50, 0xbb, 0xcf, 0xe4, 0x04, 0xda, 0x84, 0x35, 0x5a, 0x64, 0xf5, 0x6a, + 0xad, 0x5b, 0x56, 0x1b, 0x1d, 0x39, 0x49, 0x8a, 0xab, 0xda, 0x24, 0xb8, 0xfb, 0xe5, 0x8e, 0xda, + 0x91, 0x53, 0xe8, 0x1a, 0x6c, 0x08, 0xf2, 0x66, 0xb9, 0x55, 0x3e, 0xa8, 0x69, 0xf2, 0x2a, 0x29, + 0xc9, 0xd5, 0xda, 0x51, 0xa3, 0xfd, 0xac, 0xbc, 0xdf, 0xa8, 0xc9, 0x69, 0xb4, 0x06, 0xb9, 0xaa, + 0xda, 0xa9, 0xb4, 0x9f, 0xd4, 0xb4, 0x67, 0x72, 0x86, 0x4c, 0x59, 0xee, 0x76, 0x6b, 0x9d, 0x6e, + 0xb9, 0xab, 0xb6, 0x5b, 0x7a, 0xf9, 0xb8, 0x5b, 0x6f, 0x6b, 0x64, 0xca, 0xec, 0x7e, 0x1e, 0x72, + 0xf4, 0x28, 0x9e, 0x84, 0x9a, 0xf2, 0x8b, 0x2c, 0x40, 0x78, 0xec, 0x18, 0x9b, 0x82, 0xa6, 0xab, + 0x6d, 0x62, 0xb6, 0xda, 0xde, 0xe2, 0x2c, 0x23, 0x45, 0x3b, 0x4b, 0x00, 0xb4, 0xd0, 0x8b, 0x54, + 0x93, 0x7e, 0x91, 0x54, 0xe3, 0xc1, 0xf5, 0xc9, 0x54, 0x63, 0xb2, 0x52, 0xc1, 0x73, 0xc3, 0xff, + 0x5b, 0x32, 0xdb, 0x4c, 0x42, 0x78, 0xb9, 0xa9, 0xaf, 0x68, 0x5b, 0x17, 0x31, 0x70, 0xa4, 0x8a, + 0xdd, 0xac, 0x98, 0x8c, 0x9d, 0x14, 0x28, 0x0b, 0x12, 0x51, 0xc8, 0x92, 0x6d, 0x5d, 0x05, 0xab, + 0x53, 0xb8, 0x6e, 0xb2, 0x6e, 0x81, 0x65, 0xa4, 0x80, 0x25, 0x73, 0xf9, 0xb7, 0xaf, 0xce, 0x4c, + 0x93, 0xcd, 0x46, 0x7d, 0x45, 0xbb, 0xc6, 0x19, 0xd2, 0xc7, 0x62, 0x9e, 0x21, 0x6c, 0x59, 0xb6, + 0xe7, 0x1b, 0x83, 0x01, 0xf3, 0x6c, 0x31, 0x0d, 0x0b, 0x8e, 0xf7, 0x97, 0xca, 0x5c, 0x6a, 0x84, + 0x41, 0x64, 0x3a, 0x6b, 0x16, 0x8c, 0x7a, 0x80, 0xcc, 0xa0, 0x8b, 0x0f, 0x26, 0x63, 0x69, 0x72, + 0xef, 0xc5, 0x37, 0x00, 0xa4, 0x18, 0x98, 0x33, 0xbb, 0xa9, 0x13, 0x40, 0x66, 0xd0, 0x22, 0x04, + 0x93, 0x6c, 0xd0, 0x49, 0x1e, 0x5c, 0x91, 0xfb, 0x4a, 0x33, 0xcd, 0x05, 0x9d, 0x63, 0xa6, 0xe3, + 0x18, 0xc1, 0xb5, 0xc8, 0x11, 0x5e, 0x30, 0x89, 
0x4c, 0x27, 0xf9, 0xe8, 0xcb, 0x1d, 0x4c, 0x86, + 0x13, 0x22, 0x63, 0xf6, 0xfc, 0xf0, 0x2e, 0xc9, 0xc3, 0x43, 0x6c, 0x5a, 0xac, 0xcb, 0x59, 0x15, + 0x51, 0x14, 0x80, 0xa6, 0x93, 0x63, 0xee, 0x65, 0x92, 0x23, 0x7c, 0x05, 0xc9, 0x71, 0x3f, 0x07, + 0x19, 0xae, 0x28, 0xe5, 0xd7, 0xab, 0xb0, 0x3e, 0xe9, 0x46, 0xc5, 0x9f, 0x27, 0xa0, 0x50, 0xb5, + 0x3c, 0xdf, 0xb5, 0x4e, 0xc6, 0x74, 0x3d, 0x37, 0x21, 0xd3, 0x1b, 0x89, 0x9d, 0x34, 0xdb, 0x79, + 0x8e, 0xe8, 0xe6, 0xf1, 0x18, 0x0a, 0x86, 0xdb, 0x3b, 0xb3, 0x7c, 0xdc, 0x0b, 0x0e, 0x10, 0xe6, + 0xee, 0xc3, 0xa6, 0x9c, 0xb5, 0x1c, 0x21, 0xd4, 0x26, 0xd8, 0xa0, 0x27, 0xb0, 0x4e, 0x32, 0xb7, + 0xe7, 0x07, 0x87, 0x17, 0x6c, 0xab, 0xbc, 0xbb, 0x74, 0xb2, 0x60, 0x64, 0xda, 0x1a, 0x63, 0x23, + 0x0e, 0x3b, 0xbe, 0x06, 0x30, 0x34, 0x2c, 0xdb, 0x37, 0x2c, 0x1b, 0x8b, 0x93, 0x86, 0x08, 0x44, + 0x54, 0x89, 0x74, 0x58, 0x25, 0xee, 0x92, 0x0d, 0x7f, 0xd8, 0x83, 0x65, 0x98, 0xad, 0x23, 0xa0, + 0xe2, 0x8f, 0x24, 0xc8, 0x36, 0x9c, 0x9e, 0xb1, 0x58, 0x51, 0x2a, 0x64, 0xc4, 0x52, 0x12, 0x5f, + 0x6e, 0x29, 0x82, 0x9e, 0x64, 0xf6, 0x91, 0xe1, 0x9f, 0xf1, 0xec, 0x4c, 0x7f, 0x17, 0x7d, 0xc8, + 0x70, 0xed, 0xc6, 0x26, 0xfe, 0x63, 0x28, 0x98, 0x11, 0x7b, 0x6e, 0x03, 0x6d, 0x1d, 0x96, 0x33, + 0x53, 0xd4, 0x11, 0xb4, 0x09, 0x36, 0x45, 0x1f, 0xae, 0xc5, 0x64, 0x9c, 0x58, 0x09, 0x54, 0xc8, + 0x0e, 0xb8, 0x92, 0xf8, 0xeb, 0xa8, 0xb7, 0x96, 0x9a, 0x5d, 0x68, 0x56, 0x0b, 0xc8, 0x95, 0xff, + 0x0f, 0x85, 0xa8, 0xeb, 0xa0, 0x57, 0x61, 0xbb, 0xac, 0x55, 0xea, 0x6a, 0xb7, 0x56, 0xe9, 0x1e, + 0x6b, 0xb5, 0xd9, 0xf3, 0x85, 0xa7, 0xef, 0xbf, 0xc7, 0xf6, 0xe1, 0x4f, 0xdf, 0xfb, 0xb6, 0x9c, + 0x50, 0xfe, 0x57, 0x82, 0xad, 0xb8, 0xb3, 0xfc, 0xc9, 0x83, 0x40, 0x69, 0xfa, 0x20, 0xf0, 0x19, + 0x14, 0x7a, 0x8e, 0xed, 0x93, 0x6c, 0x49, 0x5b, 0xdb, 0x24, 0x75, 0xf5, 0xf7, 0x96, 0x7f, 0x57, + 0x50, 0xaa, 0x30, 0x72, 0xba, 0xd1, 0xc8, 0xf7, 0xc2, 0x01, 0x71, 0xcb, 0x51, 0x7f, 0xa4, 0xf3, + 0x73, 0x46, 0x5a, 0x95, 0xeb, 0x2b, 0x5a, 0x76, 0xd4, 0x1f, 0x1d, 0xe2, 0x4b, 0xd5, 0x54, 0xaa, + 0x90, 0x8f, 0xd0, 0x92, 0x05, 0x57, 0xda, 0xad, 0x6e, 0xad, 0xd5, 0x8d, 0x3b, 0x2b, 0xbc, 0x09, + 0xd7, 0x3a, 0x6a, 0xf3, 0xa8, 0x51, 0xd3, 0xc9, 0x26, 0x5f, 0x6d, 0x1d, 0xe8, 0xdf, 0xed, 0xb4, + 0x5b, 0xb2, 0xb4, 0x9f, 0x15, 0x27, 0x99, 0xca, 0x3f, 0xa6, 0x20, 0xcd, 0xce, 0x89, 0x51, 0x03, + 0xd6, 0x3d, 0xdf, 0x71, 0xe9, 0x5b, 0x6a, 0x0a, 0xe1, 0x9b, 0xb6, 0x39, 0xbb, 0xa5, 0x0e, 0xc3, + 0x65, 0xc4, 0xf5, 0x15, 0x6d, 0xcd, 0x8b, 0x02, 0x50, 0x85, 0x64, 0xc6, 0x91, 0x23, 0x58, 0x2d, + 0xfc, 0x8e, 0x43, 0xc3, 0x23, 0x27, 0xe0, 0x03, 0x6e, 0x30, 0x42, 0xbf, 0x0b, 0x37, 0xc5, 0x6b, + 0x02, 0x7d, 0x4a, 0xb6, 0xd4, 0xd2, 0xb2, 0x69, 0xd7, 0x05, 0x8f, 0x09, 0x30, 0x6a, 0x92, 0x9d, + 0x3e, 0xdf, 0x54, 0x62, 0x8f, 0xef, 0xbe, 0xbf, 0xb5, 0xe8, 0x28, 0xbd, 0x14, 0x6e, 0x49, 0xd9, + 0xc9, 0x36, 0x9c, 0x86, 0x7b, 0xd4, 0x8f, 0x20, 0x43, 0x0d, 0xf9, 0xdc, 0xe7, 0x1d, 0xc6, 0xbd, + 0x45, 0xac, 0x2a, 0x0c, 0x55, 0x13, 0x34, 0xa8, 0x0b, 0xd7, 0x0c, 0xd3, 0xb4, 0x88, 0x77, 0x18, + 0x03, 0x9d, 0x43, 0x49, 0x67, 0x94, 0x5c, 0x96, 0x15, 0x0a, 0xe9, 0x39, 0xc8, 0x2b, 0xea, 0xb0, + 0x31, 0x25, 0x73, 0xcc, 0x09, 0xfa, 0x7b, 0xd1, 0x3d, 0xee, 0x5c, 0x23, 0x85, 0x7c, 0x22, 0x67, + 0xec, 0xc4, 0x93, 0x98, 0x41, 0x94, 0x5f, 0x48, 0xa4, 0xcb, 0x0e, 0x4c, 0x37, 0xf9, 0x06, 0x49, + 0x9a, 0x7e, 0x83, 0x74, 0x0b, 0x72, 0xd4, 0x3d, 0x68, 0x76, 0xe0, 0x2f, 0x24, 0x09, 0xa0, 0xc5, + 0x9a, 0xd3, 0xfc, 0x89, 0x6b, 0xd8, 0xbd, 0xb3, 0x48, 0xef, 0x49, 0x3c, 0x83, 0x01, 0x29, 0xca, + 0x2d, 0xc8, 0xfa, 0x46, 0x9f, 0x3d, 0x4f, 0xf1, 0xe7, 0x19, 0xdf, 0xe8, 
0xd3, 0x87, 0x77, 0x00, + 0x7a, 0xce, 0x70, 0x68, 0xf9, 0xba, 0x77, 0x66, 0xb0, 0xa2, 0x4c, 0xb6, 0x4f, 0x0c, 0xd6, 0x39, + 0x33, 0xf6, 0x01, 0xb2, 0x2e, 0xbe, 0xb0, 0x48, 0x0e, 0x55, 0x74, 0x58, 0x9b, 0xf4, 0x8b, 0x1b, + 0x90, 0xe6, 0x2f, 0x61, 0x78, 0xde, 0x66, 0x23, 0x02, 0x77, 0x4e, 0xe8, 0x77, 0x71, 0xfc, 0xd3, + 0x0e, 0x36, 0x22, 0x21, 0xdb, 0xc7, 0x76, 0xf4, 0x0d, 0x73, 0x52, 0x8b, 0x40, 0x94, 0x7f, 0x2f, + 0xc0, 0xe6, 0x4c, 0x2e, 0x27, 0xfa, 0xe9, 0x5d, 0x78, 0x9e, 0xee, 0xf5, 0x1c, 0x5e, 0x2c, 0x13, + 0x5a, 0x8e, 0x40, 0x3a, 0x04, 0x80, 0x1a, 0x90, 0xf5, 0xf0, 0x05, 0x26, 0xdd, 0x08, 0x4f, 0x2f, + 0x6f, 0x2f, 0x5b, 0x25, 0x3a, 0x9c, 0x4e, 0x0b, 0x38, 0x90, 0xdd, 0xaf, 0x68, 0x86, 0x52, 0xd4, + 0xa1, 0x4a, 0xcb, 0x32, 0x63, 0x89, 0x5c, 0x13, 0xe4, 0xc5, 0x5f, 0x49, 0x90, 0x11, 0x25, 0x74, + 0x0b, 0x56, 0xf1, 0xc8, 0xe9, 0x9d, 0x51, 0x3d, 0xad, 0x6a, 0x6c, 0x10, 0xa4, 0xfc, 0xc4, 0xe4, + 0xdb, 0x67, 0xa1, 0x6f, 0xb1, 0x93, 0x10, 0x63, 0xd4, 0xe6, 0x3b, 0x89, 0xd5, 0x45, 0x07, 0xce, + 0x73, 0x6b, 0xa1, 0xf8, 0x1f, 0xee, 0x32, 0x94, 0x77, 0x20, 0x1f, 0x01, 0x22, 0x80, 0x74, 0xab, + 0xad, 0x35, 0xcb, 0x0d, 0x79, 0x05, 0xe5, 0x21, 0xd3, 0x54, 0x5b, 0x6a, 0xf3, 0xb8, 0x29, 0x4b, + 0x74, 0x50, 0x7e, 0x4a, 0x07, 0x89, 0xe2, 0x2f, 0x93, 0x90, 0x66, 0x6b, 0x9d, 0x5f, 0xb8, 0xb7, + 0xc3, 0x33, 0x04, 0x76, 0xe8, 0x1e, 0x9c, 0x09, 0x18, 0xb0, 0x35, 0xb4, 0x6c, 0xdd, 0x38, 0x3d, + 0xc5, 0x3d, 0xb2, 0x27, 0x17, 0xf5, 0x3d, 0xfd, 0xe5, 0xea, 0x3b, 0x1a, 0x5a, 0x76, 0x99, 0xf3, + 0x12, 0xca, 0x26, 0x53, 0x18, 0xcf, 0x67, 0xa7, 0xc8, 0x7c, 0xd9, 0x29, 0x8c, 0xe7, 0xd3, 0x53, + 0xdc, 0x83, 0x35, 0xe1, 0x31, 0x91, 0xc0, 0xd2, 0x0a, 0x02, 0x48, 0x63, 0x6b, 0xaa, 0x0b, 0xca, + 0xcd, 0x74, 0x41, 0xc8, 0x84, 0xf5, 0x53, 0xeb, 0x39, 0x36, 0xf5, 0xa0, 0xca, 0xaf, 0x2e, 0x6a, + 0xc0, 0xaf, 0xd8, 0xde, 0x05, 0x55, 0x7f, 0x8d, 0x32, 0x0d, 0xda, 0xab, 0xaf, 0x43, 0x81, 0x6b, + 0x9f, 0xd5, 0x60, 0x60, 0x82, 0x70, 0x18, 0xe1, 0x53, 0xfc, 0x2f, 0x09, 0xb6, 0xe2, 0xb6, 0x8a, + 0xc4, 0x45, 0x83, 0xba, 0x9d, 0xe3, 0x07, 0x7d, 0xd1, 0x80, 0x4b, 0xbd, 0x74, 0xc0, 0x4d, 0x46, + 0xf7, 0xea, 0x74, 0x74, 0x3f, 0x83, 0x35, 0x21, 0xbc, 0xe5, 0x79, 0x63, 0xcc, 0x5f, 0x81, 0x7f, + 0x7b, 0xd9, 0x19, 0x79, 0x67, 0xa4, 0x12, 0x5a, 0x4d, 0xe8, 0x81, 0x8e, 0x8a, 0x5f, 0x24, 0xa0, + 0x10, 0x7d, 0x8c, 0xbe, 0x07, 0x9b, 0x81, 0xd3, 0x04, 0x16, 0x91, 0x7e, 0x13, 0x16, 0x91, 0x05, + 0xdf, 0xc0, 0x28, 0xb3, 0xa6, 0x4f, 0x7c, 0x05, 0xa6, 0x9f, 0xf1, 0xd3, 0xe4, 0xac, 0x9f, 0x16, + 0xff, 0x5c, 0x82, 0xeb, 0xb1, 0xdc, 0x96, 0x8a, 0xef, 0xc4, 0x64, 0x7c, 0x47, 0x5a, 0xf6, 0xd4, + 0xcb, 0xb5, 0xec, 0xca, 0xef, 0x43, 0x56, 0xf8, 0x0b, 0xda, 0x86, 0xad, 0x4e, 0xed, 0x49, 0x4d, + 0x53, 0xbb, 0xcf, 0xa6, 0x3a, 0x37, 0x91, 0xa8, 0xca, 0x0d, 0xd6, 0xae, 0x36, 0xda, 0x9f, 0xb0, + 0xb7, 0x61, 0xcd, 0x5a, 0x55, 0x3d, 0x6e, 0xca, 0x49, 0x94, 0x85, 0x54, 0x5d, 0x3d, 0xa8, 0xcb, + 0x29, 0x54, 0x80, 0x6c, 0x45, 0x53, 0xbb, 0x6a, 0xa5, 0xdc, 0x90, 0x57, 0x95, 0x5f, 0x27, 0x60, + 0x6d, 0xa2, 0x2f, 0x40, 0x4d, 0x80, 0xde, 0xc0, 0x19, 0x9b, 0x3a, 0xa9, 0xaf, 0xdc, 0xf2, 0x73, + 0xda, 0x9c, 0x0a, 0xc1, 0x0b, 0x8b, 0x38, 0xe7, 0x40, 0x6b, 0xa7, 0x78, 0x82, 0x2a, 0x90, 0xee, + 0x63, 0xd7, 0xb5, 0xc4, 0xfb, 0x97, 0x37, 0xe3, 0x59, 0x1d, 0x50, 0x9c, 0x69, 0x3e, 0x9c, 0x14, + 0x3d, 0x82, 0x64, 0xdf, 0xf2, 0xf9, 0x56, 0xee, 0x1b, 0x73, 0x38, 0xcc, 0x92, 0x13, 0x22, 0x74, + 0x00, 0x69, 0x7a, 0x40, 0x27, 0x6a, 0xd9, 0xee, 0x12, 0xcd, 0x51, 0xa9, 0x41, 0x29, 0x58, 0xd7, + 0xc6, 0xc9, 0x8b, 0x1f, 0x40, 0x3e, 0x02, 0x7e, 0x91, 0x4f, 0x0b, 0xc8, 0xa6, 0x99, 0xb7, 0x68, + 
0xca, 0x5f, 0x4a, 0x50, 0x28, 0x0f, 0x2c, 0xc3, 0x13, 0xfa, 0xfe, 0x90, 0x17, 0x34, 0xf6, 0xae, + 0x61, 0xce, 0x71, 0x75, 0x94, 0x22, 0x7a, 0x44, 0x16, 0x53, 0x3d, 0x95, 0x8f, 0x17, 0x9e, 0x43, + 0xe6, 0x60, 0xf5, 0xb1, 0xfa, 0x94, 0x7e, 0x04, 0x40, 0xbc, 0xa6, 0xfd, 0x84, 0x9e, 0x1d, 0x26, + 0x08, 0xbc, 0xdd, 0xad, 0xd7, 0x34, 0x39, 0xa5, 0xfc, 0x93, 0x04, 0x37, 0xe2, 0x4d, 0x8b, 0xde, + 0x85, 0x0c, 0xed, 0xc3, 0x78, 0x8f, 0x36, 0xf7, 0x5b, 0x12, 0x42, 0xa9, 0x9a, 0x5a, 0xda, 0xa5, + 0xff, 0x49, 0x87, 0x26, 0x0a, 0x78, 0x74, 0x9f, 0x02, 0x02, 0xa8, 0x9a, 0x48, 0x85, 0x35, 0x83, + 0x2c, 0x52, 0xf4, 0xb2, 0xdc, 0xd8, 0xca, 0xd5, 0xfa, 0xa8, 0xaf, 0x68, 0x05, 0x23, 0x32, 0x9e, + 0x68, 0xd7, 0x7e, 0x29, 0xc1, 0xb5, 0x18, 0xdf, 0x42, 0xaf, 0x40, 0xf6, 0xcc, 0xf1, 0xfc, 0x48, + 0x58, 0x67, 0xc8, 0x98, 0xc4, 0xf5, 0xeb, 0xb0, 0xce, 0xdc, 0x4e, 0x17, 0x17, 0x1b, 0xf8, 0x87, + 0x96, 0x0c, 0x2a, 0x3e, 0xf0, 0x9b, 0x5a, 0x53, 0x72, 0x99, 0x35, 0xa5, 0x7e, 0x23, 0x6b, 0xaa, + 0x81, 0x3c, 0xed, 0xec, 0x31, 0xa7, 0xd4, 0x77, 0x62, 0x74, 0x1e, 0x95, 0x4e, 0xf9, 0x3e, 0xa4, + 0x99, 0x99, 0x50, 0x13, 0x36, 0x44, 0xf3, 0x3d, 0x69, 0xdd, 0x7b, 0x8b, 0x3f, 0x10, 0xa6, 0xd4, + 0x64, 0x2f, 0x37, 0x8a, 0x02, 0x10, 0x82, 0xe4, 0x38, 0x62, 0x65, 0x32, 0xd8, 0x4f, 0x41, 0xc2, + 0x32, 0x95, 0x43, 0x58, 0x9b, 0xa0, 0x7d, 0x99, 0xb6, 0x7f, 0xef, 0x6f, 0x64, 0xc8, 0x1c, 0x30, + 0xf1, 0xd0, 0x4f, 0x24, 0x58, 0x9b, 0xf8, 0xc2, 0x1e, 0xed, 0xcc, 0xcb, 0x33, 0xb3, 0x57, 0x36, + 0x8a, 0x57, 0x7e, 0x8a, 0xab, 0x3c, 0xf8, 0xd1, 0x3f, 0xff, 0xcb, 0x4f, 0x13, 0xdf, 0x44, 0x6f, + 0xee, 0x06, 0xb7, 0x7b, 0x7e, 0x48, 0x84, 0xfa, 0x48, 0x7c, 0x22, 0xbd, 0xbb, 0xb3, 0x1b, 0xf9, + 0x5a, 0x77, 0x77, 0xe7, 0x33, 0xf4, 0x57, 0x12, 0x6c, 0x4c, 0x7d, 0x10, 0x8c, 0xe6, 0xe4, 0xd1, + 0xf8, 0xab, 0x21, 0xc5, 0xb7, 0x96, 0xc4, 0x66, 0xdf, 0xf7, 0xc6, 0xca, 0xc8, 0xbe, 0x7b, 0x8d, + 0x48, 0xf9, 0x59, 0x54, 0x4c, 0xf4, 0xa7, 0x12, 0xc8, 0xd3, 0x37, 0x42, 0xd0, 0xbc, 0xb7, 0xd7, + 0xf1, 0x37, 0x47, 0x8a, 0x37, 0x66, 0xce, 0x17, 0x6b, 0xc3, 0x91, 0x7f, 0x29, 0xc4, 0xd9, 0x79, + 0x01, 0x95, 0xfd, 0xb5, 0x04, 0xf2, 0xf4, 0x8d, 0x93, 0x79, 0xe2, 0xcc, 0xb9, 0x99, 0xb2, 0x84, + 0x2d, 0x3f, 0xa2, 0x82, 0x3d, 0x54, 0x96, 0xd7, 0xd3, 0xa3, 0xe8, 0x75, 0x11, 0x22, 0xe4, 0xf4, + 0x05, 0x95, 0x79, 0x42, 0xce, 0xb9, 0xc8, 0xb2, 0xbc, 0x90, 0x7b, 0xcb, 0x6b, 0x6f, 0x42, 0xc8, + 0xbf, 0x90, 0xe8, 0xed, 0xaa, 0xc9, 0x1b, 0x27, 0xa8, 0xb4, 0x44, 0x4c, 0x44, 0x6e, 0x54, 0x14, + 0x17, 0x7c, 0x1e, 0xaf, 0x3c, 0xa4, 0x02, 0x3e, 0x40, 0xbb, 0x4b, 0x0b, 0xb8, 0xcb, 0x3e, 0xa8, + 0xff, 0xa9, 0x04, 0x1b, 0x53, 0x9f, 0x6d, 0xcf, 0x8b, 0x8b, 0xf8, 0xaf, 0xbb, 0x8b, 0x8b, 0x3f, + 0x0b, 0x50, 0xde, 0xa1, 0x92, 0xbd, 0xa5, 0xbc, 0x71, 0x85, 0x7d, 0x83, 0x8f, 0xd6, 0x1f, 0x49, + 0x3b, 0x54, 0xaa, 0xa9, 0x0f, 0xd7, 0xe7, 0x49, 0x15, 0xff, 0x7d, 0xfb, 0x92, 0x52, 0xed, 0xbd, + 0xb1, 0x48, 0x5f, 0x81, 0x48, 0xc4, 0x9e, 0xd2, 0x0e, 0x7a, 0x0e, 0x19, 0x7e, 0xbb, 0x08, 0xbd, + 0x36, 0xd7, 0x76, 0xcb, 0x5a, 0xec, 0x9b, 0x54, 0x82, 0xd7, 0xd1, 0xbd, 0x05, 0x12, 0x50, 0x13, + 0x91, 0x50, 0xfc, 0xb1, 0x04, 0xb9, 0xe0, 0xfa, 0x04, 0xfa, 0xc6, 0xfc, 0x4c, 0x14, 0xbd, 0xa3, + 0x54, 0xbc, 0x7f, 0x25, 0x1e, 0xcf, 0x55, 0x71, 0xb2, 0xc4, 0xd8, 0x88, 0x79, 0xcc, 0xa7, 0x00, + 0xe1, 0x6d, 0x24, 0x74, 0x7f, 0x51, 0x7a, 0x8a, 0xea, 0x62, 0x5e, 0x62, 0xe2, 0x73, 0xef, 0x2c, + 0xa5, 0x87, 0xcf, 0x25, 0x80, 0xf0, 0x86, 0xd3, 0xbc, 0xc9, 0x67, 0xee, 0x40, 0x2d, 0x34, 0x04, + 0xcf, 0x8c, 0xca, 0x32, 0x8b, 0x7f, 0xc4, 0xee, 0x25, 0x11, 0x31, 0xc2, 0x1b, 0x4f, 0xf3, 0xc4, + 0x98, 0xb9, 0x13, 0xb5, 
0x8c, 0x18, 0x7b, 0xcb, 0xe8, 0x81, 0x8b, 0xf1, 0x77, 0x12, 0xbb, 0x8b, + 0x39, 0x75, 0xbf, 0x07, 0xbd, 0xbd, 0xd8, 0xee, 0x31, 0xb5, 0xed, 0xc1, 0x0b, 0x50, 0x70, 0x9f, + 0x59, 0x26, 0xe3, 0x08, 0x79, 0xa3, 0xa9, 0x67, 0xef, 0xf3, 0x14, 0x6c, 0xf0, 0x46, 0x41, 0x5c, + 0x8a, 0x42, 0xcf, 0x61, 0x6d, 0xe2, 0x3a, 0xeb, 0xbc, 0x7e, 0x21, 0xee, 0xce, 0xeb, 0x5c, 0xcf, + 0xba, 0x47, 0x25, 0xbc, 0xad, 0xa0, 0xdd, 0xe8, 0x1d, 0x60, 0x3a, 0xe1, 0x23, 0x71, 0xef, 0x15, + 0x5d, 0x02, 0x84, 0xf7, 0x5e, 0xe7, 0x59, 0x72, 0xe6, 0x66, 0x6c, 0x71, 0xf1, 0x5d, 0x2d, 0x31, + 0x35, 0xba, 0x35, 0x5f, 0x39, 0x9f, 0xa1, 0x3f, 0x96, 0xa0, 0x10, 0xbd, 0x1a, 0x86, 0xde, 0x9c, + 0x6f, 0x85, 0xa9, 0xeb, 0xb6, 0xc5, 0x9d, 0x65, 0x50, 0xb9, 0xa5, 0x8a, 0x54, 0x98, 0x2d, 0x14, + 0xa3, 0x07, 0xa2, 0xf8, 0x89, 0x2b, 0xb6, 0xf3, 0x14, 0x1f, 0x77, 0x0f, 0xf7, 0x2a, 0xc5, 0xef, + 0x2c, 0x5a, 0xfd, 0x7e, 0xee, 0x77, 0x32, 0x7c, 0xa6, 0x93, 0x34, 0xa5, 0x7f, 0xe7, 0xff, 0x02, + 0x00, 0x00, 0xff, 0xff, 0x64, 0x71, 0x32, 0x3e, 0xac, 0x3d, 0x00, 0x00, } diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go index 4bfe6548..34efa287 100644 --- a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.pb.gw.go @@ -121,7 +121,7 @@ func request_Grafeas_CreateOccurrence_0(ctx context.Context, marshaler runtime.M var protoReq CreateOccurrenceRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -156,7 +156,7 @@ func request_Grafeas_UpdateOccurrence_0(ctx context.Context, marshaler runtime.M var protoReq UpdateOccurrenceRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Occurrence); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -218,7 +218,7 @@ func request_Grafeas_CreateOperation_0(ctx context.Context, marshaler runtime.Ma var protoReq CreateOperationRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -249,7 +249,7 @@ func request_Grafeas_UpdateOperation_0(ctx context.Context, marshaler runtime.Ma var protoReq UpdateOperationRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -373,7 +373,7 @@ func request_Grafeas_CreateNote_0(ctx context.Context, marshaler runtime.Marshal var protoReq CreateNoteRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -412,7 +412,7 @@ func request_Grafeas_UpdateNote_0(ctx context.Context, marshaler 
runtime.Marshal var protoReq UpdateNoteRequest var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Note); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -482,22 +482,8 @@ func request_GrafeasProjects_CreateProject_0(ctx context.Context, marshaler runt var protoReq CreateProjectRequest var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") - } - - protoReq.Name, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Project); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } msg, err := client.CreateProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -1241,7 +1227,7 @@ func RegisterGrafeasProjectsHandlerClient(ctx context.Context, mux *runtime.Serv } var ( - pattern_GrafeasProjects_CreateProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2}, []string{"v1alpha1", "projects", "name"}, "")) + pattern_GrafeasProjects_CreateProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1alpha1", "projects"}, "")) pattern_GrafeasProjects_GetProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 2, 5, 2}, []string{"v1alpha1", "projects", "name"}, "")) diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto index 0ba5c78a..d9df8ebb 100644 --- a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.proto @@ -25,9 +25,8 @@ option go_package = "grafeas"; // Request to insert a new Project. message CreateProjectRequest { - // The name of the project of the form - // "projects/{project_id}" - string name = 1; + // The project to be inserted + Project project = 1; } // Request to get a Project. @@ -313,7 +312,7 @@ message AttestationAuthority { // The human readable name of this Attestation Authority, e.g. "qa". string human_readable_name = 1; } - message Attestation { + message AttestationDetails { // The signature, generally over the `resource_url`, that verifies this // attestation. The semantics of the signature veracity are ultimately // determined by the verification engine. @@ -469,7 +468,7 @@ message Command { // An artifact that can be deployed in some runtime. message Deployable { // The period during which some deployable was active in a runtime. - message Deployment { + message DeploymentDetails { // Types of platforms. enum Platform { // Unknown @@ -616,7 +615,7 @@ message DockerImage { // Derived describes the derived image portion (Occurrence) of the // DockerImage relationship. This image would be produced from a Dockerfile // with FROM . - message Derived { + message DerivedDetails { // The fingerprint of the derived image Fingerprint fingerprint = 1; @@ -643,7 +642,7 @@ message DockerImage { // note for a resource indicates that analysis hasn't started. 
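The CreateProject change above moves the project out of the URL path and into the request body. A minimal sketch of a client call against the new route, assuming a gateway listening on localhost:8080 and a hypothetical project ID "myproject" (both placeholders, not part of this change):

    package main

    import (
        "bytes"
        "fmt"
        "log"
        "net/http"
    )

    func main() {
        // Old route: POST /v1alpha1/{name=projects/*} with the ID in the path.
        // New route: POST /v1alpha1/projects with the Project message as the body.
        body := bytes.NewBufferString(`{"name": "projects/myproject"}`)

        resp, err := http.Post("http://localhost:8080/v1alpha1/projects", "application/json", body)
        if err != nil {
            log.Fatal(err)
        }
        defer resp.Body.Close()

        // CreateProject returns google.protobuf.Empty, so only the status matters here.
        fmt.Println(resp.Status)
    }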
message Discovery { // Provides information about the scan status of a discovered resource. - message Discovered { + message DiscoveredDetails { // Output only. An operation that indicates the status of the current scan. google.longrunning.Operation operation = 1; } @@ -711,6 +710,9 @@ message Note { // The note and occurrence track the initial discovery status of a resource. DISCOVERY = 7; + + // This represents a logical "role" that can attest to artifacts. + ATTESTATION_AUTHORITY = 8; } // The name of the note in the form @@ -761,6 +763,10 @@ message Note { // Output only. The time this note was last updated. This field can be used as // a filter in list requests. google.protobuf.Timestamp update_time = 12; + + // The name of the `Operation` in the form + // "projects/{project_id}/operation/{OPERATION_ID}" + string operation_name = 19; } // `Occurrence` includes information about analysis occurrences for an image. @@ -793,16 +799,19 @@ message Occurrence { // Describes how this resource derives from the basis // in the associated note. - DockerImage.Derived derived_image = 11; + DockerImage.DerivedDetails derived_image_details = 11; // Describes the installation of a package on the linked resource. - PackageManager.Installation installation = 12; + PackageManager.InstallationDetails installation_details = 12; // Describes the deployment of an artifact on a runtime. - Deployable.Deployment deployment = 14; + Deployable.DeploymentDetails deployment_details = 14; // Describes the initial scan status for this resource. - Discovery.Discovered discovered = 15; + Discovery.DiscoveredDetails discovered_details = 15; + + // Describes an attestation of an artifact. + AttestationAuthority.AttestationDetails attestation_details = 16; } // A description of actions that can be taken to remedy the `Note` @@ -813,6 +822,11 @@ message Occurrence { // Output only. The time this `Occurrence` was last updated. google.protobuf.Timestamp update_time = 10; + + // The name of the `Operation` in the form + // "projects/{project_id}/operation/{OPERATION_ID}" + string operation_name = 19; + } // PackageManager provides metadata about available / installed packages. @@ -870,7 +884,7 @@ message PackageManager { // This represents how a particular software package may be installed on // a system. - message Installation { + message InstallationDetails { // Output only. The name of the installed package. string name = 1; @@ -933,13 +947,22 @@ message PgpSignedAttestation { // be present in the signature content above, but that is not expected to be // used by the verifier. oneof key_id { - // The ID of the key, as output by `gpg --list-keys`. This should be 8 - // hexidecimal digits, capitalized. e.g. - // $ gpg --list-keys pub - // 2048R/A663AEEA 2017-08-01 uid Fake Name - // - // In the above example, the `key_id` is "A663AEEA". - // Note that in practice this ID is the last 64 bits of the key fingerprint. + // The cryptographic fingerprint of the key used to generate the signature, + // as output by, e.g. `gpg --list-keys`. This should be the version 4, full + // 160-bit fingerprint, expressed as a 40 character hexidecimal string. See + // https://tools.ietf.org/html/rfc4880#section-12.2 for details. + // Implementations may choose to acknowledge "LONG", "SHORT", or other + // abbreviated key IDs, but only the full fingerprint is guaranteed to work. + // In gpg, the full fingerprint can be retrieved from the `fpr` field + // returned when calling --list-keys with --with-colons. 
For example: + // ``` + // gpg --with-colons --with-fingerprint --force-v4-certs \ + // --list-keys attester@example.com + // tru::1:1513631572:0:3:1:5 + // pub:...... + // fpr:::::::::24FF6481B76AC91E66A00AC657A93A81EF3AE6FB: + // ``` + // Above, the fingerprint is `24FF6481B76AC91E66A00AC657A93A81EF3AE6FB`. string pgp_key_id = 2; } } @@ -1425,7 +1448,8 @@ service GrafeasProjects { // Creates a new `Project`. rpc CreateProject(CreateProjectRequest) returns (google.protobuf.Empty) { option (google.api.http) = { - post: "/v1alpha1/{name=projects/*}" + post: "/v1alpha1/projects" + body: "project" }; } @@ -1451,4 +1475,4 @@ service GrafeasProjects { delete: "/v1alpha1/{name=projects/*}" }; } -} \ No newline at end of file +} diff --git a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json index 9368fb1a..22c3cf0b 100644 --- a/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json +++ b/vendor/github.com/grafeas/grafeas/v1alpha1/proto/grafeas.swagger.json @@ -54,6 +54,31 @@ "tags": [ "GrafeasProjects" ] + }, + "post": { + "summary": "Creates a new `Project`.", + "operationId": "CreateProject", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiProject" + } + } + ], + "tags": [ + "GrafeasProjects" + ] } }, "/v1alpha1/{name}": { @@ -103,29 +128,6 @@ "GrafeasProjects" ] }, - "post": { - "summary": "Creates a new `Project`.", - "operationId": "CreateProject", - "responses": { - "200": { - "description": "", - "schema": { - "$ref": "#/definitions/protobufEmpty" - } - } - }, - "parameters": [ - { - "name": "name", - "in": "path", - "required": true, - "type": "string" - } - ], - "tags": [ - "GrafeasProjects" - ] - }, "patch": { "summary": "Updates an existing `Note`.", "operationId": "UpdateNote", @@ -421,6 +423,14 @@ } }, "definitions": { + "AttestationAuthorityAttestationDetails": { + "type": "object", + "properties": { + "pgp_signed_attestation": { + "$ref": "#/definitions/apiPgpSignedAttestation" + } + } + }, "BuildSignatureKeyType": { "type": "string", "enum": [ @@ -432,7 +442,7 @@ "description": "- KEY_TYPE_UNSPECIFIED: `KeyType` is not set.\n - PGP_ASCII_ARMORED: `PGP ASCII Armored` public key.\n - PKIX_PEM: `PKIX PEM` public key.", "title": "Public key formats" }, - "DeployableDeployment": { + "DeployableDeploymentDetails": { "type": "object", "properties": { "user_email": { @@ -465,13 +475,13 @@ "description": "Output only. Resource URI for the artifact being deployed taken from the\ndeployable field with the same name." }, "platform": { - "$ref": "#/definitions/DeploymentPlatform", + "$ref": "#/definitions/DeploymentDetailsPlatform", "description": "Platform hosting this deployment." } }, "description": "The period during which some deployable was active in a runtime." 
}, - "DeploymentPlatform": { + "DeploymentDetailsPlatform": { "type": "string", "enum": [ "PLATFORM_UNSPECIFIED", @@ -482,7 +492,7 @@ "default": "PLATFORM_UNSPECIFIED", "description": "Types of platforms.\n\n - PLATFORM_UNSPECIFIED: Unknown\n - GKE: Google Container Engine\n - FLEX: Google App Engine: Flexible Environment\n - CUSTOM: Custom user-defined platform" }, - "DiscoveryDiscovered": { + "DiscoveryDiscoveredDetails": { "type": "object", "properties": { "operation": { @@ -506,7 +516,7 @@ }, "description": "Basis describes the base image portion (Note) of the DockerImage\nrelationship. Linked occurrences are derived from this or an\nequivalent image via:\n FROM \u003cBasis.resource_url\u003e\nOr an equivalent reference, e.g. a tag of the resource_url." }, - "DockerImageDerived": { + "DockerImageDerivedDetails": { "type": "object", "properties": { "fingerprint": { @@ -656,7 +666,7 @@ }, "title": "This represents a particular channel of distribution for a given package.\ne.g. Debian's jessie-backports dpkg mirror" }, - "PackageManagerInstallation": { + "PackageManagerInstallationDetails": { "type": "object", "properties": { "name": { @@ -690,6 +700,15 @@ }, "description": "This represents a particular package that is distributed over\nvarious channels.\ne.g. glibc (aka libc6) is distributed by many, at various versions." }, + "PgpSignedAttestationContentType": { + "type": "string", + "enum": [ + "CONTENT_TYPE_UNSPECIFIED", + "SIMPLE_SIGNING_JSON" + ], + "default": "CONTENT_TYPE_UNSPECIFIED", + "description": "Type (e.g. schema) of the attestation payload that was signed.\n\n - CONTENT_TYPE_UNSPECIFIED: ContentType is not set.\n - SIMPLE_SIGNING_JSON: Atomic format attestation signature. See\nhttps://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md\nThe payload extracted from `signature` is a JSON blob conforming to the\nlinked schema." + }, "VersionVersionKind": { "type": "string", "enum": [ @@ -1301,6 +1320,10 @@ "type": "string", "format": "date-time", "description": "Output only. The time this note was last updated. This field can be used as\na filter in list requests." + }, + "operation_name": { + "type": "string", + "title": "The name of the `Operation` in the form\n\"projects/{project_id}/operation/{OPERATION_ID}\"" } }, "description": "Provides a detailed description of a `Note`." @@ -1314,10 +1337,11 @@ "IMAGE_BASIS", "PACKAGE_MANAGER", "DEPLOYABLE", - "DISCOVERY" + "DISCOVERY", + "ATTESTATION_AUTHORITY" ], "default": "KIND_UNSPECIFIED", - "description": "This must be 1:1 with members of our oneofs, it can be used for filtering\nNote and Occurrence on their kind.\n\n - KIND_UNSPECIFIED: Unknown\n - PACKAGE_VULNERABILITY: The note and occurrence represent a package vulnerability.\n - BUILD_DETAILS: The note and occurrence assert build provenance.\n - IMAGE_BASIS: This represents an image basis relationship.\n - PACKAGE_MANAGER: This represents a package installed via a package manager.\n - DEPLOYABLE: The note and occurrence track deployment events.\n - DISCOVERY: The note and occurrence track the initial discovery status of a resource." 
+ "description": "This must be 1:1 with members of our oneofs, it can be used for filtering\nNote and Occurrence on their kind.\n\n - KIND_UNSPECIFIED: Unknown\n - PACKAGE_VULNERABILITY: The note and occurrence represent a package vulnerability.\n - BUILD_DETAILS: The note and occurrence assert build provenance.\n - IMAGE_BASIS: This represents an image basis relationship.\n - PACKAGE_MANAGER: This represents a package installed via a package manager.\n - DEPLOYABLE: The note and occurrence track deployment events.\n - DISCOVERY: The note and occurrence track the initial discovery status of a resource.\n - ATTESTATION_AUTHORITY: This represents a logical \"role\" that can attest to artifacts." }, "apiOccurrence": { "type": "object", @@ -1346,22 +1370,26 @@ "$ref": "#/definitions/apiBuildDetails", "description": "Build details for a verifiable build." }, - "derived_image": { - "$ref": "#/definitions/DockerImageDerived", + "derived_image_details": { + "$ref": "#/definitions/DockerImageDerivedDetails", "description": "Describes how this resource derives from the basis\nin the associated note." }, - "installation": { - "$ref": "#/definitions/PackageManagerInstallation", + "installation_details": { + "$ref": "#/definitions/PackageManagerInstallationDetails", "description": "Describes the installation of a package on the linked resource." }, - "deployment": { - "$ref": "#/definitions/DeployableDeployment", + "deployment_details": { + "$ref": "#/definitions/DeployableDeploymentDetails", "description": "Describes the deployment of an artifact on a runtime." }, - "discovered": { - "$ref": "#/definitions/DiscoveryDiscovered", + "discovered_details": { + "$ref": "#/definitions/DiscoveryDiscoveredDetails", "description": "Describes the initial scan status for this resource." }, + "attestation_details": { + "$ref": "#/definitions/AttestationAuthorityAttestationDetails", + "description": "Describes an attestation of an artifact." + }, "remediation": { "type": "string", "title": "A description of actions that can be taken to remedy the `Note`" @@ -1375,6 +1403,10 @@ "type": "string", "format": "date-time", "description": "Output only. The time this `Occurrence` was last updated." + }, + "operation_name": { + "type": "string", + "title": "The name of the `Operation` in the form\n\"projects/{project_id}/operation/{OPERATION_ID}\"" } }, "description": "`Occurrence` includes information about analysis occurrences for an image." @@ -1397,6 +1429,24 @@ }, "title": "An occurrence of a particular package installation found within a\nsystem's filesystem.\ne.g. glibc was found in /var/lib/dpkg/status" }, + "apiPgpSignedAttestation": { + "type": "object", + "properties": { + "signature": { + "type": "string", + "description": "The raw content of the signature, as output by gpg or equivalent. Since\nthis message only supports attached signatures, the payload that was signed\nmust be attached. While the signature format supported is dependent on the\nverification implementation, currently only ASCII-armored (`--armor` to\ngpg), non-clearsigned (`--sign` rather than `--clearsign` to gpg) are\nsupported.\nConcretely, `gpg --sign --armor --output=signature.gpg payload.json` will\ncreate the signature content expected in this field in `signature.gpg` for\nthe `payload.json` attestation payload." + }, + "content_type": { + "$ref": "#/definitions/PgpSignedAttestationContentType", + "description": "Type (e.g. 
schema) of the attestation payload that was signed.\nThe verifier must ensure that the provided type is one that the verifier\nsupports, and that the attestation payload is a valid instantiation of that\ntype (e.g. by validating a JSON schema)." + }, + "pgp_key_id": { + "type": "string", + "description": "The cryptographic fingerprint of the key used to generate the signature,\nas output by, e.g. `gpg --list-keys`. This should be the version 4, full\n160-bit fingerprint, expressed as a 40 character hexidecimal string. See\nhttps://tools.ietf.org/html/rfc4880#section-12.2 for details.\nImplementations may choose to acknowledge \"LONG\", \"SHORT\", or other\nabbreviated key IDs, but only the full fingerprint is guaranteed to work.\nIn gpg, the full fingerprint can be retrieved from the `fpr` field\nreturned when calling --list-keys with --with-colons. For example:\n```\ngpg --with-colons --with-fingerprint --force-v4-certs \\\n --list-keys attester@example.com\ntru::1:1513631572:0:3:1:5\npub:...\u003cSNIP\u003e...\nfpr:::::::::24FF6481B76AC91E66A00AC657A93A81EF3AE6FB:\n```\nAbove, the fingerprint is `24FF6481B76AC91E66A00AC657A93A81EF3AE6FB`." + } + }, + "description": "An attestation wrapper with a PGP-compatible signature.\nThis message only supports ATTACHED signatures, where the payload that is\nsigned is included alongside the signature itself in the same file." + }, "apiProject": { "type": "object", "properties": { @@ -1609,7 +1659,7 @@ "properties": { "type_url": { "type": "string", - "description": "A URL/resource name whose content describes the type of the\nserialized protocol buffer message.\n\nFor URLs which use the scheme `http`, `https`, or no scheme, the\nfollowing restrictions and interpretations apply:\n\n* If no scheme is provided, `https` is assumed.\n* The last segment of the URL's path must represent the fully\n qualified name of the type (as in `path/google.protobuf.Duration`).\n The name should be in a canonical form (e.g., leading \".\" is\n not accepted).\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." }, "value": { "type": "string", @@ -1635,7 +1685,7 @@ "description": "The set of field mask paths." } }, - "description": "paths: \"f.a\"\n paths: \"f.b.d\"\n\nHere `f` represents a field in some root message, `a` and `b`\nfields in the message found in `f`, and `d` a field found in the\nmessage in `f.b`.\n\nField masks are used to specify a subset of fields that should be\nreturned by a get operation or modified by an update operation.\nField masks also have a custom JSON encoding (see below).\n\n# Field Masks in Projections\n\nWhen used in the context of a projection, a response message or\nsub-message is filtered by the API to only contain those fields as\nspecified in the mask. For example, if the mask in the previous\nexample is applied to a response message as follows:\n\n f {\n a : 22\n b {\n d : 1\n x : 2\n }\n y : 13\n }\n z: 8\n\nThe result will not contain specific values for fields x,y and z\n(their value will be set to the default, and omitted in proto text\noutput):\n\n\n f {\n a : 22\n b {\n d : 1\n }\n }\n\nA repeated field is not allowed except at the last position of a\npaths string.\n\nIf a FieldMask object is not present in a get operation, the\noperation applies to all fields (as if a FieldMask of all fields\nhad been specified).\n\nNote that a field mask does not necessarily apply to the\ntop-level response message. In case of a REST get operation, the\nfield mask applies directly to the response, but in case of a REST\nlist operation, the mask instead applies to each individual message\nin the returned resource list. In case of a REST custom method,\nother definitions may be used. Where the mask applies will be\nclearly documented together with its declaration in the API. In\nany case, the effect on the returned resource/resources is required\nbehavior for APIs.\n\n# Field Masks in Update Operations\n\nA field mask in update operations specifies which fields of the\ntargeted resource are going to be updated. The API is required\nto only change the values of the fields as specified in the mask\nand leave the others untouched. If a resource is passed in to\ndescribe the updated values, the API ignores the values of all\nfields not covered by the mask.\n\nIf a repeated field is specified for an update operation, the existing\nrepeated values in the target resource will be overwritten by the new values.\nNote that a repeated field is only allowed in the last position of a `paths`\nstring.\n\nIf a sub-message is specified in the last position of the field mask for an\nupdate operation, then the existing sub-message in the target resource is\noverwritten. 
Given the target message:\n\n f {\n b {\n d : 1\n x : 2\n }\n c : 1\n }\n\nAnd an update message:\n\n f {\n b {\n d : 10\n }\n }\n\nthen if the field mask is:\n\n paths: \"f.b\"\n\nthen the result will be:\n\n f {\n b {\n d : 10\n }\n c : 1\n }\n\nHowever, if the update mask was:\n\n paths: \"f.b.d\"\n\nthen the result would be:\n\n f {\n b {\n d : 10\n x : 2\n }\n c : 1\n }\n\nIn order to reset a field's value to the default, the field must\nbe in the mask and set to the default value in the provided resource.\nHence, in order to reset all fields of a resource, provide a default\ninstance of the resource and set all fields in the mask, or do\nnot provide a mask as described below.\n\nIf a field mask is not present on update, the operation applies to\nall fields (as if a field mask of all fields has been specified).\nNote that in the presence of schema evolution, this may mean that\nfields the client does not know and has therefore not filled into\nthe request will be reset to their default. If this is unwanted\nbehavior, a specific service may require a client to always specify\na field mask, producing an error if not.\n\nAs with get operations, the location of the resource which\ndescribes the updated values in the request message depends on the\noperation kind. In any case, the effect of the field mask is\nrequired to be honored by the API.\n\n## Considerations for HTTP REST\n\nThe HTTP kind of an update operation which uses a field mask must\nbe set to PATCH instead of PUT in order to satisfy HTTP semantics\n(PUT must only be used for full updates).\n\n# JSON Encoding of Field Masks\n\nIn JSON, a field mask is encoded as a single string where paths are\nseparated by a comma. Fields name in each path are converted\nto/from lower-camel naming conventions.\n\nAs an example, consider the following message declarations:\n\n message Profile {\n User user = 1;\n Photo photo = 2;\n }\n message User {\n string display_name = 1;\n string address = 2;\n }\n\nIn proto a field mask for `Profile` may look as such:\n\n mask {\n paths: \"user.display_name\"\n paths: \"photo\"\n }\n\nIn JSON, the same mask is represented as below:\n\n {\n mask: \"user.displayName,photo\"\n }\n\n# Field Masks and Oneof Fields\n\nField masks treat fields in oneofs just as regular fields. Consider the\nfollowing message:\n\n message SampleMessage {\n oneof test_oneof {\n string name = 4;\n SubMessage sub_message = 9;\n }\n }\n\nThe field mask can be:\n\n mask {\n paths: \"name\"\n }\n\nOr:\n\n mask {\n paths: \"sub_message\"\n }\n\nNote that oneof type names (\"test_oneof\" in this case) cannot be used in\npaths.\n\n## Field Mask Verification\n\nThe implementation of the all the API methods, which have any FieldMask type\nfield in the request, should verify the included field paths, and return\n`INVALID_ARGUMENT` error if any path is duplicated or unmappable.", + "description": "paths: \"f.a\"\n paths: \"f.b.d\"\n\nHere `f` represents a field in some root message, `a` and `b`\nfields in the message found in `f`, and `d` a field found in the\nmessage in `f.b`.\n\nField masks are used to specify a subset of fields that should be\nreturned by a get operation or modified by an update operation.\nField masks also have a custom JSON encoding (see below).\n\n# Field Masks in Projections\n\nWhen used in the context of a projection, a response message or\nsub-message is filtered by the API to only contain those fields as\nspecified in the mask. 
For example, if the mask in the previous\nexample is applied to a response message as follows:\n\n f {\n a : 22\n b {\n d : 1\n x : 2\n }\n y : 13\n }\n z: 8\n\nThe result will not contain specific values for fields x,y and z\n(their value will be set to the default, and omitted in proto text\noutput):\n\n\n f {\n a : 22\n b {\n d : 1\n }\n }\n\nA repeated field is not allowed except at the last position of a\npaths string.\n\nIf a FieldMask object is not present in a get operation, the\noperation applies to all fields (as if a FieldMask of all fields\nhad been specified).\n\nNote that a field mask does not necessarily apply to the\ntop-level response message. In case of a REST get operation, the\nfield mask applies directly to the response, but in case of a REST\nlist operation, the mask instead applies to each individual message\nin the returned resource list. In case of a REST custom method,\nother definitions may be used. Where the mask applies will be\nclearly documented together with its declaration in the API. In\nany case, the effect on the returned resource/resources is required\nbehavior for APIs.\n\n# Field Masks in Update Operations\n\nA field mask in update operations specifies which fields of the\ntargeted resource are going to be updated. The API is required\nto only change the values of the fields as specified in the mask\nand leave the others untouched. If a resource is passed in to\ndescribe the updated values, the API ignores the values of all\nfields not covered by the mask.\n\nIf a repeated field is specified for an update operation, the existing\nrepeated values in the target resource will be overwritten by the new values.\nNote that a repeated field is only allowed in the last position of a `paths`\nstring.\n\nIf a sub-message is specified in the last position of the field mask for an\nupdate operation, then the existing sub-message in the target resource is\noverwritten. Given the target message:\n\n f {\n b {\n d : 1\n x : 2\n }\n c : 1\n }\n\nAnd an update message:\n\n f {\n b {\n d : 10\n }\n }\n\nthen if the field mask is:\n\n paths: \"f.b\"\n\nthen the result will be:\n\n f {\n b {\n d : 10\n }\n c : 1\n }\n\nHowever, if the update mask was:\n\n paths: \"f.b.d\"\n\nthen the result would be:\n\n f {\n b {\n d : 10\n x : 2\n }\n c : 1\n }\n\nIn order to reset a field's value to the default, the field must\nbe in the mask and set to the default value in the provided resource.\nHence, in order to reset all fields of a resource, provide a default\ninstance of the resource and set all fields in the mask, or do\nnot provide a mask as described below.\n\nIf a field mask is not present on update, the operation applies to\nall fields (as if a field mask of all fields has been specified).\nNote that in the presence of schema evolution, this may mean that\nfields the client does not know and has therefore not filled into\nthe request will be reset to their default. If this is unwanted\nbehavior, a specific service may require a client to always specify\na field mask, producing an error if not.\n\nAs with get operations, the location of the resource which\ndescribes the updated values in the request message depends on the\noperation kind. 
In any case, the effect of the field mask is\nrequired to be honored by the API.\n\n## Considerations for HTTP REST\n\nThe HTTP kind of an update operation which uses a field mask must\nbe set to PATCH instead of PUT in order to satisfy HTTP semantics\n(PUT must only be used for full updates).\n\n# JSON Encoding of Field Masks\n\nIn JSON, a field mask is encoded as a single string where paths are\nseparated by a comma. Fields name in each path are converted\nto/from lower-camel naming conventions.\n\nAs an example, consider the following message declarations:\n\n message Profile {\n User user = 1;\n Photo photo = 2;\n }\n message User {\n string display_name = 1;\n string address = 2;\n }\n\nIn proto a field mask for `Profile` may look as such:\n\n mask {\n paths: \"user.display_name\"\n paths: \"photo\"\n }\n\nIn JSON, the same mask is represented as below:\n\n {\n mask: \"user.displayName,photo\"\n }\n\n# Field Masks and Oneof Fields\n\nField masks treat fields in oneofs just as regular fields. Consider the\nfollowing message:\n\n message SampleMessage {\n oneof test_oneof {\n string name = 4;\n SubMessage sub_message = 9;\n }\n }\n\nThe field mask can be:\n\n mask {\n paths: \"name\"\n }\n\nOr:\n\n mask {\n paths: \"sub_message\"\n }\n\nNote that oneof type names (\"test_oneof\" in this case) cannot be used in\npaths.\n\n## Field Mask Verification\n\nThe implementation of any API method which has a FieldMask type field in the\nrequest should verify the included field paths, and return an\n`INVALID_ARGUMENT` error if any path is duplicated or unmappable.", "title": "`FieldMask` represents a set of symbolic field paths, for example:" }, "rpcStatus": { diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/attestation.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/attestation.proto new file mode 100644 index 00000000..0654d1c9 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/attestation.proto @@ -0,0 +1,132 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.attestation; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.attestation"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +// An attestation wrapper with a PGP-compatible signature. This message only +// supports `ATTACHED` signatures, where the payload that is signed is included +// alongside the signature itself in the same file. +message PgpSignedAttestation { + // The raw content of the signature, as output by GNU Privacy Guard (GPG) or + // equivalent. Since this message only supports attached signatures, the + // payload that was signed must be attached. While the signature format + // supported is dependent on the verification implementation, currently only + // ASCII-armored (`--armor` to gpg), non-clearsigned (`--sign` rather than + // `--clearsign` to gpg) are supported. 
Concretely, `gpg --sign --armor + // --output=signature.gpg payload.json` will create the signature content + // expected in this field in `signature.gpg` for the `payload.json` + // attestation payload. + string signature = 1; + + // Type (for example schema) of the attestation payload that was signed. + enum ContentType { + // `ContentType` is not set. + CONTENT_TYPE_UNSPECIFIED = 0; + // Atomic format attestation signature. See + // https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md + // The payload extracted from `signature` is a JSON blob conforming to the + // linked schema. + SIMPLE_SIGNING_JSON = 1; + } + + // Type (for example schema) of the attestation payload that was signed. + // The verifier must ensure that the provided type is one that the verifier + // supports, and that the attestation payload is a valid instantiation of that + // type (for example by validating a JSON schema). + ContentType content_type = 3; + + // This field is used by verifiers to select the public key used to validate + // the signature. Note that the policy of the verifier ultimately determines + // which public keys verify a signature based on the context of the + // verification. There is no guarantee validation will succeed if the + // verifier has no key matching this ID, even if it has a key under a + // different ID that would verify the signature. Note that this ID should also + // be present in the signature content above, but that is not expected to be + // used by the verifier. + oneof key_id { + // The cryptographic fingerprint of the key used to generate the signature, + // as output by, e.g. `gpg --list-keys`. This should be the version 4, full + // 160-bit fingerprint, expressed as a 40 character hexidecimal string. See + // https://tools.ietf.org/html/rfc4880#section-12.2 for details. + // Implementations may choose to acknowledge "LONG", "SHORT", or other + // abbreviated key IDs, but only the full fingerprint is guaranteed to work. + // In gpg, the full fingerprint can be retrieved from the `fpr` field + // returned when calling --list-keys with --with-colons. For example: + // ``` + // gpg --with-colons --with-fingerprint --force-v4-certs \ + // --list-keys attester@example.com + // tru::1:1513631572:0:3:1:5 + // pub:...... + // fpr:::::::::24FF6481B76AC91E66A00AC657A93A81EF3AE6FB: + // ``` + // Above, the fingerprint is `24FF6481B76AC91E66A00AC657A93A81EF3AE6FB`. + string pgp_key_id = 2; + } +} + +// Note kind that represents a logical attestation "role" or "authority". For +// example, an organization might have one `Authority` for "QA" and one for +// "build". This Note is intended to act strictly as a grouping mechanism for +// the attached Occurrences (Attestations). This grouping mechanism also +// provides a security boundary, since IAM ACLs gate the ability for a principle +// to attach an Occurrence to a given Note. It also provides a single point of +// lookup to find all attached Attestation Occurrences, even if they don't all +// live in the same project. +message Authority { + // This submessage provides human-readable hints about the purpose of the + // Authority. Because the name of a Note acts as its resource reference, it is + // important to disambiguate the canonical name of the Note (which might be a + // UUID for security purposes) from "readable" names more suitable for debug + // output. 
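The pgp_key_id comment above asks for the full 160-bit fingerprint taken from the fpr record of gpg --with-colons output. A rough sketch of that lookup in Go, assuming a local gpg binary and reusing the attester@example.com identity from the example (a placeholder):

    package main

    import (
        "fmt"
        "log"
        "os/exec"
        "strings"
    )

    // fullFingerprint shells out to gpg and returns the first full fingerprint
    // reported for the given key, i.e. field 10 of an "fpr" record in
    // --with-colons output.
    func fullFingerprint(key string) (string, error) {
        out, err := exec.Command("gpg", "--with-colons", "--with-fingerprint", "--list-keys", key).Output()
        if err != nil {
            return "", err
        }
        for _, line := range strings.Split(string(out), "\n") {
            fields := strings.Split(line, ":")
            if len(fields) > 9 && fields[0] == "fpr" {
                return fields[9], nil
            }
        }
        return "", fmt.Errorf("no fpr record found for %s", key)
    }

    func main() {
        fpr, err := fullFingerprint("attester@example.com")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(fpr) // e.g. 24FF6481B76AC91E66A00AC657A93A81EF3AE6FB
    }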
Note that these hints should NOT be used to look up authorities in + // security sensitive contexts, such as when looking up Attestations to + // verify. + message Hint { + // The human readable name of this Attestation Authority, for example "qa". + string human_readable_name = 1; + } + + // Hint hints at the purpose of the attestation authority. + Hint hint = 1; +} + +// Details of an attestation occurrence. +message Details { + // Attestation for the resource. + Attestation attestation = 1; +} + +// Occurrence that represents a single "attestation". The authenticity of an +// Attestation can be verified using the attached signature. If the verifier +// trusts the public key of the signer, then verifying the signature is +// sufficient to establish trust. In this circumstance, the Authority to which +// this Attestation is attached is primarily useful for look-up (how to find +// this Attestation if you already know the Authority and artifact to be +// verified) and intent (which authority was this attestation intended to sign +// for). +message Attestation { + // The signature, generally over the `resource_url`, that verifies this + // attestation. The semantics of the signature veracity are ultimately + // determined by the verification engine. + oneof signature { + // A PGP signed attestation. + PgpSignedAttestation pgp_signed_attestation = 1; + } +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/build.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/build.proto new file mode 100644 index 00000000..bbc999a5 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/build.proto @@ -0,0 +1,96 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.build; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.build"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "v1beta1/proto/provenance.proto"; + +// Note holding the version of the provider's builder and the signature of the +// provenance message in linked BuildDetails. +message Build { + // Version of the builder which produced this Note. + string builder_version = 1; + + // Signature of the build in Occurrences pointing to the Note containing this + // `BuilderDetails`. + BuildSignature signature = 2; +} + +// Message encapsulating the signature of the verified build. +message BuildSignature { + // Public key of the builder which can be used to verify that the related + // findings are valid and unchanged. If `key_type` is empty, this defaults + // to PEM encoded public keys. + // + // This field may be empty if `key_id` references an external key. + // + // For Cloud Container Builder based signatures, this is a PEM encoded public + // key. To verify the Cloud Container Builder signature, place the contents of + // this field into a file (public.pem). 
The signature field is base64-decoded + // into its binary representation in signature.bin, and the provenance bytes + // from `BuildDetails` are base64-decoded into a binary representation in + // signed.bin. OpenSSL can then verify the signature: + // `openssl sha256 -verify public.pem -signature signature.bin signed.bin` + string public_key = 1; + + // Signature of the related `BuildProvenance`. In JSON, this is base-64 + // encoded. + bytes signature = 2; + + // An ID for the key used to sign. This could be either an Id for the key + // stored in `public_key` (such as the Id or fingerprint for a PGP key, or the + // CN for a cert), or a reference to an external key (such as a reference to a + // key in Cloud Key Management Service). + string key_id = 3; + + // Public key formats + enum KeyType { + // `KeyType` is not set. + KEY_TYPE_UNSPECIFIED = 0; + // `PGP ASCII Armored` public key. + PGP_ASCII_ARMORED = 1; + // `PKIX PEM` public key. + PKIX_PEM = 2; + } + + // The type of the key, either stored in `public_key` or referenced in + // `key_id` + KeyType key_type = 4; +} + +// Details of a build occurrence. +message Details { + // The actual provenance for the build. + grafeas.v1beta1.provenance.BuildProvenance provenance = 1; + + // Serialized JSON representation of the provenance, used in generating the + // `BuildSignature` in the corresponding Result. After verifying the + // signature, `provenance_bytes` can be unmarshalled and compared to the + // provenance to confirm that it is unchanged. A base64-encoded string + // representation of the provenance bytes is used for the signature in order + // to interoperate with openssl which expects this format for signature + // verification. + // + // The serialized form is captured both to avoid ambiguity in how the + // provenance is marshalled to json as well to prevent incompatibilities with + // future changes. + string provenance_bytes = 2; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/deployment.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/deployment.proto new file mode 100644 index 00000000..06de8f8f --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/deployment.proto @@ -0,0 +1,74 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.deployment; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.deployment"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "google/protobuf/timestamp.proto"; + +// An artifact that can be deployed in some runtime. +message Deployable { + // Resource URI for the artifact being deployed. + repeated string resource_uri = 1; +} + +// Details of a deployment occurrence. +message Details { + // Deployment history for the resource. + Deployment deployment = 1; +} + +// The period during which some deployable was active in a runtime. 
+message Deployment { + // Identity of the user that triggered this deployment. + string user_email = 1; + + // Beginning of the lifetime of this deployment. + google.protobuf.Timestamp deploy_time = 2; + + // End of the lifetime of this deployment. + google.protobuf.Timestamp undeploy_time = 3; + + // Configuration used to create this deployment. + string config = 4; + + // Address of the runtime element hosting this deployment. + string address = 5; + + // Output only. Resource URI for the artifact being deployed taken from + // the deployable field with the same name. + repeated string resource_uri = 6; + + // Types of platforms. + enum Platform { + // Unknown. + PLATFORM_UNSPECIFIED = 0; + // Google Container Engine. + GKE = 1; + // Google App Engine: Flexible Environment. + FLEX = 2; + // Custom user-defined platform. + CUSTOM = 3; + } + // Platform hosting this deployment. + Platform platform = 7; + + // next_id = 8; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/discovery.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/discovery.proto new file mode 100644 index 00000000..24f4653b --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/discovery.proto @@ -0,0 +1,85 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.discovery; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.discovery"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "v1beta1/proto/note_kind.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; + +// A note that indicates a type of analysis a provider would perform. This note +// exists in a provider's project. A `Discovery` occurrence is created in a +// consumer's project at the start of analysis. +message Discovery { + // The kind of analysis that is handled by this discovery. + grafeas.v1beta1.NoteKind analysis_kind = 1; +} + +// Details of a discovery occurrence. +message Details { + // Analysis status for the discovered resource. + Discovered discovered = 1; +} + +// Provides information about the analysis status of a discovered resource. +message Discovered { + // Whether the resource is continuously analyzed. + enum ContinuousAnalysis { + // Unknown. + CONTINUOUS_ANALYSIS_UNSPECIFIED = 0; + // The resource is continuously analyzed. + ACTIVE = 1; + // The resource is ignored for continuous analysis. + INACTIVE = 2; + } + + // Whether the resource is continuously analyzed. + ContinuousAnalysis continuous_analysis = 1; + + // The last time continuous analysis was done for this resource. + google.protobuf.Timestamp last_analysis_time = 2; + + // Analysis status for a resource. Currently for initial analysis only (not + // updated in continuous analysis). + enum AnalysisStatus { + // Unknown. + ANALYSIS_STATUS_UNSPECIFIED = 0; + // Resource is known but no action has been taken yet. 
+ PENDING = 1; + // Resource is being analyzed. + SCANNING = 2; + // Analysis has finished successfully. + FINISHED_SUCCESS = 3; + // Analysis has finished unsuccessfully, the analysis itself is in a bad + // state. + FINISHED_FAILED = 4; + // The resource is known not to be supported + FINISHED_UNSUPPORTED = 5; + } + + // The status of discovery for the resource. + AnalysisStatus analysis_status = 3; + + // When an error is encountered this will contain a LocalizedMessage under + // details to show to the user. The LocalizedMessage is output only and + // populated by the API. + google.rpc.Status analysis_status_error = 4; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/grafeas.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/grafeas.proto new file mode 100644 index 00000000..8f18a299 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/grafeas.proto @@ -0,0 +1,586 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "google/api/annotations.proto"; +import "v1beta1/proto/attestation.proto"; +import "v1beta1/proto/build.proto"; +import "v1beta1/proto/deployment.proto"; +import "v1beta1/proto/discovery.proto"; +import "v1beta1/proto/image.proto"; +import "v1beta1/proto/note_kind.proto"; +import "v1beta1/proto/package.proto"; +import "v1beta1/proto/provenance.proto"; +import "v1beta1/proto/vulnerability.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; +import "google/longrunning/operations.proto"; + +// [Grafeas](grafeas.io) API. +// +// Retrieves analysis results of Cloud components such as Docker container +// images. +// +// Analysis results are stored as a series of occurrences. An `Occurrence` +// contains information about a specific analysis instance on a resource. An +// occurrence refers to a `Note`. A note contains details describing the +// analysis and is generally stored in a separate project, called a `Provider`. +// Multiple occurrences can refer to the same note. +// +// For example, an SSL vulnerability could affect multiple images. In this case, +// there would be one note for the vulnerability and an occurrence for each +// image with the vulnerability referring to that note. +service Grafeas { + // Gets the specified occurrence. + rpc GetOccurrence(GetOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/occurrences/*}" + }; + }; + + // Lists occurrences for the specified project. + rpc ListOccurrences(ListOccurrencesRequest) + returns (ListOccurrencesResponse) { + option (google.api.http) = { + get: "/v1beta1/{parent=projects/*}/occurrences" + }; + }; + + // Deletes the specified occurrence. 
For example, use this method to delete an + // occurrence when the occurrence is no longer applicable for the given + // resource. + rpc DeleteOccurrence(DeleteOccurrenceRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/occurrences/*}" + }; + }; + + // Creates a new occurrence. + rpc CreateOccurrence(CreateOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/occurrences" + body: "occurrence" + }; + }; + + // Creates new occurrences in batch. + rpc BatchCreateOccurrences(BatchCreateOccurrencesRequest) + returns (BatchCreateOccurrencesResponse) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/occurrences:batchCreate" + body: "*" + }; + }; + + // Updates the specified occurrence. + rpc UpdateOccurrence(UpdateOccurrenceRequest) returns (Occurrence) { + option (google.api.http) = { + patch: "/v1beta1/{name=projects/*/occurrences/*}" + body: "occurrence" + }; + }; + + // Gets the note attached to the specified occurrence. Consumer projects can + // use this method to get a note that belongs to a provider project. + rpc GetOccurrenceNote(GetOccurrenceNoteRequest) returns (Note) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/occurrences/*}/notes" + }; + }; + + // Gets the specified note. + rpc GetNote(GetNoteRequest) returns (Note) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/notes/*}" + }; + }; + + // Lists notes for the specified project. + rpc ListNotes(ListNotesRequest) returns (ListNotesResponse) { + option (google.api.http) = { + get: "/v1beta1/{parent=projects/*}/notes" + }; + }; + + // Deletes the specified note. + rpc DeleteNote(DeleteNoteRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/notes/*}" + }; + }; + + // Creates a new note. + rpc CreateNote(CreateNoteRequest) returns (Note) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/notes" + body: "note" + }; + }; + + // Creates new notes in batch. + rpc BatchCreateNotes(BatchCreateNotesRequest) + returns (BatchCreateNotesResponse) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/notes:batchCreate" + body: "*" + }; + }; + + // Updates the specified note. + rpc UpdateNote(UpdateNoteRequest) returns (Note) { + option (google.api.http) = { + patch: "/v1beta1/{name=projects/*/notes/*}" + body: "note" + }; + }; + + // Lists occurrences referencing the specified note. Provider projects can use + // this method to get all occurrences across consumer projects referencing the + // specified note. + rpc ListNoteOccurrences(ListNoteOccurrencesRequest) + returns (ListNoteOccurrencesResponse) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/notes/*}/occurrences" + }; + }; + + // Gets a summary of the number and severity of occurrences. + rpc GetVulnerabilityOccurrencesSummary( + GetVulnerabilityOccurrencesSummaryRequest) + returns (VulnerabilityOccurrencesSummary) { + option (google.api.http) = { + get: "/v1beta1/{parent=projects/*}/occurrences:vulnerabilitySummary" + }; + }; + + // Creates a new operation. + rpc CreateOperation(CreateOperationRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/operations" + body: "operation" + }; + }; + + // Creates new operations in batch. 
+ rpc BatchCreateOperations(BatchCreateOperationsRequest) + returns (BatchCreateOperationsResponse) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*}/operations:batchCreate" + body: "*" + }; + }; + + // Updates the specified operation. At this time, the only valid update is to + // mark the operation as done. + rpc UpdateOperation(UpdateOperationRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + patch: "/v1beta1/{name=projects/*/operations/*}" + body: "operation" + }; + }; +}; + +// An instance of an analysis type that has been found on a resource. +message Occurrence { + // Output only. The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + string name = 1; + + // The resource for which the occurrence applies. + Resource resource = 2; + + // An analysis note associated with this image, in the form of + // `projects[PROVIDER_ID]/notes/[NOTE_ID]`. This field can be used as a filter + // in list requests. + string note_name = 3; + + // Output only. This explicitly denotes which of the occurrence details are + // specified. This field can be used as a filter in list requests. + grafeas.v1beta1.NoteKind kind = 4; + + // A description of actions that can be taken to remedy the note. + string remediation = 5; + + // Output only. The time this occurrence was created. + google.protobuf.Timestamp create_time = 6; + + // Output only. The time this occurrence was last updated. + google.protobuf.Timestamp update_time = 7; + + // The name of the operation that created this occurrence. + string operation_name = 8; + + // Describes the details of the note kind found on this resource. + oneof details { + // Describes a security vulnerability. + grafeas.v1beta1.vulnerability.Details vulnerability = 9; + // Describes a verifiable build. + grafeas.v1beta1.build.Details build = 10; + // Describes how this resource derives from the basis in the associated + // note. + grafeas.v1beta1.image.Details derived_image = 11; + // Describes the installation of a package on the linked resource. + grafeas.v1beta1.package.Details installation = 12; + // Describes the deployment of an artifact on a runtime. + grafeas.v1beta1.deployment.Details deployment = 13; + // Describes when a resource was discovered. + grafeas.v1beta1.discovery.Details discovered = 14; + // Describes an attestation of an artifact. + grafeas.v1beta1.attestation.Details attestation = 15; + } + + // next_id = 16; +} + +// An entity that can have metadata. For example, a Docker image. +message Resource { + // The name of the resource. For example, the name of a Docker image - + // "Debian". + string name = 1; + // The unique URI of the resource. For example, + // `https://gcr.io/project/image@sha256:foo` for a Docker image. + string uri = 2; + // The hash of the resource content. For example, the Docker digest. + grafeas.v1beta1.provenance.Hash content_hash = 3; + + // next_id = 4; +} + +// A type of analysis that can be done for a resource. +message Note { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + string name = 1; + + // A one sentence description of this note. + string short_description = 2; + + // A detailed description of this note. + string long_description = 3; + + // Output only. The type of analysis. This field can be used as a filter in + // list requests. + grafeas.v1beta1.NoteKind kind = 4; + + // URLs associated with this note. 
+ repeated RelatedUrl related_url = 5; + + // Metadata for any related URL information. + message RelatedUrl { + // Specific URL to associate with the note. + string url = 1; + // Label to describe usage of the URL. + string label = 2; + } + + // Time of expiration for this note. Empty if note does not expire. + google.protobuf.Timestamp expiration_time = 6; + + // Output only. The time this note was created. This field can be used as a + // filter in list requests. + google.protobuf.Timestamp create_time = 7; + + // Output only. The time this note was last updated. This field can be used as + // a filter in list requests. + google.protobuf.Timestamp update_time = 8; + + // The name of the operation that created this note. + string operation_name = 9; + + // Other notes related to this note. + repeated string related_note_names = 10; + + // The type of analysis this note represents. + oneof type { + // A note describing a package vulnerability. + grafeas.v1beta1.vulnerability.Vulnerability vulnerability = 11; + // A note describing build provenance for a verifiable build. + grafeas.v1beta1.build.Build build = 12; + // A note describing a base image. + grafeas.v1beta1.image.Basis base_image = 13; + // A note describing a package hosted by various package managers. + grafeas.v1beta1.package.Package package = 14; + // A note describing something that can be deployed. + grafeas.v1beta1.deployment.Deployable deployable = 15; + // A note describing the initial analysis of a resource. + grafeas.v1beta1.discovery.Discovery discovery = 16; + // A note describing an attestation role. + grafeas.v1beta1.attestation.Authority attestation_authority = 17; + } + + // next_id = 18; +} + +// Request to get an occurrence. +message GetOccurrenceRequest { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + string name = 1; +} + +// Request to list occurrences. +message ListOccurrencesRequest { + // The name of the project to list occurrences for in the form of + // `projects/[PROJECT_ID]`. + string parent = 1; + + // The filter expression. + string filter = 2; + + // Number of occurrences to return in the list. + int32 page_size = 3; + + // Token to provide to skip to a particular spot in the list. + string page_token = 4; + + // The kind of occurrences to list. + grafeas.v1beta1.NoteKind kind = 6; + + // next_id = 7; +} + +// Response for listing occurrences. +message ListOccurrencesResponse { + // The occurrences requested. + repeated Occurrence occurrences = 1; + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + string next_page_token = 2; +} + +// Request to delete a occurrence. +message DeleteOccurrenceRequest { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + string name = 1; +} + +// Request to create a new occurrence. +message CreateOccurrenceRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the occurrence is to be created. + string parent = 1; + // The occurrence to create. + Occurrence occurrence = 2; +} + +// Request to update an occurrence. +message UpdateOccurrenceRequest { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + string name = 1; + // The updated occurrence. + Occurrence occurrence = 2; + // The fields to update. 
+ google.protobuf.FieldMask update_mask = 3; +} + +// Request to get a note. +message GetNoteRequest { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + string name = 1; +} + +// Request to get the note to which the specified occurrence is attached. +message GetOccurrenceNoteRequest { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + string name = 1; +} + +// Request to list notes. +message ListNotesRequest { + // The name of the project to list notes for in the form of + // `projects/[PROJECT_ID]`. + string parent = 1; + // The filter expression. + string filter = 2; + // Number of notes to return in the list. + int32 page_size = 3; + // Token to provide to skip to a particular spot in the list. + string page_token = 4; +} + +// Response for listing notes. +message ListNotesResponse { + // The notes requested. + repeated Note notes = 1; + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + string next_page_token = 2; +} + +// Request to delete a note. +message DeleteNoteRequest { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + string name = 1; +} + +// Request to create a new note. +message CreateNoteRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the note is to be created. + string parent = 1; + // The ID to use for this note. + string note_id = 2; + // The note to create. + Note note = 3; +} + +// Request to update a note. +message UpdateNoteRequest { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + string name = 1; + // The updated note. + Note note = 2; + // The fields to update. + google.protobuf.FieldMask update_mask = 3; +} + +// Request to list occurrences for a note. +message ListNoteOccurrencesRequest { + // The name of the note to list occurrences for in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + string name = 1; + // The filter expression. + string filter = 2; + // Number of occurrences to return in the list. + int32 page_size = 3; + // Token to provide to skip to a particular spot in the list. + string page_token = 4; +} + +// Response for listing occurrences for a note. +message ListNoteOccurrencesResponse { + // The occurrences attached to the specified note. + repeated Occurrence occurrences = 1; + // Token to provide to skip to a particular spot in the list. + string next_page_token = 2; +} + +// Request for creating an operation. +message CreateOperationRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the operation is to be created. + string parent = 1; + // The ID to use for this operation. + string operation_id = 2; + // The operation to create. + google.longrunning.Operation operation = 3; +} + +// Request for updating an operation. +message UpdateOperationRequest { + // The name of the operation in the form of + // `projects/[PROVIDER_ID]/operations/[OPERATION_ID]`. + string name = 1; + // The updated operation. + google.longrunning.Operation operation = 3; + // The fields to update. + google.protobuf.FieldMask update_mask = 4; +} + +// Metadata for an operation. +message OperationMetadata { + // Output only. The time this operation was created. + google.protobuf.Timestamp create_time = 1; + // Output only. The time that this operation was marked as done. 
+ google.protobuf.Timestamp end_time = 2; +} + +// Request to create notes in batch. +message BatchCreateNotesRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the notes are to be created. + string parent = 1; + + // The notes to create. + map notes = 2; +} + +// Response for creating notes in batch. +message BatchCreateNotesResponse { + // The notes that were created. + repeated Note notes = 1; +} + +// Request to create occurrences in batch. +message BatchCreateOccurrencesRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the occurrences are to be created. + string parent = 1; + // The occurrences to create. + repeated Occurrence occurrences = 2; +} + +// Response for creating occurrences in batch. +message BatchCreateOccurrencesResponse { + // The occurrences that were created. + repeated Occurrence occurrences = 1; +} + +// Request to create operations in batch. +message BatchCreateOperationsRequest { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the operations are to be created. + string parent = 1; + + // The operations to create. + map operations = 2; +} + +// Response for creating operations in batch. +message BatchCreateOperationsResponse { + // The operations that were created. + repeated google.longrunning.Operation operations = 1; +} + +// Request to get a vulnerability summary for some set of occurrences. +message GetVulnerabilityOccurrencesSummaryRequest { + // The name of the project to get a vulnerability summary for in the form of + // `projects/[PROJECT_ID]`. + string parent = 1; + // The filter expression. + string filter = 2; +} + +// A summary of how many vulnerability occurrences there are per severity type. +message VulnerabilityOccurrencesSummary { + // A map of how many occurrences were found for each severity. + repeated SeverityCount counts = 1; + // The number of occurrences for a specific severity. + message SeverityCount { + // The severity of the occurrences. + grafeas.v1beta1.vulnerability.Severity severity = 1; + // The number of occurrences with the severity. + int64 count = 2; + } +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/image.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/image.proto new file mode 100644 index 00000000..f189addc --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/image.proto @@ -0,0 +1,144 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.image; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.image"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +// Layer holds metadata specific to a layer of a Docker image. +message Layer { + // Instructions from Dockerfile. + enum Directive { + // Default value for unsupported/missing directive. 
+ DIRECTIVE_UNSPECIFIED = 0; + + // https://docs.docker.com/reference/builder/#maintainer + MAINTAINER = 1; + + // https://docs.docker.com/reference/builder/#run + RUN = 2; + + // https://docs.docker.com/reference/builder/#cmd + CMD = 3; + + // https://docs.docker.com/reference/builder/#label + LABEL = 4; + + // https://docs.docker.com/reference/builder/#expose + EXPOSE = 5; + + // https://docs.docker.com/reference/builder/#env + ENV = 6; + + // https://docs.docker.com/reference/builder/#add + ADD = 7; + + // https://docs.docker.com/reference/builder/#copy + COPY = 8; + + // https://docs.docker.com/reference/builder/#entrypoint + ENTRYPOINT = 9; + + // https://docs.docker.com/reference/builder/#volume + VOLUME = 10; + + // https://docs.docker.com/reference/builder/#user + USER = 11; + + // https://docs.docker.com/reference/builder/#workdir + WORKDIR = 12; + + // https://docs.docker.com/reference/builder/#arg + ARG = 13; + + // https://docs.docker.com/reference/builder/#onbuild + ONBUILD = 14; + + // https://docs.docker.com/reference/builder/#stopsignal + STOPSIGNAL = 15; + + // https://docs.docker.com/reference/builder/#healthcheck + HEALTHCHECK = 16; + + // https://docs.docker.com/reference/builder/#shell + SHELL = 17; + } + + // The recovered Dockerfile directive used to construct this layer. + Directive directive = 1; + + // The recovered arguments to the Dockerfile directive. + string arguments = 2; +} + +// A set of properties that uniquely identify a given Docker image. +message Fingerprint { + // The layer-id of the final layer in the Docker image's v1 representation. + string v1_name = 1; + + // The ordered list of v2 blobs that represent a given image. + repeated string v2_blob = 2; + + // Output only. The name of the image's v2 blobs computed via: + // [bottom] := v2_blob[bottom] + // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) + // Only the name of the final blob is kept. + string v2_name = 3; +} + +// Basis describes the base image portion (Note) of the DockerImage +// relationship. Linked occurrences are derived from this or an +// equivalent image via: +// FROM +// Or an equivalent reference, e.g. a tag of the resource_url. +message Basis { + // The resource_url for the resource representing the basis of + // associated occurrence images. + string resource_url = 1; + + // The fingerprint of the base image. + Fingerprint fingerprint = 2; +} + +// Details of an image occurrence. +message Details { + // The child image derived from the base image. + Derived derived_image = 1; +} + +// Derived describes the derived image portion (Occurrence) of the DockerImage +// relationship. This image would be produced from a Dockerfile with FROM +// . +message Derived { + // The fingerprint of the derived image. + Fingerprint fingerprint = 1; + + // Output only. The number of layers by which this image differs from the + // associated image basis. + int32 distance = 2; + + // This contains layer-specific metadata, if populated it has length + // "distance" and is ordered with [distance] being the layer immediately + // following the base image and [1] being the final layer. + repeated Layer layer_info = 3; + + // Output only. This contains the base image URL for the derived image + // occurrence. 
+ string base_resource_url = 4; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/note_kind.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/note_kind.proto new file mode 100644 index 00000000..a936d03e --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/note_kind.proto @@ -0,0 +1,42 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +// Kind represents the kinds of notes supported. +enum NoteKind { + // Unknown. + NOTE_KIND_UNSPECIFIED = 0; + // The note and occurrence represent a package vulnerability. + VULNERABILITY = 1; + // The note and occurrence assert build provenance. + BUILD = 2; + // This represents an image basis relationship. + IMAGE = 3; + // This represents a package installed via a package manager. + PACKAGE = 4; + // The note and occurrence track deployment events. + DEPLOYMENT = 5; + // The note and occurrence track the initial discovery status of a resource. + DISCOVERY = 6; + // This represents a logical "role" that can attest to artifacts. + ATTESTATION = 7; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/package.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/package.proto new file mode 100644 index 00000000..872cf62d --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/package.proto @@ -0,0 +1,127 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.package; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.pkg"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +// Instruction set architectures supported by various package managers. +enum Architecture { + // Unknown architecture. + ARCHITECTURE_UNSPECIFIED = 0; + // X86 architecture. + X86 = 1; + // X64 architecture. + X64 = 2; +} + +// This represents a particular channel of distribution for a given package. +// E.g., Debian's jessie-backports dpkg mirror. +message Distribution { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. 
+ string cpe_uri = 1; + + // The CPU architecture for which packages in this distribution channel were + // built. + Architecture architecture = 2; + + // The latest available version of this package in this distribution + // channel. + Version latest_version = 3; + + // A freeform string denoting the maintainer of this package. + string maintainer = 4; + + // The distribution channel-specific homepage for this package. + string url = 5; + + // The distribution channel-specific description of this package. + string description = 6; +} + +// An occurrence of a particular package installation found within a system's +// filesystem. E.g., glibc was found in /var/lib/dpkg/status. +message Location { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + string cpe_uri = 1; + + // The version installed at this location. + Version version = 2; + + // The path from which we gathered that this package/version is installed. + string path = 3; +} + +// This represents a particular package that is distributed over various +// channels. E.g., glibc (aka libc6) is distributed by many, at various +// versions. +message Package { + // The name of the package. + string name = 1; + + // The various channels by which a package is distributed. + repeated Distribution distribution = 10; +} + +// Details of a package occurrence. +message Details { + // Where the package was installed. + Installation installation = 1; +} + +// This represents how a particular software package may be installed on a +// system. +message Installation { + // Output only. The name of the installed package. + string name = 1; + + // All of the places within the filesystem versions of this package + // have been found. + repeated Location location = 2; +} + +// Version contains structured information about the version of a package. +message Version { + // Used to correct mistakes in the version numbering scheme. + int32 epoch = 1; + // The main part of the version name. + string name = 2; + // The iteration of the package build from the above version. + string revision = 3; + + // Whether this is an ordinary package version or a sentinel MIN/MAX version. + enum VersionKind { + // Unknown. + VERSION_KIND_UNSPECIFIED = 0; + // A standard package version, defined by the other fields. + NORMAL = 1; + // A special version representing negative infinity, other fields are + // ignored. + MINIMUM = 2; + // A special version representing positive infinity, other fields are + // ignored. + MAXIMUM = 3; + }; + + // Distinguish between sentinel MIN/MAX versions and normal versions. If + // kind is not NORMAL, then the other fields are ignored. + VersionKind kind = 4; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/project.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/project.proto new file mode 100644 index 00000000..8297f9a0 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/project.proto @@ -0,0 +1,107 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.project; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.project"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; + +// [Projects](grafeas.io) API. +// +// Manages Grafeas `Projects`. Projects contain sets of other Grafeas entities +// such as `Notes`, `Occurrences`, and `Operations`. +service Projects { + // Creates a new project. + rpc CreateProject(CreateProjectRequest) returns (Project) { + option (google.api.http) = { + post: "/v1beta1/projects" + body: "project" + }; + } + + // Gets the specified project. + rpc GetProject(GetProjectRequest) returns (Project) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*}" + }; + } + + // Lists projects. + rpc ListProjects(ListProjectsRequest) returns (ListProjectsResponse) { + option (google.api.http) = { + get: "/v1beta1/projects" + }; + } + + // Deletes the specified project. + rpc DeleteProject(DeleteProjectRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*}" + }; + } +} + +// Request to create a new project. +message CreateProjectRequest { + // The project to create. + Project project = 1; +} + +// Request to get a project. +message GetProjectRequest { + // The name of the project in the form of `projects/{PROJECT_ID}`. + string name = 1; +} + +// Request to list projects. +message ListProjectsRequest { + // The filter expression. + string filter = 1; + + // Number of projects to return in the list. + int32 page_size = 2; + + // Token to provide to skip to a particular spot in the list. + string page_token = 3; +} + +// Request to delete a project. +message DeleteProjectRequest { + // The name of the project in the form of `projects/{PROJECT_ID}`. + string name = 1; +} + +// Response for listing projects. +message ListProjectsResponse { + // The projects requested. + repeated Project projects = 1; + + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + string next_page_token = 2; +} + +// Describes a Grafeas project. +message Project { + // The name of the project in the form of `projects/{PROJECT_ID}`. + string name = 1; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/provenance.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/provenance.proto new file mode 100644 index 00000000..293eebeb --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/provenance.proto @@ -0,0 +1,179 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package grafeas.v1beta1.provenance; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.provenance"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "google/protobuf/timestamp.proto"; +import "v1beta1/proto/source.proto"; + +// Provenance of a build. Contains all information needed to verify the full +// details about the build from source to completion. +message BuildProvenance { + // Unique identifier of the build. + string id = 1; + + // ID of the project. + string project_id = 2; + + // Commands requested by the build. + repeated Command commands = 3; + + // Output of the build. + repeated Artifact built_artifacts = 4; + + // Time at which the build was created. + google.protobuf.Timestamp create_time = 5; + + // Time at which execution of the build was started. + google.protobuf.Timestamp start_time = 6; + + // Time at which execution of the build was finished. + google.protobuf.Timestamp end_time = 7; + + // E-mail address of the user who initiated this build. Note that this was the + // user's e-mail address at the time the build was initiated; this address may + // not represent the same end-user for all time. + string creator = 8; + + // Google Cloud Storage bucket where logs were written. + string logs_bucket = 9; + + // Details of the Source input to the build. + Source source_provenance = 10; + + // Trigger identifier if the build was triggered automatically; empty if not. + string trigger_id = 11; + + // Special options applied to this build. This is a catch-all field where + // build providers can enter any desired additional details. + map build_options = 12; + + // Version string of the builder at the time this build was executed. + string builder_version = 13; + + // next_id = 14 +} + +// Source describes the location of the source used for the build. +message Source { + // If provided, the input binary artifacts for the build came from this + // location. + StorageSource artifact_storage_source = 1; + + // Hash(es) of the build source, which can be used to verify that the original + // source integrity was maintained in the build. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. + // + // If the build source came in a single package such as a gzipped tarfile + // (.tar.gz), the FileHash will be for the single path to that file. + map file_hashes = 2; + + // If provided, the source code used for the build came from this location. + grafeas.v1beta1.source.SourceContext context = 3; + + // If provided, some of the source code used for the build may be found in + // these locations, in the case where the source repository had multiple + // remotes or submodules. This list will not include the context specified in + // the context field. + repeated grafeas.v1beta1.source.SourceContext additional_contexts = 4; +} + +// Container message for hashes of byte content of files, used in Source +// messages to verify integrity of source input to the build. +message FileHashes { + // Collection of file hashes. + repeated Hash file_hash = 1; +} + +// Container message for hash values. +message Hash { + // Specifies the hash algorithm, if any. + enum HashType { + // Unknown. + HASH_TYPE_UNSPECIFIED = 0; + // A SHA-256 hash. + SHA256 = 1; + } + + // The type of hash that was performed. + HashType type = 1; + // The hash value. 
+ bytes value = 2; +} + +// StorageSource describes the location of the source in an archive file in +// Google Cloud Storage. +message StorageSource { + // Google Cloud Storage bucket containing source (see [Bucket Name + // Requirements] + // (https://cloud.google.com/storage/docs/bucket-naming#requirements)). + string bucket = 1; + + // Google Cloud Storage object containing source. + string object = 2; + + // Google Cloud Storage generation for the object. + int64 generation = 3; +} + +// Command describes a step performed as part of the build pipeline. +message Command { + // Name of the command, as presented on the command line, or if the command is + // packaged as a Docker container, as presented to `docker pull`. + string name = 1; + + // Environment variables set before running this command. + repeated string env = 2; + + // Command-line arguments used when executing this command. + repeated string args = 3; + + // Working directory (relative to project source root) used when running this + // command. + string dir = 4; + + // Optional unique identifier for this command, used in wait_for to reference + // this command as a dependency. + string id = 5; + + // The ID(s) of the command(s) that this command depends on. + repeated string wait_for = 6; +} + +// Artifact describes a build product. +message Artifact { + // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a + // container. + string checksum = 1; + + // Artifact ID, if any; for container images, this will be a URL by digest + // like `gcr.io/projectID/imagename@sha256:123456`. + string id = 2; + + // Related artifact names. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. Note that a + // single Artifact ID can have multiple names, for example if two tags are + // applied to one image. + repeated string names = 3; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/source.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/source.proto new file mode 100644 index 00000000..060336d5 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/source.proto @@ -0,0 +1,134 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package grafeas.v1beta1.source; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.source"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +// A SourceContext is a reference to a tree of files. A SourceContext together +// with a path point to a unique revision of a single file or directory. +message SourceContext { + // A SourceContext can refer any one of the following types of repositories. + oneof context { + // A SourceContext referring to a revision in a Google Cloud Source Repo. + CloudRepoSourceContext cloud_repo = 1; + + // A SourceContext referring to a Gerrit project. 
+ GerritSourceContext gerrit = 2; + + // A SourceContext referring to any third party Git repo (e.g., GitHub). + GitSourceContext git = 3; + } + + // Labels with user defined metadata. + map labels = 4; +} + +// An alias to a repo revision. +message AliasContext { + // The type of an alias. + enum Kind { + // Unknown. + KIND_UNSPECIFIED = 0; + // Git tag. + FIXED = 1; + // Git branch. + MOVABLE = 2; + // Used to specify non-standard aliases. For example, if a Git repo has a + // ref named "refs/foo/bar". + OTHER = 4; + } + + // The alias kind. + Kind kind = 1; + + // The alias name. + string name = 2; +} + +// A CloudRepoSourceContext denotes a particular revision in a Google Cloud +// Source Repo. +message CloudRepoSourceContext { + // The ID of the repo. + RepoId repo_id = 1; + + // A revision in a Cloud Repo can be identified by either its revision ID or + // its alias. + oneof revision { + // A revision ID. + string revision_id = 2; + + // An alias, which may be a branch or tag. + AliasContext alias_context = 3; + } +} + +// A SourceContext referring to a Gerrit project. +message GerritSourceContext { + // The URI of a running Gerrit instance. + string host_uri = 1; + + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. The "repo name" is the + // hostURI/project. + string gerrit_project = 2; + + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. + oneof revision { + // A revision (commit) ID. + string revision_id = 3; + + // An alias, which may be a branch or tag. + AliasContext alias_context = 4; + } +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g., GitHub). +message GitSourceContext { + // Git repository URL. + string url = 1; + + // Git commit hash. + string revision_id = 2; +} + +// A unique identifier for a Cloud Repo. +message RepoId { + // A cloud repo can be identified by either its project ID and repository name + // combination, or its globally unique identifier. + oneof id { + // A combination of a project ID and a repo name. + ProjectRepoId project_repo_id = 1; + + // A server-assigned, globally unique identifier. + string uid = 2; + } +} + +// Selects a repo using a Google Cloud Platform project ID (e.g., +// winged-cargo-31) and a repo name within that project. +message ProjectRepoId { + // The ID of the project. + string project_id = 1; + + // The name of the repo. Leave empty for the default repo. + string repo_name = 2; +} diff --git a/vendor/github.com/grafeas/grafeas/v1beta1/proto/vulnerability.proto b/vendor/github.com/grafeas/grafeas/v1beta1/proto/vulnerability.proto new file mode 100644 index 00000000..760413c3 --- /dev/null +++ b/vendor/github.com/grafeas/grafeas/v1beta1/proto/vulnerability.proto @@ -0,0 +1,135 @@ +// Copyright 2018 The Grafeas Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package grafeas.v1beta1.vulnerability; + +option java_multiple_files = true; +option java_package = "io.grafeas.v1beta1.vulnerability"; +option objc_class_prefix = "GRA"; +option py_api_version = 2; + +import "v1beta1/proto/package.proto"; + +// Note provider-assigned severity/impact ranking. +enum Severity { + // Unknown. + SEVERITY_UNSPECIFIED = 0; + // Minimal severity. + MINIMAL = 1; + // Low severity. + LOW = 2; + // Medium severity. + MEDIUM = 3; + // High severity. + HIGH = 4; + // Critical severity. + CRITICAL = 5; +} + +// Vulnerability provides metadata about a security vulnerability. +message Vulnerability { + // The CVSS score for this vulnerability. + float cvss_score = 1; + + // Note provider assigned impact of the vulnerability. + Severity severity = 2; + + // All information about the package to specifically identify this + // vulnerability. One entry per (version range and cpe_uri) the package + // vulnerability has manifested in. + repeated Detail details = 3; + + // Identifies all occurrences of this vulnerability in the package for a + // specific distro/location. For example: glibc in + // cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2 + message Detail { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in + // which the vulnerability manifests. Examples include distro or storage + // location for vulnerable jar. + string cpe_uri = 1; + + // The name of the package where the vulnerability was found. + string package = 2; + + // The min version of the package in which the vulnerability exists. + grafeas.v1beta1.package.Version min_affected_version = 3; + + // The max version of the package in which the vulnerability exists. + grafeas.v1beta1.package.Version max_affected_version = 4; + + // The severity (eg: distro assigned severity) for this vulnerability. + string severity_name = 5; + + // A vendor-specific description of this note. + string description = 6; + + // The fix for this specific package version. + VulnerabilityLocation fixed_location = 7; + + // The type of package; whether native or non native(ruby gems, node.js + // packages etc). + string package_type = 8; + + // Whether this detail is obsolete. Occurrences are expected not to point to + // obsolete details. + bool is_obsolete = 9; + } +} + +// Details of a vulnerability occurrence. +message Details { + // The type of package; whether native or non native(ruby gems, node.js + // packages etc) + string type = 1; + + // Output only. The note provider assigned Severity of the vulnerability. + Severity severity = 2; + + // Output only. The CVSS score of this vulnerability. CVSS score is on a + // scale of 0-10 where 0 indicates low severity and 10 indicates high + // severity. + float cvss_score = 3; + + // The set of affected locations and their fixes (if available) within the + // associated resource. + repeated PackageIssue package_issue = 4; +} + +// This message wraps a location affected by a vulnerability and its +// associated fix (if one is available). +message PackageIssue { + // The location of the vulnerability. + VulnerabilityLocation affected_location = 1; + + // The location of the available fix for vulnerability. + VulnerabilityLocation fixed_location = 2; + + // The severity (e.g., distro assigned severity) for this vulnerability. + string severity_name = 3; +} + +// The location of the vulnerability. +message VulnerabilityLocation { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) + // format. 
Examples include distro or storage location for vulnerable jar. + string cpe_uri = 1; + + // The package being described. + string package = 2; + + // The version of the package being described. + grafeas.v1beta1.package.Version version = 3; +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.gitignore b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.gitignore index 364c624e..2233cff9 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.gitignore +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.gitignore @@ -1,3 +1,6 @@ +#vendor +vendor/ + # Created by .ignore support plugin (hsz.mobi) coverage.txt ### Go template diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.travis.yml b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.travis.yml index 47d7f90b..2a845b96 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.travis.yml +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/.travis.yml @@ -1,18 +1,25 @@ sudo: false language: go +# * github.com/grpc/grpc-go still supports go1.6 +# - When we drop support for go1.6 we can remove golang.org/x/net/context +# below as it is part of the Go std library since go1.7 +# * github.com/prometheus/client_golang already requires at least go1.7 since +# September 2017 go: - 1.6.x - 1.7.x - 1.8.x + - 1.9.x + - 1.10.x + - master install: - go get github.com/prometheus/client_golang/prometheus - go get google.golang.org/grpc - go get golang.org/x/net/context - go get github.com/stretchr/testify - script: - - ./test_all.sh + - make test after_success: - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/README.md b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/README.md index 616547a7..499c5835 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/README.md +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/README.md @@ -38,7 +38,7 @@ import "github.com/grpc-ecosystem/go-grpc-prometheus" // After all your registrations, make sure all of the Prometheus metrics are initialized. grpc_prometheus.Register(myServer) // Register Prometheus metrics handler. - http.Handle("/metrics", prometheus.Handler()) + http.Handle("/metrics", promhttp.Handler()) ... ``` @@ -49,8 +49,8 @@ import "github.com/grpc-ecosystem/go-grpc-prometheus" ... clientConn, err = grpc.Dial( address, - grpc.WithUnaryInterceptor(UnaryClientInterceptor), - grpc.WithStreamInterceptor(StreamClientInterceptor) + grpc.WithUnaryInterceptor(grpc_prometheus.UnaryClientInterceptor), + grpc.WithStreamInterceptor(grpc_prometheus.StreamClientInterceptor) ) client = pb_testproto.NewTestServiceClient(clientConn) resp, err := client.PingEmpty(s.ctx, &myservice.Request{Msg: "hello"}) @@ -118,7 +118,7 @@ each of the 20 messages sent back, a counter will be incremented: grpc_server_msg_sent_total{grpc_method="PingList",grpc_service="mwitkow.testproto.TestService",grpc_type="server_stream"} 20 ``` -After the call completes, it's status (`OK` or other [gRPC status code](https://github.com/grpc/grpc-go/blob/master/codes/codes.go)) +After the call completes, its status (`OK` or other [gRPC status code](https://github.com/grpc/grpc-go/blob/master/codes/codes.go)) and the relevant call labels increment the `grpc_server_handled_total` counter. 
```jsoniq @@ -128,8 +128,8 @@ grpc_server_handled_total{grpc_code="OK",grpc_method="PingList",grpc_service="mw ## Histograms [Prometheus histograms](https://prometheus.io/docs/concepts/metric_types/#histogram) are a great way -to measure latency distributions of your RPCs. However since it is bad practice to have metrics -of [high cardinality](https://prometheus.io/docs/practices/instrumentation/#do-not-overuse-labels)) +to measure latency distributions of your RPCs. However, since it is bad practice to have metrics +of [high cardinality](https://prometheus.io/docs/practices/instrumentation/#do-not-overuse-labels) the latency monitoring metrics are disabled by default. To enable them please call the following in your server initialization code: @@ -137,8 +137,8 @@ in your server initialization code: grpc_prometheus.EnableHandlingTimeHistogram() ``` -After the call completes, it's handling time will be recorded in a [Prometheus histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) -variable `grpc_server_handling_seconds`. It contains three sub-metrics: +After the call completes, its handling time will be recorded in a [Prometheus histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) +variable `grpc_server_handling_seconds`. The histogram variable contains three sub-metrics: * `grpc_server_handling_seconds_count` - the count of all completed RPCs by status and method * `grpc_server_handling_seconds_sum` - cumulative time of RPCs by status and method, useful for @@ -168,7 +168,7 @@ grpc_server_handling_seconds_count{grpc_code="OK",grpc_method="PingList",grpc_se ## Useful query examples -Prometheus philosophy is to provide the most detailed metrics possible to the monitoring system, and +Prometheus philosophy is to provide raw metrics to the monitoring system, and let the aggregations be handled there. The verbosity of above metrics make it possible to have that flexibility. Here's a couple of useful monitoring queries: diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client.go index d9e87b2f..751a4c72 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client.go @@ -6,67 +6,34 @@ package grpc_prometheus import ( - "io" - - "golang.org/x/net/context" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" + prom "github.com/prometheus/client_golang/prometheus" ) -// UnaryClientInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Unary RPCs. -func UnaryClientInterceptor(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { - monitor := newClientReporter(Unary, method) - monitor.SentMessage() - err := invoker(ctx, method, req, reply, cc, opts...) - if err != nil { - monitor.ReceivedMessage() - } - monitor.Handled(grpc.Code(err)) - return err +var ( + // DefaultClientMetrics is the default instance of ClientMetrics. It is + // intended to be used in conjunction the default Prometheus metrics + // registry. + DefaultClientMetrics = NewClientMetrics() + + // UnaryClientInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Unary RPCs. + UnaryClientInterceptor = DefaultClientMetrics.UnaryClientInterceptor() + + // StreamClientInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Streaming RPCs. 
+ StreamClientInterceptor = DefaultClientMetrics.StreamClientInterceptor() +) + +func init() { + prom.MustRegister(DefaultClientMetrics.clientStartedCounter) + prom.MustRegister(DefaultClientMetrics.clientHandledCounter) + prom.MustRegister(DefaultClientMetrics.clientStreamMsgReceived) + prom.MustRegister(DefaultClientMetrics.clientStreamMsgSent) } -// StreamServerInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Streaming RPCs. -func StreamClientInterceptor(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { - monitor := newClientReporter(clientStreamType(desc), method) - clientStream, err := streamer(ctx, desc, cc, method, opts...) - if err != nil { - monitor.Handled(grpc.Code(err)) - return nil, err - } - return &monitoredClientStream{clientStream, monitor}, nil -} - -func clientStreamType(desc *grpc.StreamDesc) grpcType { - if desc.ClientStreams && !desc.ServerStreams { - return ClientStream - } else if !desc.ClientStreams && desc.ServerStreams { - return ServerStream - } - return BidiStream -} - -// monitoredClientStream wraps grpc.ClientStream allowing each Sent/Recv of message to increment counters. -type monitoredClientStream struct { - grpc.ClientStream - monitor *clientReporter -} - -func (s *monitoredClientStream) SendMsg(m interface{}) error { - err := s.ClientStream.SendMsg(m) - if err == nil { - s.monitor.SentMessage() - } - return err -} - -func (s *monitoredClientStream) RecvMsg(m interface{}) error { - err := s.ClientStream.RecvMsg(m) - if err == nil { - s.monitor.ReceivedMessage() - } else if err == io.EOF { - s.monitor.Handled(codes.OK) - } else { - s.monitor.Handled(grpc.Code(err)) - } - return err +// EnableClientHandlingTimeHistogram turns on recording of handling time of +// RPCs. Histogram metrics can be very expensive for Prometheus to retain and +// query. This function acts on the DefaultClientMetrics variable and the +// default Prometheus metrics registry. +func EnableClientHandlingTimeHistogram(opts ...HistogramOption) { + DefaultClientMetrics.EnableClientHandlingTimeHistogram(opts...) + prom.Register(DefaultClientMetrics.clientHandledHistogram) } diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_metrics.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_metrics.go new file mode 100644 index 00000000..9b476f98 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_metrics.go @@ -0,0 +1,170 @@ +package grpc_prometheus + +import ( + "io" + + prom "github.com/prometheus/client_golang/prometheus" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// ClientMetrics represents a collection of metrics to be registered on a +// Prometheus metrics registry for a gRPC client. +type ClientMetrics struct { + clientStartedCounter *prom.CounterVec + clientHandledCounter *prom.CounterVec + clientStreamMsgReceived *prom.CounterVec + clientStreamMsgSent *prom.CounterVec + clientHandledHistogramEnabled bool + clientHandledHistogramOpts prom.HistogramOpts + clientHandledHistogram *prom.HistogramVec +} + +// NewClientMetrics returns a ClientMetrics object. Use a new instance of +// ClientMetrics when not using the default Prometheus metrics registry, for +// example when wanting to control which metrics are added to a registry as +// opposed to automatically adding metrics via init functions. 
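+//
+// A minimal usage sketch (illustrative only; "reg", "address", "conn" and
+// "err" are assumed names, not part of this package):
+//
+//	clientMetrics := NewClientMetrics()
+//	reg := prom.NewRegistry()
+//	reg.MustRegister(clientMetrics)
+//	conn, err := grpc.Dial(address,
+//		grpc.WithUnaryInterceptor(clientMetrics.UnaryClientInterceptor()),
+//		grpc.WithStreamInterceptor(clientMetrics.StreamClientInterceptor()),
+//	)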
+func NewClientMetrics(counterOpts ...CounterOption) *ClientMetrics { + opts := counterOptions(counterOpts) + return &ClientMetrics{ + clientStartedCounter: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_client_started_total", + Help: "Total number of RPCs started on the client.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + + clientHandledCounter: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_client_handled_total", + Help: "Total number of RPCs completed by the client, regardless of success or failure.", + }), []string{"grpc_type", "grpc_service", "grpc_method", "grpc_code"}), + + clientStreamMsgReceived: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_client_msg_received_total", + Help: "Total number of RPC stream messages received by the client.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + + clientStreamMsgSent: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_client_msg_sent_total", + Help: "Total number of gRPC stream messages sent by the client.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + + clientHandledHistogramEnabled: false, + clientHandledHistogramOpts: prom.HistogramOpts{ + Name: "grpc_client_handling_seconds", + Help: "Histogram of response latency (seconds) of the gRPC until it is finished by the application.", + Buckets: prom.DefBuckets, + }, + clientHandledHistogram: nil, + } +} + +// Describe sends the super-set of all possible descriptors of metrics +// collected by this Collector to the provided channel and returns once +// the last descriptor has been sent. +func (m *ClientMetrics) Describe(ch chan<- *prom.Desc) { + m.clientStartedCounter.Describe(ch) + m.clientHandledCounter.Describe(ch) + m.clientStreamMsgReceived.Describe(ch) + m.clientStreamMsgSent.Describe(ch) + if m.clientHandledHistogramEnabled { + m.clientHandledHistogram.Describe(ch) + } +} + +// Collect is called by the Prometheus registry when collecting +// metrics. The implementation sends each collected metric via the +// provided channel and returns once the last metric has been sent. +func (m *ClientMetrics) Collect(ch chan<- prom.Metric) { + m.clientStartedCounter.Collect(ch) + m.clientHandledCounter.Collect(ch) + m.clientStreamMsgReceived.Collect(ch) + m.clientStreamMsgSent.Collect(ch) + if m.clientHandledHistogramEnabled { + m.clientHandledHistogram.Collect(ch) + } +} + +// EnableClientHandlingTimeHistogram turns on recording of handling time of RPCs. +// Histogram metrics can be very expensive for Prometheus to retain and query. +func (m *ClientMetrics) EnableClientHandlingTimeHistogram(opts ...HistogramOption) { + for _, o := range opts { + o(&m.clientHandledHistogramOpts) + } + if !m.clientHandledHistogramEnabled { + m.clientHandledHistogram = prom.NewHistogramVec( + m.clientHandledHistogramOpts, + []string{"grpc_type", "grpc_service", "grpc_method"}, + ) + } + m.clientHandledHistogramEnabled = true +} + +// UnaryClientInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Unary RPCs. 
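+// Errors returned by the invoker are translated to gRPC status codes via
+// status.FromError before being recorded, so non-gRPC errors are counted
+// under codes.Unknown.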
+func (m *ClientMetrics) UnaryClientInterceptor() func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + monitor := newClientReporter(m, Unary, method) + monitor.SentMessage() + err := invoker(ctx, method, req, reply, cc, opts...) + if err != nil { + monitor.ReceivedMessage() + } + st, _ := status.FromError(err) + monitor.Handled(st.Code()) + return err + } +} + +// StreamClientInterceptor is a gRPC client-side interceptor that provides Prometheus monitoring for Streaming RPCs. +func (m *ClientMetrics) StreamClientInterceptor() func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { + return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { + monitor := newClientReporter(m, clientStreamType(desc), method) + clientStream, err := streamer(ctx, desc, cc, method, opts...) + if err != nil { + st, _ := status.FromError(err) + monitor.Handled(st.Code()) + return nil, err + } + return &monitoredClientStream{clientStream, monitor}, nil + } +} + +func clientStreamType(desc *grpc.StreamDesc) grpcType { + if desc.ClientStreams && !desc.ServerStreams { + return ClientStream + } else if !desc.ClientStreams && desc.ServerStreams { + return ServerStream + } + return BidiStream +} + +// monitoredClientStream wraps grpc.ClientStream allowing each Sent/Recv of message to increment counters. +type monitoredClientStream struct { + grpc.ClientStream + monitor *clientReporter +} + +func (s *monitoredClientStream) SendMsg(m interface{}) error { + err := s.ClientStream.SendMsg(m) + if err == nil { + s.monitor.SentMessage() + } + return err +} + +func (s *monitoredClientStream) RecvMsg(m interface{}) error { + err := s.ClientStream.RecvMsg(m) + if err == nil { + s.monitor.ReceivedMessage() + } else if err == io.EOF { + s.monitor.Handled(codes.OK) + } else { + st, _ := status.FromError(err) + s.monitor.Handled(st.Code()) + } + return err +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_reporter.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_reporter.go index 16b76155..cbf15322 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_reporter.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_reporter.go @@ -7,105 +7,40 @@ import ( "time" "google.golang.org/grpc/codes" - - prom "github.com/prometheus/client_golang/prometheus" ) -var ( - clientStartedCounter = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "client", - Name: "started_total", - Help: "Total number of RPCs started on the client.", - }, []string{"grpc_type", "grpc_service", "grpc_method"}) - - clientHandledCounter = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "client", - Name: "handled_total", - Help: "Total number of RPCs completed by the client, regardless of success or failure.", - }, []string{"grpc_type", "grpc_service", "grpc_method", "grpc_code"}) - - clientStreamMsgReceived = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "client", - Name: "msg_received_total", - Help: "Total number of RPC stream messages received by the client.", - 
}, []string{"grpc_type", "grpc_service", "grpc_method"}) - - clientStreamMsgSent = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "client", - Name: "msg_sent_total", - Help: "Total number of gRPC stream messages sent by the client.", - }, []string{"grpc_type", "grpc_service", "grpc_method"}) - - clientHandledHistogramEnabled = false - clientHandledHistogramOpts = prom.HistogramOpts{ - Namespace: "grpc", - Subsystem: "client", - Name: "handling_seconds", - Help: "Histogram of response latency (seconds) of the gRPC until it is finished by the application.", - Buckets: prom.DefBuckets, - } - clientHandledHistogram *prom.HistogramVec -) - -func init() { - prom.MustRegister(clientStartedCounter) - prom.MustRegister(clientHandledCounter) - prom.MustRegister(clientStreamMsgReceived) - prom.MustRegister(clientStreamMsgSent) -} - -// EnableClientHandlingTimeHistogram turns on recording of handling time of RPCs. -// Histogram metrics can be very expensive for Prometheus to retain and query. -func EnableClientHandlingTimeHistogram(opts ...HistogramOption) { - for _, o := range opts { - o(&clientHandledHistogramOpts) - } - if !clientHandledHistogramEnabled { - clientHandledHistogram = prom.NewHistogramVec( - clientHandledHistogramOpts, - []string{"grpc_type", "grpc_service", "grpc_method"}, - ) - prom.Register(clientHandledHistogram) - } - clientHandledHistogramEnabled = true -} - type clientReporter struct { + metrics *ClientMetrics rpcType grpcType serviceName string methodName string startTime time.Time } -func newClientReporter(rpcType grpcType, fullMethod string) *clientReporter { - r := &clientReporter{rpcType: rpcType} - if clientHandledHistogramEnabled { +func newClientReporter(m *ClientMetrics, rpcType grpcType, fullMethod string) *clientReporter { + r := &clientReporter{ + metrics: m, + rpcType: rpcType, + } + if r.metrics.clientHandledHistogramEnabled { r.startTime = time.Now() } r.serviceName, r.methodName = splitMethodName(fullMethod) - clientStartedCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.clientStartedCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() return r } func (r *clientReporter) ReceivedMessage() { - clientStreamMsgReceived.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.clientStreamMsgReceived.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() } func (r *clientReporter) SentMessage() { - clientStreamMsgSent.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.clientStreamMsgSent.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() } func (r *clientReporter) Handled(code codes.Code) { - clientHandledCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName, code.String()).Inc() - if clientHandledHistogramEnabled { - clientHandledHistogram.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Observe(time.Since(r.startTime).Seconds()) + r.metrics.clientHandledCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName, code.String()).Inc() + if r.metrics.clientHandledHistogramEnabled { + r.metrics.clientHandledHistogram.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Observe(time.Since(r.startTime).Seconds()) } } diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_test.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_test.go index b2ebda42..8ed96a28 100644 --- 
a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_test.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/client_test.go @@ -12,12 +12,19 @@ import ( "io" pb_testproto "github.com/grpc-ecosystem/go-grpc-prometheus/examples/testproto" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" "golang.org/x/net/context" "google.golang.org/grpc" "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var ( + // client metrics must satisfy the Collector interface + _ prometheus.Collector = NewClientMetrics() ) func TestClientInterceptorSuite(t *testing.T) { @@ -152,10 +159,11 @@ func (s *ClientInterceptorTestSuite) TestStreamingIncrementsHistograms() { before = sumCountersForMetricAndLabels(s.T(), "grpc_client_handling_seconds_count", "PingList", "server_stream") ss, err := s.testClient.PingList(s.ctx, &pb_testproto.PingRequest{ErrorCodeReturned: uint32(codes.FailedPrecondition)}) // should return with code=FailedPrecondition - require.NoError(s.T(), err, "PingList must not fail immedietely") + require.NoError(s.T(), err, "PingList must not fail immediately") // Do a read, just to progate errors. _, err = ss.Recv() - require.Equal(s.T(), codes.FailedPrecondition, grpc.Code(err), "Recv must return FailedPrecondition, otherwise the test is wrong") + st, _ := status.FromError(err) + require.Equal(s.T(), codes.FailedPrecondition, st.Code(), "Recv must return FailedPrecondition, otherwise the test is wrong") after = sumCountersForMetricAndLabels(s.T(), "grpc_client_handling_seconds_count", "PingList", "server_stream") assert.EqualValues(s.T(), before+1, after, "grpc_client_handling_seconds_count should be incremented for PingList FailedPrecondition") @@ -180,10 +188,11 @@ func (s *ClientInterceptorTestSuite) TestStreamingIncrementsHandled() { before = sumCountersForMetricAndLabels(s.T(), "grpc_client_handled_total", "PingList", "server_stream", "FailedPrecondition") ss, err := s.testClient.PingList(s.ctx, &pb_testproto.PingRequest{ErrorCodeReturned: uint32(codes.FailedPrecondition)}) // should return with code=FailedPrecondition - require.NoError(s.T(), err, "PingList must not fail immedietely") + require.NoError(s.T(), err, "PingList must not fail immediately") // Do a read, just to progate errors. 
_, err = ss.Recv() - require.Equal(s.T(), codes.FailedPrecondition, grpc.Code(err), "Recv must return FailedPrecondition, otherwise the test is wrong") + st, _ := status.FromError(err) + require.Equal(s.T(), codes.FailedPrecondition, st.Code(), "Recv must return FailedPrecondition, otherwise the test is wrong") after = sumCountersForMetricAndLabels(s.T(), "grpc_client_handled_total", "PingList", "server_stream", "FailedPrecondition") assert.EqualValues(s.T(), before+1, after, "grpc_client_handled_total should be incremented for PingList FailedPrecondition") @@ -201,7 +210,7 @@ func (s *ClientInterceptorTestSuite) TestStreamingIncrementsMessageCounts() { break } require.NoError(s.T(), err, "reading pingList shouldn't fail") - count += 1 + count++ } require.EqualValues(s.T(), countListResponses, count, "Number of received msg on the wire must match") afterSent := sumCountersForMetricAndLabels(s.T(), "grpc_client_msg_sent_total", "PingList", "server_stream") diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/client/client.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/client/client.go new file mode 100644 index 00000000..1b63ab69 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/client/client.go @@ -0,0 +1,34 @@ +package main + +import ( + "fmt" + "log" + + "golang.org/x/net/context" + "google.golang.org/grpc" + + pb "github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf" +) + +func main() { + // Create a insecure gRPC channel to communicate with the server. + conn, err := grpc.Dial( + fmt.Sprintf("localhost:%v", 9093), + grpc.WithInsecure(), + ) + if err != nil { + log.Fatal(err) + } + + defer conn.Close() + + // Create a gRPC server client. + client := pb.NewDemoServiceClient(conn) + // Call “SayHello” method and wait for response from gRPC Server. + resp, err := client.SayHello(context.Background(), &pb.HelloRequest{Name: "Test"}) + if err != nil { + log.Fatal(err) + } + + fmt.Println(resp) +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/prometheus/prometheus.yaml b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/prometheus/prometheus.yaml new file mode 100644 index 00000000..12d75a14 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/prometheus/prometheus.yaml @@ -0,0 +1,30 @@ +# my global config +global: + scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute. + evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute. + # scrape_timeout is set to the global default (10s). + + # Attach these labels to any time series or alerts when communicating with + # external systems (federation, remote storage, Alertmanager). + external_labels: + monitor: 'kirk-grpc-service-monitor' + +# Load rules once and periodically evaluate them according to the global 'evaluation_interval'. +rule_files: + # - "first.rules" + # - "second.rules" + +# A scrape configuration containing exactly one endpoint to scrape: +# Here it's Prometheus itself. +scrape_configs: + # The job name is added as a label `job=` to any timeseries scraped from this config. 
+ + # - job_name: 'prometheus' + # scrape_interval: 5s + # static_configs: + # - targets: ['localhost:9090'] + + - job_name: 'grpcserver' + scrape_interval: 2s + static_configs: + - targets: ['localhost:9092'] diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.pb.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.pb.go new file mode 100644 index 00000000..f4a0eddf --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: service.proto + +/* +Package proto is a generated protocol buffer package. + +It is generated from these files: + service.proto + +It has these top-level messages: + HelloRequest + HelloResponse +*/ +package proto + +import proto1 "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto1.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto1.ProtoPackageIsVersion2 // please upgrade the proto package + +type HelloRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *HelloRequest) Reset() { *m = HelloRequest{} } +func (m *HelloRequest) String() string { return proto1.CompactTextString(m) } +func (*HelloRequest) ProtoMessage() {} +func (*HelloRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *HelloRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type HelloResponse struct { + Message string `protobuf:"bytes,1,opt,name=message" json:"message,omitempty"` +} + +func (m *HelloResponse) Reset() { *m = HelloResponse{} } +func (m *HelloResponse) String() string { return proto1.CompactTextString(m) } +func (*HelloResponse) ProtoMessage() {} +func (*HelloResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *HelloResponse) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func init() { + proto1.RegisterType((*HelloRequest)(nil), "proto.HelloRequest") + proto1.RegisterType((*HelloResponse)(nil), "proto.HelloResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// Client API for DemoService service + +type DemoServiceClient interface { + SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloResponse, error) +} + +type demoServiceClient struct { + cc *grpc.ClientConn +} + +func NewDemoServiceClient(cc *grpc.ClientConn) DemoServiceClient { + return &demoServiceClient{cc} +} + +func (c *demoServiceClient) SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloResponse, error) { + out := new(HelloResponse) + err := grpc.Invoke(ctx, "/proto.DemoService/SayHello", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for DemoService service + +type DemoServiceServer interface { + SayHello(context.Context, *HelloRequest) (*HelloResponse, error) +} + +func RegisterDemoServiceServer(s *grpc.Server, srv DemoServiceServer) { + s.RegisterService(&_DemoService_serviceDesc, srv) +} + +func _DemoService_SayHello_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(HelloRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DemoServiceServer).SayHello(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.DemoService/SayHello", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DemoServiceServer).SayHello(ctx, req.(*HelloRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DemoService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "proto.DemoService", + HandlerType: (*DemoServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SayHello", + Handler: _DemoService_SayHello_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "service.proto", +} + +func init() { proto1.RegisterFile("service.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 142 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2d, 0x4e, 0x2d, 0x2a, + 0xcb, 0x4c, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x05, 0x53, 0x4a, 0x4a, 0x5c, + 0x3c, 0x1e, 0xa9, 0x39, 0x39, 0xf9, 0x41, 0xa9, 0x85, 0xa5, 0xa9, 0xc5, 0x25, 0x42, 0x42, 0x5c, + 0x2c, 0x79, 0x89, 0xb9, 0xa9, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x60, 0xb6, 0x92, 0x26, + 0x17, 0x2f, 0x54, 0x4d, 0x71, 0x41, 0x7e, 0x5e, 0x71, 0xaa, 0x90, 0x04, 0x17, 0x7b, 0x6e, 0x6a, + 0x71, 0x71, 0x62, 0x3a, 0x4c, 0x1d, 0x8c, 0x6b, 0xe4, 0xc6, 0xc5, 0xed, 0x92, 0x9a, 0x9b, 0x1f, + 0x0c, 0xb1, 0x4a, 0xc8, 0x9c, 0x8b, 0x23, 0x38, 0xb1, 0x12, 0xac, 0x59, 0x48, 0x18, 0x62, 0xb1, + 0x1e, 0xb2, 0x75, 0x52, 0x22, 0xa8, 0x82, 0x10, 0xf3, 0x95, 0x18, 0x92, 0xd8, 0xc0, 0xc2, 0xc6, + 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf2, 0xf5, 0x90, 0x47, 0xb5, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.proto b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.proto new file mode 100644 index 00000000..32f9e78a --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf/service.proto @@ -0,0 +1,15 @@ +syntax="proto3"; + +package proto; + +service DemoService { + rpc SayHello(HelloRequest) returns (HelloResponse) {} +} + +message HelloRequest { + string name = 1; +} + +message HelloResponse 
{ + string message = 1; +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/server/server.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/server/server.go new file mode 100644 index 00000000..0b4dbd2d --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/server/server.go @@ -0,0 +1,87 @@ +package main + +import ( + "fmt" + "log" + "net" + "net/http" + + "golang.org/x/net/context" + "google.golang.org/grpc" + + "github.com/grpc-ecosystem/go-grpc-prometheus" + pb "github.com/grpc-ecosystem/go-grpc-prometheus/examples/grpc-server-with-prometheus/protobuf" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +// DemoServiceServer defines a Server. +type DemoServiceServer struct{} + +func newDemoServer() *DemoServiceServer { + return &DemoServiceServer{} +} + +// SayHello implements a interface defined by protobuf. +func (s *DemoServiceServer) SayHello(ctx context.Context, request *pb.HelloRequest) (*pb.HelloResponse, error) { + customizedCounterMetric.WithLabelValues(request.Name).Inc() + return &pb.HelloResponse{Message: fmt.Sprintf("Hello %s", request.Name)}, nil +} + +var ( + // Create a metrics registry. + reg = prometheus.NewRegistry() + + // Create some standard server metrics. + grpcMetrics = grpc_prometheus.NewServerMetrics() + + // Create a customized counter metric. + customizedCounterMetric = prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: "demo_server_say_hello_method_handle_count", + Help: "Total number of RPCs handled on the server.", + }, []string{"name"}) +) + +func init() { + // Register standard server metrics and customized metrics to registry. + reg.MustRegister(grpcMetrics, customizedCounterMetric) + customizedCounterMetric.WithLabelValues("Test") +} + +// NOTE: Graceful shutdown is missing. Don't use this demo in your production setup. +func main() { + // Listen an actual port. + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", 9093)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + defer lis.Close() + + // Create a HTTP server for prometheus. + httpServer := &http.Server{Handler: promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), Addr: fmt.Sprintf("0.0.0.0:%d", 9092)} + + // Create a gRPC Server with gRPC interceptor. + grpcServer := grpc.NewServer( + grpc.StreamInterceptor(grpcMetrics.StreamServerInterceptor()), + grpc.UnaryInterceptor(grpcMetrics.UnaryServerInterceptor()), + ) + + // Create a new api server. + demoServer := newDemoServer() + + // Register your service. + pb.RegisterDemoServiceServer(grpcServer, demoServer) + + // Initialize all metrics. + grpcMetrics.InitializeMetrics(grpcServer) + + // Start your http server for prometheus. + go func() { + if err := httpServer.ListenAndServe(); err != nil { + log.Fatal("Unable to start a http server.") + } + }() + + // Start your gRPC server. + log.Fatal(grpcServer.Serve(lis)) +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/makefile b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/makefile new file mode 100644 index 00000000..74c08422 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/makefile @@ -0,0 +1,16 @@ +SHELL="/bin/bash" + +GOFILES_NOVENDOR = $(shell go list ./... 
| grep -v /vendor/) + +all: vet fmt test + +fmt: + go fmt $(GOFILES_NOVENDOR) + +vet: + go vet $(GOFILES_NOVENDOR) + +test: vet + ./scripts/test_all.sh + +.PHONY: all vet test diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/metric_options.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/metric_options.go new file mode 100644 index 00000000..9d51aec9 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/metric_options.go @@ -0,0 +1,41 @@ +package grpc_prometheus + +import ( + prom "github.com/prometheus/client_golang/prometheus" +) + +// A CounterOption lets you add options to Counter metrics using With* funcs. +type CounterOption func(*prom.CounterOpts) + +type counterOptions []CounterOption + +func (co counterOptions) apply(o prom.CounterOpts) prom.CounterOpts { + for _, f := range co { + f(&o) + } + return o +} + +// WithConstLabels allows you to add ConstLabels to Counter metrics. +func WithConstLabels(labels prom.Labels) CounterOption { + return func(o *prom.CounterOpts) { + o.ConstLabels = labels + } +} + +// A HistogramOption lets you add options to Histogram metrics using With* +// funcs. +type HistogramOption func(*prom.HistogramOpts) + +// WithHistogramBuckets allows you to specify custom bucket ranges for histograms if EnableHandlingTimeHistogram is on. +func WithHistogramBuckets(buckets []float64) HistogramOption { + return func(o *prom.HistogramOpts) { o.Buckets = buckets } +} + +// WithHistogramConstLabels allows you to add custom ConstLabels to +// histograms metrics. +func WithHistogramConstLabels(labels prom.Labels) HistogramOption { + return func(o *prom.HistogramOpts) { + o.ConstLabels = labels + } +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/test_all.sh b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/scripts/test_all.sh similarity index 100% rename from vendor/github.com/grpc-ecosystem/go-grpc-prometheus/test_all.sh rename to vendor/github.com/grpc-ecosystem/go-grpc-prometheus/scripts/test_all.sh diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server.go index f85c8c23..322f9904 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server.go @@ -6,69 +6,43 @@ package grpc_prometheus import ( - "golang.org/x/net/context" + prom "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc" ) -// PreregisterServices takes a gRPC server and pre-initializes all counters to 0. -// This allows for easier monitoring in Prometheus (no missing metrics), and should be called *after* all services have -// been registered with the server. +var ( + // DefaultServerMetrics is the default instance of ServerMetrics. It is + // intended to be used in conjunction the default Prometheus metrics + // registry. + DefaultServerMetrics = NewServerMetrics() + + // UnaryServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Unary RPCs. + UnaryServerInterceptor = DefaultServerMetrics.UnaryServerInterceptor() + + // StreamServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Streaming RPCs. 
+ StreamServerInterceptor = DefaultServerMetrics.StreamServerInterceptor() +) + +func init() { + prom.MustRegister(DefaultServerMetrics.serverStartedCounter) + prom.MustRegister(DefaultServerMetrics.serverHandledCounter) + prom.MustRegister(DefaultServerMetrics.serverStreamMsgReceived) + prom.MustRegister(DefaultServerMetrics.serverStreamMsgSent) +} + +// Register takes a gRPC server and pre-initializes all counters to 0. This +// allows for easier monitoring in Prometheus (no missing metrics), and should +// be called *after* all services have been registered with the server. This +// function acts on the DefaultServerMetrics variable. func Register(server *grpc.Server) { - serviceInfo := server.GetServiceInfo() - for serviceName, info := range serviceInfo { - for _, mInfo := range info.Methods { - preRegisterMethod(serviceName, &mInfo) - } - } + DefaultServerMetrics.InitializeMetrics(server) } -// UnaryServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Unary RPCs. -func UnaryServerInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { - monitor := newServerReporter(Unary, info.FullMethod) - monitor.ReceivedMessage() - resp, err := handler(ctx, req) - monitor.Handled(grpc.Code(err)) - if err == nil { - monitor.SentMessage() - } - return resp, err -} - -// StreamServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Streaming RPCs. -func StreamServerInterceptor(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { - monitor := newServerReporter(streamRpcType(info), info.FullMethod) - err := handler(srv, &monitoredServerStream{ss, monitor}) - monitor.Handled(grpc.Code(err)) - return err -} - -func streamRpcType(info *grpc.StreamServerInfo) grpcType { - if info.IsClientStream && !info.IsServerStream { - return ClientStream - } else if !info.IsClientStream && info.IsServerStream { - return ServerStream - } - return BidiStream -} - -// monitoredStream wraps grpc.ServerStream allowing each Sent/Recv of message to increment counters. -type monitoredServerStream struct { - grpc.ServerStream - monitor *serverReporter -} - -func (s *monitoredServerStream) SendMsg(m interface{}) error { - err := s.ServerStream.SendMsg(m) - if err == nil { - s.monitor.SentMessage() - } - return err -} - -func (s *monitoredServerStream) RecvMsg(m interface{}) error { - err := s.ServerStream.RecvMsg(m) - if err == nil { - s.monitor.ReceivedMessage() - } - return err +// EnableHandlingTimeHistogram turns on recording of handling time +// of RPCs. Histogram metrics can be very expensive for Prometheus +// to retain and query. This function acts on the DefaultServerMetrics +// variable and the default Prometheus metrics registry. +func EnableHandlingTimeHistogram(opts ...HistogramOption) { + DefaultServerMetrics.EnableHandlingTimeHistogram(opts...) 
+ prom.Register(DefaultServerMetrics.serverHandledHistogram) } diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_metrics.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_metrics.go new file mode 100644 index 00000000..5b1467e7 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_metrics.go @@ -0,0 +1,185 @@ +package grpc_prometheus + +import ( + prom "github.com/prometheus/client_golang/prometheus" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/status" +) + +// ServerMetrics represents a collection of metrics to be registered on a +// Prometheus metrics registry for a gRPC server. +type ServerMetrics struct { + serverStartedCounter *prom.CounterVec + serverHandledCounter *prom.CounterVec + serverStreamMsgReceived *prom.CounterVec + serverStreamMsgSent *prom.CounterVec + serverHandledHistogramEnabled bool + serverHandledHistogramOpts prom.HistogramOpts + serverHandledHistogram *prom.HistogramVec +} + +// NewServerMetrics returns a ServerMetrics object. Use a new instance of +// ServerMetrics when not using the default Prometheus metrics registry, for +// example when wanting to control which metrics are added to a registry as +// opposed to automatically adding metrics via init functions. +func NewServerMetrics(counterOpts ...CounterOption) *ServerMetrics { + opts := counterOptions(counterOpts) + return &ServerMetrics{ + serverStartedCounter: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_server_started_total", + Help: "Total number of RPCs started on the server.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + serverHandledCounter: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_server_handled_total", + Help: "Total number of RPCs completed on the server, regardless of success or failure.", + }), []string{"grpc_type", "grpc_service", "grpc_method", "grpc_code"}), + serverStreamMsgReceived: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_server_msg_received_total", + Help: "Total number of RPC stream messages received on the server.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + serverStreamMsgSent: prom.NewCounterVec( + opts.apply(prom.CounterOpts{ + Name: "grpc_server_msg_sent_total", + Help: "Total number of gRPC stream messages sent by the server.", + }), []string{"grpc_type", "grpc_service", "grpc_method"}), + serverHandledHistogramEnabled: false, + serverHandledHistogramOpts: prom.HistogramOpts{ + Name: "grpc_server_handling_seconds", + Help: "Histogram of response latency (seconds) of gRPC that had been application-level handled by the server.", + Buckets: prom.DefBuckets, + }, + serverHandledHistogram: nil, + } +} + +// EnableHandlingTimeHistogram enables histograms being registered when +// registering the ServerMetrics on a Prometheus registry. Histograms can be +// expensive on Prometheus servers. It takes options to configure histogram +// options such as the defined buckets. 
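+//
+// A minimal sketch (assuming a dedicated registry named "reg"); enable the
+// histogram before registering the ServerMetrics so it is included in
+// Describe/Collect:
+//
+//	serverMetrics := NewServerMetrics()
+//	serverMetrics.EnableHandlingTimeHistogram(
+//		WithHistogramBuckets([]float64{0.01, 0.1, 0.5, 1, 5}),
+//	)
+//	reg := prom.NewRegistry()
+//	reg.MustRegister(serverMetrics)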
+func (m *ServerMetrics) EnableHandlingTimeHistogram(opts ...HistogramOption) { + for _, o := range opts { + o(&m.serverHandledHistogramOpts) + } + if !m.serverHandledHistogramEnabled { + m.serverHandledHistogram = prom.NewHistogramVec( + m.serverHandledHistogramOpts, + []string{"grpc_type", "grpc_service", "grpc_method"}, + ) + } + m.serverHandledHistogramEnabled = true +} + +// Describe sends the super-set of all possible descriptors of metrics +// collected by this Collector to the provided channel and returns once +// the last descriptor has been sent. +func (m *ServerMetrics) Describe(ch chan<- *prom.Desc) { + m.serverStartedCounter.Describe(ch) + m.serverHandledCounter.Describe(ch) + m.serverStreamMsgReceived.Describe(ch) + m.serverStreamMsgSent.Describe(ch) + if m.serverHandledHistogramEnabled { + m.serverHandledHistogram.Describe(ch) + } +} + +// Collect is called by the Prometheus registry when collecting +// metrics. The implementation sends each collected metric via the +// provided channel and returns once the last metric has been sent. +func (m *ServerMetrics) Collect(ch chan<- prom.Metric) { + m.serverStartedCounter.Collect(ch) + m.serverHandledCounter.Collect(ch) + m.serverStreamMsgReceived.Collect(ch) + m.serverStreamMsgSent.Collect(ch) + if m.serverHandledHistogramEnabled { + m.serverHandledHistogram.Collect(ch) + } +} + +// UnaryServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Unary RPCs. +func (m *ServerMetrics) UnaryServerInterceptor() func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + monitor := newServerReporter(m, Unary, info.FullMethod) + monitor.ReceivedMessage() + resp, err := handler(ctx, req) + st, _ := status.FromError(err) + monitor.Handled(st.Code()) + if err == nil { + monitor.SentMessage() + } + return resp, err + } +} + +// StreamServerInterceptor is a gRPC server-side interceptor that provides Prometheus monitoring for Streaming RPCs. +func (m *ServerMetrics) StreamServerInterceptor() func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + return func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + monitor := newServerReporter(m, streamRPCType(info), info.FullMethod) + err := handler(srv, &monitoredServerStream{ss, monitor}) + st, _ := status.FromError(err) + monitor.Handled(st.Code()) + return err + } +} + +// InitializeMetrics initializes all metrics, with their appropriate null +// value, for all gRPC methods registered on a gRPC server. This is useful, to +// ensure that all metrics exist when collecting and querying. +func (m *ServerMetrics) InitializeMetrics(server *grpc.Server) { + serviceInfo := server.GetServiceInfo() + for serviceName, info := range serviceInfo { + for _, mInfo := range info.Methods { + preRegisterMethod(m, serviceName, &mInfo) + } + } +} + +func streamRPCType(info *grpc.StreamServerInfo) grpcType { + if info.IsClientStream && !info.IsServerStream { + return ClientStream + } else if !info.IsClientStream && info.IsServerStream { + return ServerStream + } + return BidiStream +} + +// monitoredStream wraps grpc.ServerStream allowing each Sent/Recv of message to increment counters. 
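+// It is installed by StreamServerInterceptor so that per-RPC message counts
+// are attributed to that stream's serverReporter.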
+type monitoredServerStream struct { + grpc.ServerStream + monitor *serverReporter +} + +func (s *monitoredServerStream) SendMsg(m interface{}) error { + err := s.ServerStream.SendMsg(m) + if err == nil { + s.monitor.SentMessage() + } + return err +} + +func (s *monitoredServerStream) RecvMsg(m interface{}) error { + err := s.ServerStream.RecvMsg(m) + if err == nil { + s.monitor.ReceivedMessage() + } + return err +} + +// preRegisterMethod is invoked on Register of a Server, allowing all gRPC services labels to be pre-populated. +func preRegisterMethod(metrics *ServerMetrics, serviceName string, mInfo *grpc.MethodInfo) { + methodName := mInfo.Name + methodType := string(typeFromMethodInfo(mInfo)) + // These are just references (no increments), as just referencing will create the labels but not set values. + metrics.serverStartedCounter.GetMetricWithLabelValues(methodType, serviceName, methodName) + metrics.serverStreamMsgReceived.GetMetricWithLabelValues(methodType, serviceName, methodName) + metrics.serverStreamMsgSent.GetMetricWithLabelValues(methodType, serviceName, methodName) + if metrics.serverHandledHistogramEnabled { + metrics.serverHandledHistogram.GetMetricWithLabelValues(methodType, serviceName, methodName) + } + for _, code := range allCodes { + metrics.serverHandledCounter.GetMetricWithLabelValues(methodType, serviceName, methodName, code.String()) + } +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_reporter.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_reporter.go index 628a8905..aa9db540 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_reporter.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_reporter.go @@ -7,151 +7,40 @@ import ( "time" "google.golang.org/grpc/codes" - - prom "github.com/prometheus/client_golang/prometheus" - "google.golang.org/grpc" ) -type grpcType string - -const ( - Unary grpcType = "unary" - ClientStream grpcType = "client_stream" - ServerStream grpcType = "server_stream" - BidiStream grpcType = "bidi_stream" -) - -var ( - serverStartedCounter = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "server", - Name: "started_total", - Help: "Total number of RPCs started on the server.", - }, []string{"grpc_type", "grpc_service", "grpc_method"}) - - serverHandledCounter = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "server", - Name: "handled_total", - Help: "Total number of RPCs completed on the server, regardless of success or failure.", - }, []string{"grpc_type", "grpc_service", "grpc_method", "grpc_code"}) - - serverStreamMsgReceived = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "server", - Name: "msg_received_total", - Help: "Total number of RPC stream messages received on the server.", - }, []string{"grpc_type", "grpc_service", "grpc_method"}) - - serverStreamMsgSent = prom.NewCounterVec( - prom.CounterOpts{ - Namespace: "grpc", - Subsystem: "server", - Name: "msg_sent_total", - Help: "Total number of gRPC stream messages sent by the server.", - }, []string{"grpc_type", "grpc_service", "grpc_method"}) - - serverHandledHistogramEnabled = false - serverHandledHistogramOpts = prom.HistogramOpts{ - Namespace: "grpc", - Subsystem: "server", - Name: "handling_seconds", - Help: "Histogram of response latency (seconds) of gRPC that had been application-level handled by the server.", - Buckets: prom.DefBuckets, - } - serverHandledHistogram *prom.HistogramVec -) - -func init() { - 
prom.MustRegister(serverStartedCounter) - prom.MustRegister(serverHandledCounter) - prom.MustRegister(serverStreamMsgReceived) - prom.MustRegister(serverStreamMsgSent) -} - -type HistogramOption func(*prom.HistogramOpts) - -// WithHistogramBuckets allows you to specify custom bucket ranges for histograms if EnableHandlingTimeHistogram is on. -func WithHistogramBuckets(buckets []float64) HistogramOption { - return func(o *prom.HistogramOpts) { o.Buckets = buckets } -} - -// EnableHandlingTimeHistogram turns on recording of handling time of RPCs for server-side interceptors. -// Histogram metrics can be very expensive for Prometheus to retain and query. -func EnableHandlingTimeHistogram(opts ...HistogramOption) { - for _, o := range opts { - o(&serverHandledHistogramOpts) - } - if !serverHandledHistogramEnabled { - serverHandledHistogram = prom.NewHistogramVec( - serverHandledHistogramOpts, - []string{"grpc_type", "grpc_service", "grpc_method"}, - ) - prom.Register(serverHandledHistogram) - } - serverHandledHistogramEnabled = true -} - type serverReporter struct { + metrics *ServerMetrics rpcType grpcType serviceName string methodName string startTime time.Time } -func newServerReporter(rpcType grpcType, fullMethod string) *serverReporter { - r := &serverReporter{rpcType: rpcType} - if serverHandledHistogramEnabled { +func newServerReporter(m *ServerMetrics, rpcType grpcType, fullMethod string) *serverReporter { + r := &serverReporter{ + metrics: m, + rpcType: rpcType, + } + if r.metrics.serverHandledHistogramEnabled { r.startTime = time.Now() } r.serviceName, r.methodName = splitMethodName(fullMethod) - serverStartedCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.serverStartedCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() return r } func (r *serverReporter) ReceivedMessage() { - serverStreamMsgReceived.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.serverStreamMsgReceived.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() } func (r *serverReporter) SentMessage() { - serverStreamMsgSent.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() + r.metrics.serverStreamMsgSent.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Inc() } func (r *serverReporter) Handled(code codes.Code) { - serverHandledCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName, code.String()).Inc() - if serverHandledHistogramEnabled { - serverHandledHistogram.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Observe(time.Since(r.startTime).Seconds()) + r.metrics.serverHandledCounter.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName, code.String()).Inc() + if r.metrics.serverHandledHistogramEnabled { + r.metrics.serverHandledHistogram.WithLabelValues(string(r.rpcType), r.serviceName, r.methodName).Observe(time.Since(r.startTime).Seconds()) } } - -// preRegisterMethod is invoked on Register of a Server, allowing all gRPC services labels to be pre-populated. -func preRegisterMethod(serviceName string, mInfo *grpc.MethodInfo) { - methodName := mInfo.Name - methodType := string(typeFromMethodInfo(mInfo)) - // These are just references (no increments), as just referencing will create the labels but not set values. 
- serverStartedCounter.GetMetricWithLabelValues(methodType, serviceName, methodName) - serverStreamMsgReceived.GetMetricWithLabelValues(methodType, serviceName, methodName) - serverStreamMsgSent.GetMetricWithLabelValues(methodType, serviceName, methodName) - if serverHandledHistogramEnabled { - serverHandledHistogram.GetMetricWithLabelValues(methodType, serviceName, methodName) - } - for _, code := range allCodes { - serverHandledCounter.GetMetricWithLabelValues(methodType, serviceName, methodName, code.String()) - } -} - -func typeFromMethodInfo(mInfo *grpc.MethodInfo) grpcType { - if mInfo.IsClientStream == false && mInfo.IsServerStream == false { - return Unary - } - if mInfo.IsClientStream == true && mInfo.IsServerStream == false { - return ClientStream - } - if mInfo.IsClientStream == false && mInfo.IsServerStream == true { - return ServerStream - } - return BidiStream -} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_test.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_test.go index f6944f0c..5b39dcdc 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_test.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/server_test.go @@ -22,6 +22,12 @@ import ( "golang.org/x/net/context" "google.golang.org/grpc" "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var ( + // server metrics must satisfy the Collector interface + _ prometheus.Collector = NewServerMetrics() ) const ( @@ -88,7 +94,7 @@ func (s *ServerInterceptorTestSuite) TearDownSuite() { } func (s *ServerInterceptorTestSuite) TestRegisterPresetsStuff() { - for testId, testCase := range []struct { + for testID, testCase := range []struct { metricName string existingLabels []string }{ @@ -104,7 +110,7 @@ func (s *ServerInterceptorTestSuite) TestRegisterPresetsStuff() { {"grpc_server_handled_total", []string{"mwitkow.testproto.TestService", "PingEmpty", "unary", "ResourceExhausted"}}, } { lineCount := len(fetchPrometheusLines(s.T(), testCase.metricName, testCase.existingLabels...)) - assert.NotEqual(s.T(), 0, lineCount, "metrics must exist for test case %d", testId) + assert.NotEqual(s.T(), 0, lineCount, "metrics must exist for test case %d", testID) } } @@ -182,7 +188,7 @@ func (s *ServerInterceptorTestSuite) TestStreamingIncrementsHistograms() { before = sumCountersForMetricAndLabels(s.T(), "grpc_server_handling_seconds_count", "PingList", "server_stream") _, err := s.testClient.PingList(s.ctx, &pb_testproto.PingRequest{ErrorCodeReturned: uint32(codes.FailedPrecondition)}) // should return with code=FailedPrecondition - require.NoError(s.T(), err, "PingList must not fail immedietely") + require.NoError(s.T(), err, "PingList must not fail immediately") after = sumCountersForMetricAndLabels(s.T(), "grpc_server_handling_seconds_count", "PingList", "server_stream") assert.EqualValues(s.T(), before+1, after, "grpc_server_handling_seconds_count should be incremented for PingList FailedPrecondition") @@ -207,7 +213,7 @@ func (s *ServerInterceptorTestSuite) TestStreamingIncrementsHandled() { before = sumCountersForMetricAndLabels(s.T(), "grpc_server_handled_total", "PingList", "server_stream", "FailedPrecondition") _, err := s.testClient.PingList(s.ctx, &pb_testproto.PingRequest{ErrorCodeReturned: uint32(codes.FailedPrecondition)}) // should return with code=FailedPrecondition - require.NoError(s.T(), err, "PingList must not fail immedietely") + require.NoError(s.T(), err, "PingList must not fail immediately") after = 
sumCountersForMetricAndLabels(s.T(), "grpc_server_handled_total", "PingList", "server_stream", "FailedPrecondition") assert.EqualValues(s.T(), before+1, after, "grpc_server_handled_total should be incremented for PingList FailedPrecondition") @@ -225,7 +231,7 @@ func (s *ServerInterceptorTestSuite) TestStreamingIncrementsMessageCounts() { break } require.NoError(s.T(), err, "reading pingList shouldn't fail") - count += 1 + count++ } require.EqualValues(s.T(), countListResponses, count, "Number of received msg on the wire must match") afterSent := sumCountersForMetricAndLabels(s.T(), "grpc_server_msg_sent_total", "PingList", "server_stream") @@ -292,12 +298,12 @@ func (s *testService) Ping(ctx context.Context, ping *pb_testproto.PingRequest) func (s *testService) PingError(ctx context.Context, ping *pb_testproto.PingRequest) (*pb_testproto.Empty, error) { code := codes.Code(ping.ErrorCodeReturned) - return nil, grpc.Errorf(code, "Userspace error.") + return nil, status.Errorf(code, "Userspace error.") } func (s *testService) PingList(ping *pb_testproto.PingRequest, stream pb_testproto.TestService_PingListServer) error { if ping.ErrorCodeReturned != 0 { - return grpc.Errorf(codes.Code(ping.ErrorCodeReturned), "foobar") + return status.Errorf(codes.Code(ping.ErrorCodeReturned), "foobar") } // Send user trailers and headers. for i := 0; i < countListResponses; i++ { diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/util.go b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/util.go index 372460ac..7987de35 100644 --- a/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/util.go +++ b/vendor/github.com/grpc-ecosystem/go-grpc-prometheus/util.go @@ -6,9 +6,19 @@ package grpc_prometheus import ( "strings" + "google.golang.org/grpc" "google.golang.org/grpc/codes" ) +type grpcType string + +const ( + Unary grpcType = "unary" + ClientStream grpcType = "client_stream" + ServerStream grpcType = "server_stream" + BidiStream grpcType = "bidi_stream" +) + var ( allCodes = []codes.Code{ codes.OK, codes.Canceled, codes.Unknown, codes.InvalidArgument, codes.DeadlineExceeded, codes.NotFound, @@ -25,3 +35,16 @@ func splitMethodName(fullMethodName string) (string, string) { } return "unknown", "unknown" } + +func typeFromMethodInfo(mInfo *grpc.MethodInfo) grpcType { + if !mInfo.IsClientStream && !mInfo.IsServerStream { + return Unary + } + if mInfo.IsClientStream && !mInfo.IsServerStream { + return ClientStream + } + if !mInfo.IsClientStream && mInfo.IsServerStream { + return ServerStream + } + return BidiStream +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/.gitignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/.gitignore index 88ddcdf4..55e4918a 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/.gitignore +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/.gitignore @@ -1 +1,9 @@ _output/ +.idea + +# Bazel. 
+bazel-bin +bazel-genfiles +bazel-grpc-gateway +bazel-out +bazel-testlogs diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis.yml b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis.yml index c1113a5c..f9775899 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis.yml +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis.yml @@ -1,36 +1,56 @@ language: go sudo: false go: -- 1.8.x -- 1.7.x -- tip + - 1.9.x + - 1.10.x + - master go_import_path: github.com/grpc-ecosystem/grpc-gateway cache: directories: - - $HOME/local - - ${TRAVIS_BUILD_DIR}/examples/browser/node_modules + - $HOME/local + - ${TRAVIS_BUILD_DIR}/examples/browser/node_modules + - $HOME/.cache/_grpc_gateway_bazel before_install: -- ./.travis/install-protoc.sh 3.1.0 -- ./.travis/install-swagger-codegen.sh 2.1.6 -- nvm install v6.1 && nvm use v6.1 && node --version -- go get github.com/golang/lint/golint -- go get github.com/dghubble/sling + - if [ "${USE_BAZEL}" = true ]; then ./.travis/install-bazel.sh 0.12.0; fi + - if [ -z "${USE_BAZEL}" ]; then ./.travis/install-protoc.sh 3.1.0; fi + - if [ -z "${USE_BAZEL}" ]; then ./.travis/install-swagger-codegen.sh 2.2.2; fi + - if [ -z "${USE_BAZEL}" ]; then nvm install v6.1 && nvm use v6.1 && node --version; fi + - go get github.com/golang/lint/golint + - go get github.com/dghubble/sling + - go get github.com/go-resty/resty install: -- go get github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway -- go get github.com/grpc-ecosystem/grpc-gateway/runtime -- go get github.com/grpc-ecosystem/grpc-gateway/examples -- go get github.com/grpc-ecosystem/grpc-gateway/examples/server + # Make sure externally referenced packages are go-gettable. + - go get github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway + - go get github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger + - go get github.com/grpc-ecosystem/grpc-gateway/runtime + - go get github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server + - go get github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server before_script: -- sh -c 'cd examples/browser && npm install' + - sh -c 'cd examples/browser && npm install' script: -- make realclean && make examples SWAGGER_CODEGEN="java -jar $HOME/local/swagger-codegen-cli.jar" -- if (go version | grep -q 1.8) && [ -z "${GATEWAY_PLUGIN_FLAGS}" ]; then test -z "$(git status --porcelain)" || (git status; git diff; exit 1); fi -- env GLOG_logtostderr=1 go test -race -v github.com/grpc-ecosystem/grpc-gateway/... 
-- make lint -- sh -c 'cd examples/browser && gulp' + - if [ "${USE_BAZEL}" = true ]; then ./.travis/bazel-build.sh; fi + - if [ "${USE_BAZEL}" = true ]; then ./.travis/bazel-test.sh; fi + + # Make sure examples of generated files are up-to-date + - if [ -z "${USE_BAZEL}" ]; then make realclean && make examples SWAGGER_CODEGEN="java -jar $HOME/local/swagger-codegen-cli.jar"; fi + - if [ -z "${USE_BAZEL}" ] && (go version | grep -q "${GO_VERSION_TO_DIFF_TEST}") && [ -z "${GATEWAY_PLUGIN_FLAGS}" ]; then test -z "$(git status --porcelain)" || (git status; git diff; exit 1); fi + + # Unit tests, integration tests and code health checks + - if [ -z "${USE_BAZEL}" ]; then env GLOG_logtostderr=1 go test -race -v github.com/grpc-ecosystem/grpc-gateway/...; fi + - if [ -z "${USE_BAZEL}" ]; then make lint; fi + - if [ -z "${USE_BAZEL}" ]; then sh -c 'cd examples/browser && node ./node_modules/gulp/bin/gulp'; fi + - if (go version | grep -q "${GO_VERSION_TO_DIFF_TEST}") && [ -z "${GATEWAY_PLUGIN_FLAGS}" ]; then env GLOG_logtostderr=1 ./bin/coverage; fi +after_success: + - bash <(curl -s https://codecov.io/bash) + env: global: - - "PATH=$PATH:$HOME/local/bin" + - "PATH=$PATH:$HOME/local/bin" + - GO_VERSION_TO_DIFF_TEST="go version go1\.10\.[0-9]\+ linux/amd64" matrix: - - GATEWAY_PLUGIN_FLAGS= - - GATEWAY_PLUGIN_FLAGS=request_context=true + - GATEWAY_PLUGIN_FLAGS= + - GATEWAY_PLUGIN_FLAGS=request_context=false +matrix: + include: + - go: master + env: USE_BAZEL=true diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-build.sh b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-build.sh new file mode 100755 index 00000000..f46781d4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-build.sh @@ -0,0 +1,10 @@ +#!/bin/sh -eu + +bazel \ + --batch \ + --output_base=$HOME/.cache/_grpc_gateway_bazel \ + --host_jvm_args=-Xmx500m \ + --host_jvm_args=-Xms500m \ + build \ + --local_resources=400,1,1.0 \ + //... diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-test.sh b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-test.sh new file mode 100755 index 00000000..86dbbf32 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/bazel-test.sh @@ -0,0 +1,12 @@ +#!/bin/sh -eu + +bazel \ + --batch \ + --output_base=$HOME/.cache/_grpc_gateway_bazel \ + --host_jvm_args=-Xmx500m \ + --host_jvm_args=-Xms500m \ + test \ + --local_resources=400,1,1.0 \ + --test_output=errors \ + --features=race \ + //... 
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/install-bazel.sh b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/install-bazel.sh new file mode 100755 index 00000000..9bebe44c --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/.travis/install-bazel.sh @@ -0,0 +1,20 @@ +#!/bin/sh -eu + +bazel_version=$1 + +if test -z "${bazel_version}"; then + echo "Usage: .travis/install-bazel.sh bazel-version" + exit 1 +fi + +if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then + OS=darwin +else + OS=linux +fi + +filename=bazel-${bazel_version}-installer-${OS}-x86_64.sh +wget https://github.com/bazelbuild/bazel/releases/download/${bazel_version}/${filename} +chmod +x $filename +./$filename --user +rm -f $filename diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/BUILD b/vendor/github.com/grpc-ecosystem/grpc-gateway/BUILD new file mode 100644 index 00000000..0ab0afa1 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/BUILD @@ -0,0 +1,23 @@ +load("@bazel_gazelle//:def.bzl", "gazelle") + +# gazelle:exclude third_party + +gazelle( + name = "gazelle_diff", + mode = "diff", + prefix = "github.com/grpc-ecosystem/grpc-gateway", +) + +gazelle( + name = "gazelle_fix", + mode = "fix", + prefix = "github.com/grpc-ecosystem/grpc-gateway", +) + +package_group( + name = "generators", + packages = [ + "//protoc-gen-grpc-gateway/...", + "//protoc-gen-swagger/...", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/CHANGELOG.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/CHANGELOG.md index 3d613fec..28558949 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/CHANGELOG.md +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/CHANGELOG.md @@ -1,8 +1,53 @@ # Change Log +## [1.3.1](https://github.com/grpc-ecosystem/grpc-gateway/tree/1.3.1) (2017-12-23) +**Merged pull requests:** + +- Add support for --Import\_path [\#507](https://github.com/grpc-ecosystem/grpc-gateway/pull/507) +- Fix \#504 Missing Definitions [\#505](https://github.com/grpc-ecosystem/grpc-gateway/pull/505) +- Maintain default delimiter of newline [\#497](https://github.com/grpc-ecosystem/grpc-gateway/pull/497) +- Fix gen-swagger to support more well known types [\#496](https://github.com/grpc-ecosystem/grpc-gateway/pull/496) +- Use golang/protobuf instead of gogo/protobuf [\#494](https://github.com/grpc-ecosystem/grpc-gateway/pull/494) +- Fix stream delimiters [\#488](https://github.com/grpc-ecosystem/grpc-gateway/pull/488) +- ForwardResponseStream status code errors [\#482](https://github.com/grpc-ecosystem/grpc-gateway/pull/482) +- protoc-gen-grpc-gateway: flip request\_context default to true [\#474](https://github.com/grpc-ecosystem/grpc-gateway/pull/474) +- grpc-gateway/generator: respect full package [\#462](https://github.com/grpc-ecosystem/grpc-gateway/pull/462) +- Add proto marshaller for proto-over-http [\#459](https://github.com/grpc-ecosystem/grpc-gateway/pull/459) + +## [v1.3](https://github.com/grpc-ecosystem/grpc-gateway/tree/v1.3) (2017-11-03) +## [v1.3.0](https://github.com/grpc-ecosystem/grpc-gateway/tree/v1.3.0) (2017-11-03) +**Merged pull requests:** + +- Streaming forward handler fix chunk encoding [\#479](https://github.com/grpc-ecosystem/grpc-gateway/pull/479) +- Fix logic handling primitive wrapper in URL params [\#478](https://github.com/grpc-ecosystem/grpc-gateway/pull/478) +- runtime: use r.Context\(\) [\#473](https://github.com/grpc-ecosystem/grpc-gateway/pull/473) +- Optional SourceCodeInfo 
[\#466](https://github.com/grpc-ecosystem/grpc-gateway/pull/466) +- Some steps to fix Travis CI [\#461](https://github.com/grpc-ecosystem/grpc-gateway/pull/461) +- fix 2 typos in Registry.SetPrefix's comment [\#455](https://github.com/grpc-ecosystem/grpc-gateway/pull/455) +- Add Handler method to pass in client [\#454](https://github.com/grpc-ecosystem/grpc-gateway/pull/454) +- Fallback to JSON name when matching URL parameter. [\#450](https://github.com/grpc-ecosystem/grpc-gateway/pull/450) +- Update DO NOT EDIT template. [\#434](https://github.com/grpc-ecosystem/grpc-gateway/pull/434) +- Memoise calls to fullyQualifiedNameToSwaggerName to speed it up for large registries [\#421](https://github.com/grpc-ecosystem/grpc-gateway/pull/421) +- Update Swagger Codegen from 2.1.6 to 2.2.2 [\#415](https://github.com/grpc-ecosystem/grpc-gateway/pull/415) +- Return codes.InvalidArgument to rather return HTTP 400 instead of HTTP 500 [\#409](https://github.com/grpc-ecosystem/grpc-gateway/pull/409) +- improve {incoming,outgoing}HeaderMatcher logic [\#408](https://github.com/grpc-ecosystem/grpc-gateway/pull/408) +- improve WKT handling in gateway and openapi output [\#404](https://github.com/grpc-ecosystem/grpc-gateway/pull/404) +- Return if runtime.AnnotateContext gave error [\#403](https://github.com/grpc-ecosystem/grpc-gateway/pull/403) +- jsonpb: update tests to reflect new jsonpb behavior [\#401](https://github.com/grpc-ecosystem/grpc-gateway/pull/401) +- Reference import grpc Status to suppress unused errors. [\#387](https://github.com/grpc-ecosystem/grpc-gateway/pull/387) +- ci: regen with current protoc-gen-go [\#385](https://github.com/grpc-ecosystem/grpc-gateway/pull/385) +- Use status package for error and introduce WithProtoErrorHandler option [\#378](https://github.com/grpc-ecosystem/grpc-gateway/pull/378) +- Return response headers from grpc server [\#374](https://github.com/grpc-ecosystem/grpc-gateway/pull/374) +- Skip unreferenced messages in definitions. [\#371](https://github.com/grpc-ecosystem/grpc-gateway/pull/371) +- Use canonical header form in default header matcher. 
[\#369](https://github.com/grpc-ecosystem/grpc-gateway/pull/369) +- support allow\_delete\_body for protoc-gen-grpc-gateway [\#318](https://github.com/grpc-ecosystem/grpc-gateway/pull/318) +- fixes package name override doesn't work [\#277](https://github.com/grpc-ecosystem/grpc-gateway/pull/277) +- add custom options to allow more control of swagger/openapi output [\#145](https://github.com/grpc-ecosystem/grpc-gateway/pull/145) + ## [v1.2.2](https://github.com/grpc-ecosystem/grpc-gateway/tree/v1.2.2) (2017-04-17) **Merged pull requests:** +- Add changelog for 1.2.2 [\#363](https://github.com/grpc-ecosystem/grpc-gateway/pull/363) - metadata: fix properly and change to Outgoing [\#361](https://github.com/grpc-ecosystem/grpc-gateway/pull/361) ## [v1.2.1](https://github.com/grpc-ecosystem/grpc-gateway/tree/v1.2.1) (2017-04-17) @@ -19,6 +64,7 @@ ## [v1.2.0.rc1](https://github.com/grpc-ecosystem/grpc-gateway/tree/v1.2.0.rc1) (2017-03-24) **Merged pull requests:** +- Support user configurable header forwarding & context metadata [\#336](https://github.com/grpc-ecosystem/grpc-gateway/pull/336) - Update go\_out parameter to remove comma [\#333](https://github.com/grpc-ecosystem/grpc-gateway/pull/333) - Update stale path in README [\#332](https://github.com/grpc-ecosystem/grpc-gateway/pull/332) - improve documentation regarding external dependencies [\#330](https://github.com/grpc-ecosystem/grpc-gateway/pull/330) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/CONTRIBUTING.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/CONTRIBUTING.md index 94983bc9..4f0c59ee 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/CONTRIBUTING.md +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/CONTRIBUTING.md @@ -9,7 +9,7 @@ Here's the recommended process of contribution. 4. Make sure that your change follows best practices in Go * [Effective Go](https://golang.org/doc/effective_go.html) * [Go Code Review Comments](https://golang.org/wiki/CodeReviewComments) -5. Make sure that `make test` passes. (use swagger-codegen 2.1.6, not newer versions) +5. Make sure that `make test` passes. (use swagger-codegen 2.2.2, not newer versions) 6. Sign [a Contributor License Agreement](https://cla.developers.google.com/clas) 7. Open a pull request in Github diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/ISSUE_TEMPLATE.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/ISSUE_TEMPLATE.md new file mode 100644 index 00000000..9edbaa64 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/ISSUE_TEMPLATE.md @@ -0,0 +1,41 @@ + +# Please follow the general troubleshooting steps first: + +- [ ] Update your protoc to the [latest version](https://github.com/google/protobuf/releases) +- [ ] Update your copy of `grpc-gateway` to the latest version from github. with + `git fetch https://github.com/grpc-ecosystem/grpc-gateway master && git reset --hard FETCH_HEAD` +- [ ] Delete the `protoc-gen-grpc-gateway` and `protoc-gen-swagger` binary from your `PATH`, + and install locally built binaries. + +### Bug reports: + +Fill in the following sections with explanations of what's gone wrong. + +Steps you follow to reproduce the error: + + + +Your steps here. + +What did you expect to happen instead: + + + +Your answer here. + +What's your theory on why it isn't working: + + + +Your theory here. 
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/Makefile b/vendor/github.com/grpc-ecosystem/grpc-gateway/Makefile index accde980..ac5bd6f5 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/Makefile +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/Makefile @@ -5,7 +5,9 @@ PKG=github.com/grpc-ecosystem/grpc-gateway GO_PLUGIN=bin/protoc-gen-go -GO_PLUGIN_PKG=github.com/golang/protobuf/protoc-gen-go +GO_PROTOBUF_REPO=github.com/golang/protobuf +GO_PLUGIN_PKG=$(GO_PROTOBUF_REPO)/protoc-gen-go +GO_PTYPES_ANY_PKG=$(GO_PROTOBUF_REPO)/ptypes/any SWAGGER_PLUGIN=bin/protoc-gen-swagger SWAGGER_PLUGIN_SRC= utilities/doc.go \ utilities/pattern.go \ @@ -43,37 +45,50 @@ OUTPUT_DIR=_output RUNTIME_PROTO=runtime/internal/stream_chunk.proto RUNTIME_GO=$(RUNTIME_PROTO:.proto=.pb.go) -PKGMAP=Mgoogle/protobuf/descriptor.proto=$(GO_PLUGIN_PKG)/descriptor,Mexamples/sub/message.proto=$(PKG)/examples/sub +OPENAPIV2_PROTO=protoc-gen-swagger/options/openapiv2.proto protoc-gen-swagger/options/annotations.proto +OPENAPIV2_GO=$(OPENAPIV2_PROTO:.proto=.pb.go) + +PKGMAP=Mgoogle/protobuf/descriptor.proto=$(GO_PLUGIN_PKG)/descriptor,Mexamples/proto/sub/message.proto=$(PKG)/examples/proto/sub ADDITIONAL_FLAGS= ifneq "$(GATEWAY_PLUGIN_FLAGS)" "" ADDITIONAL_FLAGS=,$(GATEWAY_PLUGIN_FLAGS) endif -SWAGGER_EXAMPLES=examples/examplepb/echo_service.proto \ - examples/examplepb/a_bit_of_everything.proto -EXAMPLES=examples/examplepb/echo_service.proto \ - examples/examplepb/a_bit_of_everything.proto \ - examples/examplepb/stream.proto \ - examples/examplepb/flow_combination.proto +SWAGGER_EXAMPLES=examples/proto/examplepb/echo_service.proto \ + examples/proto/examplepb/a_bit_of_everything.proto \ + examples/proto/examplepb/wrappers.proto +EXAMPLES=examples/proto/examplepb/echo_service.proto \ + examples/proto/examplepb/a_bit_of_everything.proto \ + examples/proto/examplepb/stream.proto \ + examples/proto/examplepb/flow_combination.proto \ + examples/proto/examplepb/wrappers.proto EXAMPLE_SVCSRCS=$(EXAMPLES:.proto=.pb.go) EXAMPLE_GWSRCS=$(EXAMPLES:.proto=.pb.gw.go) -EXAMPLE_SWAGGERSRCS=$(EXAMPLES:.proto=.swagger.json) -EXAMPLE_DEPS=examples/sub/message.proto examples/sub2/message.proto +EXAMPLE_SWAGGERSRCS=$(SWAGGER_EXAMPLES:.proto=.swagger.json) +EXAMPLE_DEPS=examples/proto/sub/message.proto examples/proto/sub2/message.proto EXAMPLE_DEPSRCS=$(EXAMPLE_DEPS:.proto=.pb.go) EXAMPLE_CLIENT_DIR=examples/clients -ECHO_EXAMPLE_SPEC=examples/examplepb/echo_service.swagger.json -ECHO_EXAMPLE_SRCS=$(EXAMPLE_CLIENT_DIR)/echo/EchoServiceApi.go \ - $(EXAMPLE_CLIENT_DIR)/echo/ExamplepbSimpleMessage.go -ABE_EXAMPLE_SPEC=examples/examplepb/a_bit_of_everything.swagger.json -ABE_EXAMPLE_SRCS=$(EXAMPLE_CLIENT_DIR)/abe/ABitOfEverythingServiceApi.go \ - $(EXAMPLE_CLIENT_DIR)/abe/ABitOfEverythingNested.go \ - $(EXAMPLE_CLIENT_DIR)/abe/ExamplepbABitOfEverything.go \ - $(EXAMPLE_CLIENT_DIR)/abe/ExamplepbNumericEnum.go \ - $(EXAMPLE_CLIENT_DIR)/abe/ExamplepbIdMessage.go \ - $(EXAMPLE_CLIENT_DIR)/abe/NestedDeepEnum.go \ - $(EXAMPLE_CLIENT_DIR)/abe/ProtobufEmpty.go \ - $(EXAMPLE_CLIENT_DIR)/abe/Sub2IdMessage.go \ - $(EXAMPLE_CLIENT_DIR)/abe/SubStringMessage.go +ECHO_EXAMPLE_SPEC=examples/proto/examplepb/echo_service.swagger.json +ECHO_EXAMPLE_SRCS=$(EXAMPLE_CLIENT_DIR)/echo/api_client.go \ + $(EXAMPLE_CLIENT_DIR)/echo/api_response.go \ + $(EXAMPLE_CLIENT_DIR)/echo/configuration.go \ + $(EXAMPLE_CLIENT_DIR)/echo/echo_service_api.go \ + $(EXAMPLE_CLIENT_DIR)/echo/examplepb_simple_message.go 
+ABE_EXAMPLE_SPEC=examples/proto/examplepb/a_bit_of_everything.swagger.json +ABE_EXAMPLE_SRCS=$(EXAMPLE_CLIENT_DIR)/abe/a_bit_of_everything_nested.go \ + $(EXAMPLE_CLIENT_DIR)/abe/a_bit_of_everything_service_api.go \ + $(EXAMPLE_CLIENT_DIR)/abe/api_client.go \ + $(EXAMPLE_CLIENT_DIR)/abe/api_response.go \ + $(EXAMPLE_CLIENT_DIR)/abe/camel_case_service_name_api.go \ + $(EXAMPLE_CLIENT_DIR)/abe/configuration.go \ + $(EXAMPLE_CLIENT_DIR)/abe/echo_rpc_api.go \ + $(EXAMPLE_CLIENT_DIR)/abe/echo_service_api.go \ + $(EXAMPLE_CLIENT_DIR)/abe/examplepb_a_bit_of_everything.go \ + $(EXAMPLE_CLIENT_DIR)/abe/examplepb_body.go \ + $(EXAMPLE_CLIENT_DIR)/abe/examplepb_numeric_enum.go \ + $(EXAMPLE_CLIENT_DIR)/abe/nested_deep_enum.go \ + $(EXAMPLE_CLIENT_DIR)/abe/protobuf_empty.go \ + $(EXAMPLE_CLIENT_DIR)/abe/sub_string_message.go EXAMPLE_CLIENT_SRCS=$(ECHO_EXAMPLE_SRCS) $(ABE_EXAMPLE_SRCS) SWAGGER_CODEGEN=swagger-codegen @@ -83,17 +98,20 @@ generate: $(RUNTIME_GO) .SUFFIXES: .go .proto -$(GO_PLUGIN): +$(GO_PLUGIN): go get $(GO_PLUGIN_PKG) go build -o $@ $(GO_PLUGIN_PKG) $(RUNTIME_GO): $(RUNTIME_PROTO) $(GO_PLUGIN) - protoc -I $(PROTOC_INC_PATH) --plugin=$(GO_PLUGIN) -I. --go_out=$(PKGMAP):. $(RUNTIME_PROTO) + protoc -I $(PROTOC_INC_PATH) --plugin=$(GO_PLUGIN) -I $(GOPATH)/src/$(GO_PTYPES_ANY_PKG) -I. --go_out=$(PKGMAP):. $(RUNTIME_PROTO) + +$(OPENAPIV2_GO): $(OPENAPIV2_PROTO) $(GO_PLUGIN) + protoc -I $(PROTOC_INC_PATH) --plugin=$(GO_PLUGIN) -I. --go_out=$(PKGMAP):$(GOPATH)/src $(OPENAPIV2_PROTO) $(GATEWAY_PLUGIN): $(RUNTIME_GO) $(GATEWAY_PLUGIN_SRC) go build -o $@ $(GATEWAY_PLUGIN_PKG) -$(SWAGGER_PLUGIN): $(SWAGGER_PLUGIN_SRC) +$(SWAGGER_PLUGIN): $(SWAGGER_PLUGIN_SRC) $(OPENAPIV2_GO) go build -o $@ $(SWAGGER_PLUGIN_PKG) $(EXAMPLE_SVCSRCS): $(GO_PLUGIN) $(EXAMPLES) @@ -109,16 +127,21 @@ $(EXAMPLE_SWAGGERSRCS): $(SWAGGER_PLUGIN) $(SWAGGER_EXAMPLES) $(ECHO_EXAMPLE_SRCS): $(ECHO_EXAMPLE_SPEC) $(SWAGGER_CODEGEN) generate -i $(ECHO_EXAMPLE_SPEC) \ - -l go -o examples/clients --additional-properties packageName=echo - @rm -f $(EXAMPLE_CLIENT_DIR)/README.md $(EXAMPLE_CLIENT_DIR)/git_push.sh $(EXAMPLE_CLIENT_DIR)/.gitignore + -l go -o examples/clients/echo --additional-properties packageName=echo + @rm -f $(EXAMPLE_CLIENT_DIR)/echo/README.md \ + $(EXAMPLE_CLIENT_DIR)/echo/git_push.sh \ + $(EXAMPLE_CLIENT_DIR)/echo/.travis.yml $(ABE_EXAMPLE_SRCS): $(ABE_EXAMPLE_SPEC) $(SWAGGER_CODEGEN) generate -i $(ABE_EXAMPLE_SPEC) \ - -l go -o examples/clients --additional-properties packageName=abe - @rm -f $(EXAMPLE_CLIENT_DIR)/README.md $(EXAMPLE_CLIENT_DIR)/git_push.sh $(EXAMPLE_CLIENT_DIR)/.gitignore + -l go -o examples/clients/abe --additional-properties packageName=abe + @rm -f $(EXAMPLE_CLIENT_DIR)/abe/README.md \ + $(EXAMPLE_CLIENT_DIR)/abe/git_push.sh \ + $(EXAMPLE_CLIENT_DIR)/abe/.travis.yml examples: $(EXAMPLE_SVCSRCS) $(EXAMPLE_GWSRCS) $(EXAMPLE_DEPSRCS) $(EXAMPLE_SWAGGERSRCS) $(EXAMPLE_CLIENT_SRCS) test: examples go test -race $(PKG)/... + go test -race $(PKG)/examples/integration -args -network=unix -endpoint=test.sock lint: golint --set_exit_status $(PKG)/runtime @@ -130,14 +153,15 @@ lint: go vet $(PKG)/protoc-gen-grpc-gateway/... go vet $(PKG)/protoc-gen-swagger/... 
-clean distclean: - rm -f $(GATEWAY_PLUGIN) +clean: + rm -f $(GATEWAY_PLUGIN) $(SWAGGER_PLUGIN) +distclean: clean + rm -f $(GO_PLUGIN) realclean: distclean rm -f $(EXAMPLE_SVCSRCS) $(EXAMPLE_DEPSRCS) rm -f $(EXAMPLE_GWSRCS) rm -f $(EXAMPLE_SWAGGERSRCS) - rm -f $(GO_PLUGIN) - rm -f $(SWAGGER_PLUGIN) rm -f $(EXAMPLE_CLIENT_SRCS) + rm -f $(OPENAPIV2_GO) .PHONY: generate examples test lint clean distclean realclean diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/README.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/README.md index c7396cc1..4f489132 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/README.md +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/README.md @@ -11,6 +11,8 @@ It helps you to provide your APIs in both gRPC and RESTful style at the same tim ![architecture introduction diagram](https://docs.google.com/drawings/d/12hp4CPqrNPFhattL_cIoJptFvlAqm5wLQ0ggqI5mkCg/pub?w=749&h=370) +## Check out our [documentation](https://grpc-ecosystem.github.io/grpc-gateway/)! + ## Background gRPC is great -- it generates API clients and server stubs in many programming languages, it is fast, easy-to-use, bandwidth-efficient and its design is combat-proven by Google. However, you might still want to provide a traditional RESTful API as well. Reasons can range from maintaining backwards-compatibility, supporting languages or clients not well supported by gRPC to simply maintaining the aesthetics and tooling involved with a RESTful architecture. @@ -216,7 +218,6 @@ To use the same port for custom HTTP handlers (e.g. serving `swagger.json`), gRP ### Want to support But not yet. -* bytes fields in path parameter. #5 * Optionally generating the entrypoint. #8 * `import_path` parameter diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/WORKSPACE b/vendor/github.com/grpc-ecosystem/grpc-gateway/WORKSPACE new file mode 100644 index 00000000..5727c218 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/WORKSPACE @@ -0,0 +1,41 @@ +workspace(name = "grpc_ecosystem_grpc_gateway") + +http_archive( + name = "io_bazel_rules_go", + url = "https://github.com/bazelbuild/rules_go/releases/download/0.10.3/rules_go-0.10.3.tar.gz", + sha256 = "feba3278c13cde8d67e341a837f69a029f698d7a27ddbb2a202be7a10b22142a", +) + +http_archive( + name = "bazel_gazelle", + url = "https://github.com/bazelbuild/bazel-gazelle/releases/download/0.10.1/bazel-gazelle-0.10.1.tar.gz", + sha256 = "d03625db67e9fb0905bbd206fa97e32ae9da894fe234a493e7517fd25faec914", +) + +load("@io_bazel_rules_go//go:def.bzl", "go_rules_dependencies", "go_register_toolchains") + +go_rules_dependencies() + +go_register_toolchains() + +load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies") + +gazelle_dependencies() + +load("@io_bazel_rules_go//go:def.bzl", "go_repository") + +go_repository( + name = "com_github_rogpeppe_fastuuid", + commit = "6724a57986aff9bff1a1770e9347036def7c89f6", + importpath = "github.com/rogpeppe/fastuuid", +) + +go_repository( + name = "com_github_go_resty_resty", + commit = "f8815663de1e64d57cdd4ee9e2b2fa96977a030e", + importpath = "github.com/go-resty/resty", +) + +load("//:repositories.bzl", "repositories") + +repositories() diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/bin/coverage b/vendor/github.com/grpc-ecosystem/grpc-gateway/bin/coverage new file mode 100755 index 00000000..bab0b81e --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/bin/coverage @@ -0,0 +1,11 @@ +#!/bin/bash +set -euo pipefail +> coverage.txt + +for d in $(go list ./... 
| grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic $d + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/BUILD.bazel new file mode 100644 index 00000000..1eddaaf7 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/BUILD.bazel @@ -0,0 +1,26 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//:generators"]) + +go_library( + name = "go_default_library", + srcs = [ + "doc.go", + "parse_req.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/codegenerator", + deps = [ + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) + +go_test( + name = "go_default_xtest", + srcs = ["parse_req_test.go"], + deps = [ + ":go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/doc.go new file mode 100644 index 00000000..36453171 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/doc.go @@ -0,0 +1,4 @@ +/* +Package codegenerator contains reusable functions used by the code generators. +*/ +package codegenerator diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req.go new file mode 100644 index 00000000..e74575bd --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req.go @@ -0,0 +1,23 @@ +package codegenerator + +import ( + "fmt" + "io" + "io/ioutil" + + "github.com/golang/protobuf/proto" + plugin "github.com/golang/protobuf/protoc-gen-go/plugin" +) + +// ParseRequest parses a code generator request from a proto Message. 
+func ParseRequest(r io.Reader) (*plugin.CodeGeneratorRequest, error) { + input, err := ioutil.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("failed to read code generator request: %v", err) + } + req := new(plugin.CodeGeneratorRequest) + if err = proto.Unmarshal(input, req); err != nil { + return nil, fmt.Errorf("failed to unmarshal code generator request: %v", err) + } + return req, nil +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req_test.go new file mode 100644 index 00000000..5f37aad9 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/codegenerator/parse_req_test.go @@ -0,0 +1,69 @@ +package codegenerator_test + +import ( + "bytes" + "fmt" + "io" + "reflect" + "strings" + "testing" + + "github.com/golang/protobuf/proto" + plugin "github.com/golang/protobuf/protoc-gen-go/plugin" + "github.com/grpc-ecosystem/grpc-gateway/codegenerator" +) + +var parseReqTests = []struct { + name string + in io.Reader + out *plugin.CodeGeneratorRequest + err error +}{ + { + "Empty input should produce empty output", + mustGetReader(&plugin.CodeGeneratorRequest{}), + &plugin.CodeGeneratorRequest{}, + nil, + }, + { + "Invalid reader should produce error", + &invalidReader{}, + nil, + fmt.Errorf("failed to read code generator request: invalid reader"), + }, + { + "Invalid proto message should produce error", + strings.NewReader("{}"), + nil, + fmt.Errorf("failed to unmarshal code generator request: unexpected EOF"), + }, +} + +func TestParseRequest(t *testing.T) { + for _, tt := range parseReqTests { + t.Run(tt.name, func(t *testing.T) { + out, err := codegenerator.ParseRequest(tt.in) + if !reflect.DeepEqual(err, tt.err) { + t.Errorf("got %v, want %v", err, tt.err) + } + if err == nil && !reflect.DeepEqual(*out, *tt.out) { + t.Errorf("got %v, want %v", *out, *tt.out) + } + }) + } +} + +func mustGetReader(pb proto.Message) io.Reader { + b, err := proto.Marshal(pb) + if err != nil { + panic(err) + } + return bytes.NewBuffer(b) +} + +type invalidReader struct { +} + +func (*invalidReader) Read(p []byte) (int, error) { + return 0, fmt.Errorf("invalid reader") +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/.gitignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/.gitignore new file mode 100644 index 00000000..ca35be08 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/.gitignore @@ -0,0 +1 @@ +_site diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile new file mode 100644 index 00000000..5e3964ff --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile @@ -0,0 +1,9 @@ +source 'https://rubygems.org' + +group :development, :test do + gem "github-pages", group: :jekyll_plugins + gem 'jekyll', '~> 3.7.0' + gem 'jekyll-redirect-from', '~> 0.13.0' + gem 'jekyll-sitemap', '~> 1.2.0' + gem 'jekyll-toc', '~> 0.5.1' +end diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile.lock b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile.lock new file mode 100644 index 00000000..6bc4cb1f --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/Gemfile.lock @@ -0,0 +1,256 @@ +GEM + remote: https://rubygems.org/ + specs: + activesupport (4.2.9) + i18n (~> 0.7) + minitest (~> 5.1) + thread_safe (~> 0.3, >= 0.3.4) + tzinfo (~> 1.1) + addressable (2.5.2) + public_suffix (>= 2.0.2, < 4.0) + coffee-script (2.4.1) 
+ coffee-script-source + execjs + coffee-script-source (1.11.1) + colorator (1.1.0) + commonmarker (0.17.9) + ruby-enum (~> 0.5) + concurrent-ruby (1.0.5) + em-websocket (0.5.1) + eventmachine (>= 0.12.9) + http_parser.rb (~> 0.6.0) + ethon (0.11.0) + ffi (>= 1.3.0) + eventmachine (1.2.5) + execjs (2.7.0) + faraday (0.14.0) + multipart-post (>= 1.2, < 3) + ffi (1.9.23) + forwardable-extended (2.6.0) + gemoji (3.0.0) + github-pages (182) + activesupport (= 4.2.9) + github-pages-health-check (= 1.4.0) + jekyll (= 3.7.3) + jekyll-avatar (= 0.5.0) + jekyll-coffeescript (= 1.1.1) + jekyll-commonmark-ghpages (= 0.1.5) + jekyll-default-layout (= 0.1.4) + jekyll-feed (= 0.9.3) + jekyll-gist (= 1.5.0) + jekyll-github-metadata (= 2.9.4) + jekyll-mentions (= 1.3.0) + jekyll-optional-front-matter (= 0.3.0) + jekyll-paginate (= 1.1.0) + jekyll-readme-index (= 0.2.0) + jekyll-redirect-from (= 0.13.0) + jekyll-relative-links (= 0.5.3) + jekyll-remote-theme (= 0.2.3) + jekyll-sass-converter (= 1.5.2) + jekyll-seo-tag (= 2.4.0) + jekyll-sitemap (= 1.2.0) + jekyll-swiss (= 0.4.0) + jekyll-theme-architect (= 0.1.1) + jekyll-theme-cayman (= 0.1.1) + jekyll-theme-dinky (= 0.1.1) + jekyll-theme-hacker (= 0.1.1) + jekyll-theme-leap-day (= 0.1.1) + jekyll-theme-merlot (= 0.1.1) + jekyll-theme-midnight (= 0.1.1) + jekyll-theme-minimal (= 0.1.1) + jekyll-theme-modernist (= 0.1.1) + jekyll-theme-primer (= 0.5.3) + jekyll-theme-slate (= 0.1.1) + jekyll-theme-tactile (= 0.1.1) + jekyll-theme-time-machine (= 0.1.1) + jekyll-titles-from-headings (= 0.5.1) + jemoji (= 0.9.0) + kramdown (= 1.16.2) + liquid (= 4.0.0) + listen (= 3.1.5) + mercenary (~> 0.3) + minima (= 2.4.1) + nokogiri (>= 1.8.1, < 2.0) + rouge (= 2.2.1) + terminal-table (~> 1.4) + github-pages-health-check (1.4.0) + addressable (~> 2.3) + net-dns (~> 0.8) + octokit (~> 4.0) + public_suffix (~> 2.0) + typhoeus (~> 1.3) + html-pipeline (2.7.1) + activesupport (>= 2) + nokogiri (>= 1.4) + http_parser.rb (0.6.0) + i18n (0.9.5) + concurrent-ruby (~> 1.0) + jekyll (3.7.3) + addressable (~> 2.4) + colorator (~> 1.0) + em-websocket (~> 0.5) + i18n (~> 0.7) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 2.0) + kramdown (~> 1.14) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 4) + safe_yaml (~> 1.0) + jekyll-avatar (0.5.0) + jekyll (~> 3.0) + jekyll-coffeescript (1.1.1) + coffee-script (~> 2.2) + coffee-script-source (~> 1.11.1) + jekyll-commonmark (1.2.0) + commonmarker (~> 0.14) + jekyll (>= 3.0, < 4.0) + jekyll-commonmark-ghpages (0.1.5) + commonmarker (~> 0.17.6) + jekyll-commonmark (~> 1) + rouge (~> 2) + jekyll-default-layout (0.1.4) + jekyll (~> 3.0) + jekyll-feed (0.9.3) + jekyll (~> 3.3) + jekyll-gist (1.5.0) + octokit (~> 4.2) + jekyll-github-metadata (2.9.4) + jekyll (~> 3.1) + octokit (~> 4.0, != 4.4.0) + jekyll-mentions (1.3.0) + activesupport (~> 4.0) + html-pipeline (~> 2.3) + jekyll (~> 3.0) + jekyll-optional-front-matter (0.3.0) + jekyll (~> 3.0) + jekyll-paginate (1.1.0) + jekyll-readme-index (0.2.0) + jekyll (~> 3.0) + jekyll-redirect-from (0.13.0) + jekyll (~> 3.3) + jekyll-relative-links (0.5.3) + jekyll (~> 3.3) + jekyll-remote-theme (0.2.3) + jekyll (~> 3.5) + rubyzip (>= 1.2.1, < 3.0) + typhoeus (>= 0.7, < 2.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-seo-tag (2.4.0) + jekyll (~> 3.3) + jekyll-sitemap (1.2.0) + jekyll (~> 3.3) + jekyll-swiss (0.4.0) + jekyll-theme-architect (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-cayman (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag 
(~> 2.0) + jekyll-theme-dinky (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-hacker (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-leap-day (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-merlot (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-midnight (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-minimal (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-modernist (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-primer (0.5.3) + jekyll (~> 3.5) + jekyll-github-metadata (~> 2.9) + jekyll-seo-tag (~> 2.0) + jekyll-theme-slate (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-tactile (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-time-machine (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-titles-from-headings (0.5.1) + jekyll (~> 3.3) + jekyll-toc (0.5.2) + nokogiri (~> 1.6) + jekyll-watch (2.0.0) + listen (~> 3.0) + jemoji (0.9.0) + activesupport (~> 4.0, >= 4.2.9) + gemoji (~> 3.0) + html-pipeline (~> 2.2) + jekyll (~> 3.0) + kramdown (1.16.2) + liquid (4.0.0) + listen (3.1.5) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + ruby_dep (~> 1.2) + mercenary (0.3.6) + mini_portile2 (2.3.0) + minima (2.4.1) + jekyll (~> 3.5) + jekyll-feed (~> 0.9) + jekyll-seo-tag (~> 2.1) + minitest (5.11.3) + multipart-post (2.0.0) + net-dns (0.8.0) + nokogiri (1.8.2) + mini_portile2 (~> 2.3.0) + octokit (4.8.0) + sawyer (~> 0.8.0, >= 0.5.3) + pathutil (0.16.1) + forwardable-extended (~> 2.6) + public_suffix (2.0.5) + rb-fsevent (0.10.3) + rb-inotify (0.9.10) + ffi (>= 0.5.0, < 2) + rouge (2.2.1) + ruby-enum (0.7.2) + i18n + ruby_dep (1.5.0) + rubyzip (1.2.1) + safe_yaml (1.0.4) + sass (3.5.6) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + sawyer (0.8.1) + addressable (>= 2.3.5, < 2.6) + faraday (~> 0.8, < 1.0) + terminal-table (1.8.0) + unicode-display_width (~> 1.1, >= 1.1.1) + thread_safe (0.3.6) + typhoeus (1.3.0) + ethon (>= 0.9.0) + tzinfo (1.2.5) + thread_safe (~> 0.1) + unicode-display_width (1.3.0) + +PLATFORMS + ruby + +DEPENDENCIES + github-pages + jekyll (~> 3.7.0) + jekyll-redirect-from (~> 0.13.0) + jekyll-sitemap (~> 1.2.0) + jekyll-toc (~> 0.5.1) + +BUNDLED WITH + 1.15.4 diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_config.yaml b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_config.yaml new file mode 100644 index 00000000..e1dd48b5 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_config.yaml @@ -0,0 +1,13 @@ +theme: jekyll-theme-architect + +repository: grpc-ecosystem/grpc-gateway + +collections: + docs: + output: true + +plugins: + - jekyll-toc + +exclude: + - run.sh diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/background.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/background.md new file mode 100644 index 00000000..63463a1b --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/background.md @@ -0,0 +1,11 @@ +--- +category: documentation +--- + +# Background + +gRPC is great -- it generates API clients and server stubs in many programming languages, it is fast, easy-to-use, bandwidth-efficient and its design is combat-proven by Google. +However, you might still want to provide a traditional RESTful API as well. 
Reasons can range from maintaining backwards-compatibility, supporting languages or clients not well supported by gRPC to simply maintaining the aesthetics and tooling involved with a RESTful architecture. + +This project aims to provide that HTTP+JSON interface to your gRPC service. A small amount of configuration in your service to attach HTTP semantics is all that's needed to generate a reverse-proxy with this library. + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/examples.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/examples.md new file mode 100644 index 00000000..adbb49ad --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/examples.md @@ -0,0 +1,16 @@ +--- +category: documentation +--- + +# Examples + +Examples are available under `examples` directory. +* `examplepb/echo_service.proto`, `examplepb/a_bit_of_everything.proto`: service definition + * `examplepb/echo_service.pb.go`, `examplepb/a_bit_of_everything.pb.go`: [generated] stub of the service + * `examplepb/echo_service.pb.gw.go`, `examplepb/a_bit_of_everything.pb.gw.go`: [generated] reverse proxy for the service +* `server/main.go`: service implementation +* `main.go`: entrypoint of the generated reverse proxy + +To use the same port for custom HTTP handlers (e.g. serving `swagger.json`), gRPC-gateway, and a gRPC server, see [this code example by CoreOS](https://github.com/philips/grpc-gateway-example/blob/master/cmd/serve.go) (and its accompanying [blog post](https://coreos.com/blog/gRPC-protobufs-swagger.html)) + + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/features.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/features.md new file mode 100644 index 00000000..27fce3c8 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/features.md @@ -0,0 +1,29 @@ +--- +category: documentation +--- + +# Features + +## Supported +* Generating JSON API handlers +* Method parameters in request body +* Method parameters in request path +* Method parameters in query string +* Enum fields in path parameter (including repeated enum fields). +* Mapping streaming APIs to newline-delimited JSON streams +* Mapping HTTP headers with `Grpc-Metadata-` prefix to gRPC metadata (prefixed with `grpcgateway-`) +* Optionally emitting API definition for [Swagger](http://swagger.io). +* Setting [gRPC timeouts](http://www.grpc.io/docs/guides/wire.html) through inbound HTTP `Grpc-Timeout` header. + +## Want to support +But not yet. +* Optionally generating the entrypoint. #8 +* `import_path` parameter + +## No plan to support +But patch is welcome. +* Method parameters in HTTP headers +* Handling trailer metadata +* Encoding request/response body in XML +* True bi-directional streaming. (Probably impossible?) + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/usage.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/usage.md new file mode 100644 index 00000000..7ad6c261 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_docs/usage.md @@ -0,0 +1,191 @@ +--- +category: documentation +--- + +# How do I use this? + +## Installation +First you need to install ProtocolBuffers 3.0.0-beta-3 or later. 
+ +```sh +mkdir tmp +cd tmp +git clone https://github.com/google/protobuf +cd protobuf +./autogen.sh +./configure +make +make check +sudo make install +``` + +Then, `go get -u` as usual the following packages: + +```sh +go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway +go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger +go get -u github.com/golang/protobuf/protoc-gen-go +``` + +## Usage +Make sure that your `$GOPATH/bin` is in your `$PATH`. + +1. Define your service in gRPC + + your_service.proto: + ```protobuf + syntax = "proto3"; + package example; + message StringMessage { + string value = 1; + } + + service YourService { + rpc Echo(StringMessage) returns (StringMessage) {} + } + ``` +2. Add a [custom option](https://cloud.google.com/service-management/reference/rpc/google.api#http) to the .proto file + + your_service.proto: + ```diff + syntax = "proto3"; + package example; + + + +import "google/api/annotations.proto"; + + + message StringMessage { + string value = 1; + } + + service YourService { + - rpc Echo(StringMessage) returns (StringMessage) {} + + rpc Echo(StringMessage) returns (StringMessage) { + + option (google.api.http) = { + + post: "/v1/example/echo" + + body: "*" + + }; + + } + } + ``` +3. Generate gRPC stub + + ```sh + protoc -I/usr/local/include -I. \ + -I$GOPATH/src \ + -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + --go_out=plugins=grpc:. \ + path/to/your_service.proto + ``` + + It will generate a stub file `path/to/your_service.pb.go`. +4. Implement your service in gRPC as usual + 1. (Optional) Generate gRPC stub in the language you want. + + e.g. + ```sh + protoc -I/usr/local/include -I. \ + -I$GOPATH/src \ + -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + --ruby_out=. \ + path/to/your/service_proto + + protoc -I/usr/local/include -I. \ + -I$GOPATH/src \ + -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + --plugin=protoc-gen-grpc=grpc_ruby_plugin \ + --grpc-ruby_out=. \ + path/to/your/service.proto + ``` + 2. Add the googleapis-common-protos gem (or your language equivalent) as a dependency to your project. + 3. Implement your service + +5. Generate reverse-proxy + + ```sh + protoc -I/usr/local/include -I. \ + -I$GOPATH/src \ + -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + --grpc-gateway_out=logtostderr=true:. \ + path/to/your_service.proto + ``` + + It will generate a reverse proxy `path/to/your_service.pb.gw.go`. + + Note: After generating the code for each of the stubs, in order to build the code, you will want to run ```go get .``` from the directory containing the stubs. + +6. Write an entrypoint + + Now you need to write an entrypoint of the proxy server. 
+ ```go + package main + + import ( + "flag" + "net/http" + + "github.com/golang/glog" + "golang.org/x/net/context" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "google.golang.org/grpc" + + gw "path/to/your_service_package" + ) + + var ( + echoEndpoint = flag.String("echo_endpoint", "localhost:9090", "endpoint of YourService") + ) + + func run() error { + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + mux := runtime.NewServeMux() + opts := []grpc.DialOption{grpc.WithInsecure()} + err := gw.RegisterYourServiceHandlerFromEndpoint(ctx, mux, *echoEndpoint, opts) + if err != nil { + return err + } + + return http.ListenAndServe(":8080", mux) + } + + func main() { + flag.Parse() + defer glog.Flush() + + if err := run(); err != nil { + glog.Fatal(err) + } + } + ``` + +7. (Optional) Generate swagger definitions + + ```sh + protoc -I/usr/local/include -I. \ + -I$GOPATH/src \ + -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ + --swagger_out=logtostderr=true:. \ + path/to/your_service.proto + ``` + +## Parameters and flags +`protoc-gen-grpc-gateway` supports custom mapping from Protobuf `import` to Golang import path. +They are compatible to [the parameters with same names in `protoc-gen-go`](https://github.com/golang/protobuf#parameters). + +In addition we also support the `request_context` parameter in order to use the `http.Request`'s Context (only for Go 1.7 and above). +This parameter can be useful to pass request scoped context between the gateway and the gRPC service. + +`protoc-gen-grpc-gateway` also supports some more command line flags to control logging. You can give these flags together with parameters above. Run `protoc-gen-grpc-gateway --help` for more details about the flags. + +# Mapping gRPC to HTTP + +* [How gRPC error codes map to HTTP status codes in the response](https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go#L15) +* HTTP request source IP is added as `X-Forwarded-For` gRPC request header +* HTTP request host is added as `X-Forwarded-Host` gRPC request header +* HTTP `Authorization` header is added as `authorization` gRPC request header +* Remaining Permanent HTTP header keys (as specified by the IANA [here](http://www.iana.org/assignments/message-headers/message-headers.xhtml) are prefixed with `grpcgateway-` and added with their values to gRPC request header +* HTTP headers that start with 'Grpc-Metadata-' are mapped to gRPC metadata (prefixed with `grpcgateway-`) +* While configurable, the default {un,}marshaling uses [jsonpb](https://godoc.org/github.com/golang/protobuf/jsonpb) with `OrigName: true`. + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_layouts/default.html b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_layouts/default.html new file mode 100644 index 00000000..1e45e5fb --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/_layouts/default.html @@ -0,0 +1,92 @@ + + + + + + + + + + + + +{% seo %} + + + +
+      {{ site.title | default: site.github.repository_name }}
+      {{ site.description | default: site.github.project_tagline }}
+      {% if site.github.is_project_page %}View project on GitHub{% endif %}
+      {% if site.github.is_user_page %}Follow me on GitHub{% endif %}
+      {{ content | toc }}
+ + {% if site.google_analytics %} + + {% endif %} + + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/index.md b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/index.md new file mode 100644 index 00000000..47b6ec02 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/index.md @@ -0,0 +1,27 @@ +# grpc-gateway + +[![Build Status](https://travis-ci.org/grpc-ecosystem/grpc-gateway.svg?branch=master)](https://travis-ci.org/grpc-ecosystem/grpc-gateway) + +grpc-gateway is a plugin of [protoc](http://github.com/google/protobuf). +It reads [gRPC](http://github.com/grpc/grpc-common) service definition, +and generates a reverse-proxy server which translates a RESTful JSON API into gRPC. +This server is generated according to [custom options](https://cloud.google.com/service-management/reference/rpc/google.api#http) in your gRPC definition. + +It helps you to provide your APIs in both gRPC and RESTful style at the same time. + +![architecture introduction diagram](https://docs.google.com/drawings/d/12hp4CPqrNPFhattL_cIoJptFvlAqm5wLQ0ggqI5mkCg/pub?w=749&h=370) + +To learn more about us check out our documentation on: + +* [Our background](_docs/background.md) +* [Installation and usage](_docs/usage.md) +* [Examples](_docs/examples.md) +* [Features](_docs/features.md) + + +# Contribution +See [CONTRIBUTING.md](http://github.com/grpc-ecosystem/grpc-gateway/blob/master/CONTRIBUTING.md). + +# License +grpc-gateway is licensed under the BSD 3-Clause License. +See [LICENSE.txt](https://github.com/grpc-ecosystem/grpc-gateway/blob/master/LICENSE.txt) for more details. diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/run.sh b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/run.sh new file mode 100755 index 00000000..b9b5a2c0 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/docs/run.sh @@ -0,0 +1,28 @@ +#! /bin/bash + +set -e + +JEKYLL_VERSION=3.5 +BUNDLE_DIR="/tmp/grpc-gateway-bundle" + +if [ ! 
-d "${BUNDLE_DIR}" ]; then + mkdir "${BUNDLE_DIR}" + + # Run this to update the Gemsfile.lock + docker run --rm \ + --volume="${PWD}:/srv/jekyll" \ + -e "JEKYLL_UID=$(id -u)" \ + -e "JEKYLL_GID=$(id -g)" \ + --volume="/tmp/grpc-gateway-bundle:/usr/local/bundle" \ + -it "jekyll/builder:${JEKYLL_VERSION}" \ + bundle update +fi + +docker run --rm \ + --volume="${PWD}:/srv/jekyll" \ + -p 35729:35729 -p 4000:4000 \ + -e "JEKYLL_UID=$(id -u)" \ + -e "JEKYLL_GID=$(id -g)" \ + --volume="/tmp/grpc-gateway-bundle:/usr/local/bundle" \ + -it "jekyll/builder:${JEKYLL_VERSION}" \ + jekyll serve diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/browser/gulpfile.js b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/browser/gulpfile.js index 3964888c..233afed4 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/browser/gulpfile.js +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/browser/gulpfile.js @@ -16,11 +16,11 @@ gulp.task('bower', function(){ }); gulp.task('server', shell.task([ - 'go build -o bin/example-server github.com/grpc-ecosystem/grpc-gateway/examples/server/cmd/example-server', + 'go build -o bin/example-server github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server', ])); gulp.task('gateway', shell.task([ - 'go build -o bin/example-gw github.com/grpc-ecosystem/grpc-gateway/examples', + 'go build -o bin/example-gw github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server', ])); gulp.task('serve-server', ['server'], function(){ @@ -32,9 +32,9 @@ gulp.task('serve-server', ['server'], function(){ gulp.task('serve-gateway', ['gateway', 'serve-server'], function(){ gprocess.start('gateway-server', 'bin/example-gw', [ - '--logtostderr', '--swagger_dir', path.join(__dirname, "../examplepb"), + '--logtostderr', '--swagger_dir', path.join(__dirname, "../proto/examplepb"), ]); - gulp.watch('bin/example-gateway', ['serve-gateway']); + gulp.watch('bin/example-gw', ['serve-gateway']); }); gulp.task('backends', ['serve-gateway', 'serve-server']); diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.gitignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.gitignore new file mode 100644 index 00000000..2f882691 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.gitignore @@ -0,0 +1 @@ +/docs diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.swagger-codegen-ignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.swagger-codegen-ignore new file mode 100644 index 00000000..6c7b69a0 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/.swagger-codegen-ignore @@ -0,0 +1 @@ +.gitignore diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingNested.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingNested.go deleted file mode 100644 index 7e5488e3..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingNested.go +++ /dev/null @@ -1,11 +0,0 @@ -package abe - -import ( -) - -type ABitOfEverythingNested struct { - Name string `json:"name,omitempty"` - Amount int64 `json:"amount,omitempty"` - Ok NestedDeepEnum `json:"ok,omitempty"` - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingServiceApi.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingServiceApi.go 
deleted file mode 100644 index fcc60763..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ABitOfEverythingServiceApi.go +++ /dev/null @@ -1,764 +0,0 @@ -package abe - -import ( - "strings" - "fmt" - "encoding/json" - "errors" - "github.com/dghubble/sling" - "time" -) - -type ABitOfEverythingServiceApi struct { - basePath string -} - -func NewABitOfEverythingServiceApi() *ABitOfEverythingServiceApi{ - return &ABitOfEverythingServiceApi { - basePath: "http://localhost", - } -} - -func NewABitOfEverythingServiceApiWithBasePath(basePath string) *ABitOfEverythingServiceApi{ - return &ABitOfEverythingServiceApi { - basePath: basePath, - } -} - -/** - * - * - * @param floatValue - * @param doubleValue - * @param int64Value - * @param uint64Value - * @param int32Value - * @param fixed64Value - * @param fixed32Value - * @param boolValue - * @param stringValue - * @param uint32Value - * @param sfixed32Value - * @param sfixed64Value - * @param sint32Value - * @param sint64Value - * @param nonConventionalNameValue - * @return ExamplepbABitOfEverything - */ -//func (a ABitOfEverythingServiceApi) Create (floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, uint32Value int64, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, nonConventionalNameValue string) (ExamplepbABitOfEverything, error) { -func (a ABitOfEverythingServiceApi) Create (floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, uint32Value int64, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, nonConventionalNameValue string) (ExamplepbABitOfEverything, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/{float_value}/{double_value}/{int64_value}/separator/{uint64_value}/{int32_value}/{fixed64_value}/{fixed32_value}/{bool_value}/{string_value}/{uint32_value}/{sfixed32_value}/{sfixed64_value}/{sint32_value}/{sint64_value}/{nonConventionalNameValue}" - path = strings.Replace(path, "{" + "float_value" + "}", fmt.Sprintf("%v", floatValue), -1) - path = strings.Replace(path, "{" + "double_value" + "}", fmt.Sprintf("%v", doubleValue), -1) - path = strings.Replace(path, "{" + "int64_value" + "}", fmt.Sprintf("%v", int64Value), -1) - path = strings.Replace(path, "{" + "uint64_value" + "}", fmt.Sprintf("%v", uint64Value), -1) - path = strings.Replace(path, "{" + "int32_value" + "}", fmt.Sprintf("%v", int32Value), -1) - path = strings.Replace(path, "{" + "fixed64_value" + "}", fmt.Sprintf("%v", fixed64Value), -1) - path = strings.Replace(path, "{" + "fixed32_value" + "}", fmt.Sprintf("%v", fixed32Value), -1) - path = strings.Replace(path, "{" + "bool_value" + "}", fmt.Sprintf("%v", boolValue), -1) - path = strings.Replace(path, "{" + "string_value" + "}", fmt.Sprintf("%v", stringValue), -1) - path = strings.Replace(path, "{" + "uint32_value" + "}", fmt.Sprintf("%v", uint32Value), -1) - path = strings.Replace(path, "{" + "sfixed32_value" + "}", fmt.Sprintf("%v", sfixed32Value), -1) - path = strings.Replace(path, "{" + "sfixed64_value" + "}", fmt.Sprintf("%v", sfixed64Value), -1) - path = strings.Replace(path, "{" + "sint32_value" + "}", fmt.Sprintf("%v", sint32Value), -1) - path = strings.Replace(path, "{" + "sint64_value" + "}", 
fmt.Sprintf("%v", sint64Value), -1) - path = strings.Replace(path, "{" + "nonConventionalNameValue" + "}", fmt.Sprintf("%v", nonConventionalNameValue), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ExamplepbABitOfEverything) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param body - * @return ExamplepbABitOfEverything - */ -//func (a ABitOfEverythingServiceApi) CreateBody (body ExamplepbABitOfEverything) (ExamplepbABitOfEverything, error) { -func (a ABitOfEverythingServiceApi) CreateBody (body ExamplepbABitOfEverything) (ExamplepbABitOfEverything, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything" - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - -// body params - _sling = _sling.BodyJSON(body) - - var successPayload = new(ExamplepbABitOfEverything) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... 
- err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param singleNestedName - * @param body - * @return ExamplepbABitOfEverything - */ -//func (a ABitOfEverythingServiceApi) DeepPathEcho (singleNestedName string, body ExamplepbABitOfEverything) (ExamplepbABitOfEverything, error) { -func (a ABitOfEverythingServiceApi) DeepPathEcho (singleNestedName string, body ExamplepbABitOfEverything) (ExamplepbABitOfEverything, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/{single_nested.name}" - path = strings.Replace(path, "{" + "single_nested.name" + "}", fmt.Sprintf("%v", singleNestedName), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - -// body params - _sling = _sling.BodyJSON(body) - - var successPayload = new(ExamplepbABitOfEverything) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param uuid - * @return ProtobufEmpty - */ -//func (a ABitOfEverythingServiceApi) Delete (uuid string) (ProtobufEmpty, error) { -func (a ABitOfEverythingServiceApi) Delete (uuid string) (ProtobufEmpty, error) { - - _sling := sling.New().Delete(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/{uuid}" - path = strings.Replace(path, "{" + "uuid" + "}", fmt.Sprintf("%v", uuid), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ProtobufEmpty) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. 
- var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param value - * @return SubStringMessage - */ -//func (a ABitOfEverythingServiceApi) Echo (value string) (SubStringMessage, error) { -func (a ABitOfEverythingServiceApi) Echo (value string) (SubStringMessage, error) { - - _sling := sling.New().Get(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/echo/{value}" - path = strings.Replace(path, "{" + "value" + "}", fmt.Sprintf("%v", value), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(SubStringMessage) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... 
- err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param value - * @return SubStringMessage - */ -//func (a ABitOfEverythingServiceApi) Echo_1 (value string) (SubStringMessage, error) { -func (a ABitOfEverythingServiceApi) Echo_1 (value string) (SubStringMessage, error) { - - _sling := sling.New().Get(a.basePath) - - // create path and map variables - path := "/v2/example/echo" - - _sling = _sling.Path(path) - - type QueryParams struct { - value string `url:"value,omitempty"` - -} - _sling = _sling.QueryStruct(&QueryParams{ value: value }) - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(SubStringMessage) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param body - * @return SubStringMessage - */ -//func (a ABitOfEverythingServiceApi) Echo_2 (body string) (SubStringMessage, error) { -func (a ABitOfEverythingServiceApi) Echo_2 (body string) (SubStringMessage, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v2/example/echo" - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - -// body params - _sling = _sling.BodyJSON(body) - - var successPayload = new(SubStringMessage) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. 
no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param uuid - * @param singleNestedName name is nested field. - * @param singleNestedAmount - * @param singleNestedOk - FALSE: FALSE is false.\n - TRUE: TRUE is true. - * @param floatValue - * @param doubleValue - * @param int64Value - * @param uint64Value - * @param int32Value - * @param fixed64Value - * @param fixed32Value - * @param boolValue - * @param stringValue - * @param uint32Value TODO(yugui) add bytes_value. - * @param enumValue - ZERO: ZERO means 0\n - ONE: ONE means 1 - * @param sfixed32Value - * @param sfixed64Value - * @param sint32Value - * @param sint64Value - * @param repeatedStringValue - * @param oneofString - * @param nonConventionalNameValue - * @param timestampValue - * @param repeatedEnumValue repeated enum value. it is comma-separated in query.\n\n - ZERO: ZERO means 0\n - ONE: ONE means 1 - * @return ProtobufEmpty - */ -//func (a ABitOfEverythingServiceApi) GetQuery (uuid string, singleNestedName string, singleNestedAmount int64, singleNestedOk string, floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, uint32Value int64, enumValue string, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, repeatedStringValue []string, oneofString string, nonConventionalNameValue string, timestampValue time.Time, repeatedEnumValue []string) (ProtobufEmpty, error) { -func (a ABitOfEverythingServiceApi) GetQuery (uuid string, singleNestedName string, singleNestedAmount int64, singleNestedOk string, floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, uint32Value int64, enumValue string, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, repeatedStringValue []string, oneofString string, nonConventionalNameValue string, timestampValue time.Time, repeatedEnumValue []string) (ProtobufEmpty, error) { - - _sling := sling.New().Get(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/query/{uuid}" - path = strings.Replace(path, "{" + "uuid" + "}", fmt.Sprintf("%v", uuid), -1) - - _sling = _sling.Path(path) - - type QueryParams struct { - singleNestedName string `url:"single_nested.name,omitempty"` - singleNestedAmount int64 `url:"single_nested.amount,omitempty"` - singleNestedOk string `url:"single_nested.ok,omitempty"` - floatValue float32 `url:"float_value,omitempty"` - doubleValue float64 `url:"double_value,omitempty"` - int64Value string `url:"int64_value,omitempty"` - uint64Value string 
`url:"uint64_value,omitempty"` - int32Value int32 `url:"int32_value,omitempty"` - fixed64Value string `url:"fixed64_value,omitempty"` - fixed32Value int64 `url:"fixed32_value,omitempty"` - boolValue bool `url:"bool_value,omitempty"` - stringValue string `url:"string_value,omitempty"` - uint32Value int64 `url:"uint32_value,omitempty"` - enumValue string `url:"enum_value,omitempty"` - sfixed32Value int32 `url:"sfixed32_value,omitempty"` - sfixed64Value string `url:"sfixed64_value,omitempty"` - sint32Value int32 `url:"sint32_value,omitempty"` - sint64Value string `url:"sint64_value,omitempty"` - repeatedStringValue []string `url:"repeated_string_value,omitempty"` - oneofString string `url:"oneof_string,omitempty"` - nonConventionalNameValue string `url:"nonConventionalNameValue,omitempty"` - timestampValue time.Time `url:"timestamp_value,omitempty"` - repeatedEnumValue []string `url:"repeated_enum_value,omitempty"` - -} - _sling = _sling.QueryStruct(&QueryParams{ singleNestedName: singleNestedName,singleNestedAmount: singleNestedAmount,singleNestedOk: singleNestedOk,floatValue: floatValue,doubleValue: doubleValue,int64Value: int64Value,uint64Value: uint64Value,int32Value: int32Value,fixed64Value: fixed64Value,fixed32Value: fixed32Value,boolValue: boolValue,stringValue: stringValue,uint32Value: uint32Value,enumValue: enumValue,sfixed32Value: sfixed32Value,sfixed64Value: sfixed64Value,sint32Value: sint32Value,sint64Value: sint64Value,repeatedStringValue: repeatedStringValue,oneofString: oneofString,nonConventionalNameValue: nonConventionalNameValue,timestampValue: timestampValue,repeatedEnumValue: repeatedEnumValue }) - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ProtobufEmpty) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... 
- err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param uuid - * @return ExamplepbABitOfEverything - */ -//func (a ABitOfEverythingServiceApi) Lookup (uuid string) (ExamplepbABitOfEverything, error) { -func (a ABitOfEverythingServiceApi) Lookup (uuid string) (ExamplepbABitOfEverything, error) { - - _sling := sling.New().Get(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/{uuid}" - path = strings.Replace(path, "{" + "uuid" + "}", fmt.Sprintf("%v", uuid), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ExamplepbABitOfEverything) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @return ProtobufEmpty - */ -//func (a ABitOfEverythingServiceApi) Timeout () (ProtobufEmpty, error) { -func (a ABitOfEverythingServiceApi) Timeout () (ProtobufEmpty, error) { - - _sling := sling.New().Get(a.basePath) - - // create path and map variables - path := "/v2/example/timeout" - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ProtobufEmpty) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. 
no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * - * - * @param uuid - * @param body - * @return ProtobufEmpty - */ -//func (a ABitOfEverythingServiceApi) Update (uuid string, body ExamplepbABitOfEverything) (ProtobufEmpty, error) { -func (a ABitOfEverythingServiceApi) Update (uuid string, body ExamplepbABitOfEverything) (ProtobufEmpty, error) { - - _sling := sling.New().Put(a.basePath) - - // create path and map variables - path := "/v1/example/a_bit_of_everything/{uuid}" - path = strings.Replace(path, "{" + "uuid" + "}", fmt.Sprintf("%v", uuid), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - -// body params - _sling = _sling.BodyJSON(body) - - var successPayload = new(ProtobufEmpty) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... 
- err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/BUILD.bazel new file mode 100644 index 00000000..fad5b661 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/BUILD.bazel @@ -0,0 +1,25 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +package(default_visibility = ["//visibility:public"]) + +go_library( + name = "go_default_library", + srcs = [ + "a_bit_of_everything_nested.go", + "a_bit_of_everything_service_api.go", + "api_client.go", + "api_response.go", + "camel_case_service_name_api.go", + "configuration.go", + "echo_rpc_api.go", + "echo_service_api.go", + "examplepb_a_bit_of_everything.go", + "examplepb_body.go", + "examplepb_numeric_enum.go", + "nested_deep_enum.go", + "protobuf_empty.go", + "sub_string_message.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe", + deps = ["@com_github_go_resty_resty//:go_default_library"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbABitOfEverything.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbABitOfEverything.go deleted file mode 100644 index 2892eb62..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbABitOfEverything.go +++ /dev/null @@ -1,36 +0,0 @@ -package abe - -import ( - "time" -) - -type ExamplepbABitOfEverything struct { - SingleNested ABitOfEverythingNested `json:"single_nested,omitempty"` - Uuid string `json:"uuid,omitempty"` - Nested []ABitOfEverythingNested `json:"nested,omitempty"` - FloatValue float32 `json:"float_value,omitempty"` - DoubleValue float64 `json:"double_value,omitempty"` - Int64Value string `json:"int64_value,omitempty"` - Uint64Value string `json:"uint64_value,omitempty"` - Int32Value int32 `json:"int32_value,omitempty"` - Fixed64Value string `json:"fixed64_value,omitempty"` - Fixed32Value int64 `json:"fixed32_value,omitempty"` - BoolValue bool `json:"bool_value,omitempty"` - StringValue string `json:"string_value,omitempty"` - Uint32Value int64 `json:"uint32_value,omitempty"` - EnumValue ExamplepbNumericEnum `json:"enum_value,omitempty"` - Sfixed32Value int32 `json:"sfixed32_value,omitempty"` - Sfixed64Value string `json:"sfixed64_value,omitempty"` - Sint32Value int32 `json:"sint32_value,omitempty"` - Sint64Value string `json:"sint64_value,omitempty"` - RepeatedStringValue []string `json:"repeated_string_value,omitempty"` - OneofEmpty ProtobufEmpty `json:"oneof_empty,omitempty"` - OneofString string `json:"oneof_string,omitempty"` - MapValue map[string]ExamplepbNumericEnum `json:"map_value,omitempty"` - MappedStringValue map[string]string `json:"mapped_string_value,omitempty"` - MappedNestedValue map[string]ABitOfEverythingNested `json:"mapped_nested_value,omitempty"` - NonConventionalNameValue string `json:"nonConventionalNameValue,omitempty"` - TimestampValue time.Time `json:"timestamp_value,omitempty"` - RepeatedEnumValue []ExamplepbNumericEnum `json:"repeated_enum_value,omitempty"` - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbNumericEnum.go 
b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbNumericEnum.go deleted file mode 100644 index 51cffc77..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ExamplepbNumericEnum.go +++ /dev/null @@ -1,8 +0,0 @@ -package abe - -import ( -) - -type ExamplepbNumericEnum struct { - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/NestedDeepEnum.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/NestedDeepEnum.go deleted file mode 100644 index b48e2312..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/NestedDeepEnum.go +++ /dev/null @@ -1,8 +0,0 @@ -package abe - -import ( -) - -type NestedDeepEnum struct { - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufDuration.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufDuration.go deleted file mode 100644 index 837f298b..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufDuration.go +++ /dev/null @@ -1,10 +0,0 @@ -package abe - -import ( -) - -type ProtobufDuration struct { - Seconds string `json:"seconds,omitempty"` - Nanos int32 `json:"nanos,omitempty"` - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufEmpty.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufEmpty.go deleted file mode 100644 index ac37afeb..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/ProtobufEmpty.go +++ /dev/null @@ -1,8 +0,0 @@ -package abe - -import ( -) - -type ProtobufEmpty struct { - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/SubStringMessage.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/SubStringMessage.go deleted file mode 100644 index f278dbf2..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/SubStringMessage.go +++ /dev/null @@ -1,9 +0,0 @@ -package abe - -import ( -) - -type SubStringMessage struct { - Value string `json:"value,omitempty"` - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_nested.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_nested.go new file mode 100644 index 00000000..095e8c5f --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_nested.go @@ -0,0 +1,22 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +// Nested is nested type. +type ABitOfEverythingNested struct { + + // name is nested field. 
+ Name string `json:"name,omitempty"` + + Amount int64 `json:"amount,omitempty"` + + Ok NestedDeepEnum `json:"ok,omitempty"` +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_service_api.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_service_api.go new file mode 100644 index 00000000..19c6fdaa --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/a_bit_of_everything_service_api.go @@ -0,0 +1,938 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "net/url" + "strings" + "time" + "encoding/json" + "fmt" +) + +type ABitOfEverythingServiceApi struct { + Configuration *Configuration +} + +func NewABitOfEverythingServiceApi() *ABitOfEverythingServiceApi { + configuration := NewConfiguration() + return &ABitOfEverythingServiceApi{ + Configuration: configuration, + } +} + +func NewABitOfEverythingServiceApiWithBasePath(basePath string) *ABitOfEverythingServiceApi { + configuration := NewConfiguration() + configuration.BasePath = basePath + + return &ABitOfEverythingServiceApi{ + Configuration: configuration, + } +} + +/** + * + * + * @param floatValue + * @param doubleValue + * @param int64Value + * @param uint64Value + * @param int32Value + * @param fixed64Value + * @param fixed32Value + * @param boolValue + * @param stringValue + * @param uint32Value + * @param sfixed32Value + * @param sfixed64Value + * @param sint32Value + * @param sint64Value + * @param nonConventionalNameValue + * @return *ExamplepbABitOfEverything + */ +func (a ABitOfEverythingServiceApi) Create(floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, uint32Value int64, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, nonConventionalNameValue string) (*ExamplepbABitOfEverything, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/{float_value}/{double_value}/{int64_value}/separator/{uint64_value}/{int32_value}/{fixed64_value}/{fixed32_value}/{bool_value}/{string_value}/{uint32_value}/{sfixed32_value}/{sfixed64_value}/{sint32_value}/{sint64_value}/{nonConventionalNameValue}" + localVarPath = strings.Replace(localVarPath, "{"+"float_value"+"}", fmt.Sprintf("%v", floatValue), -1) + localVarPath = strings.Replace(localVarPath, "{"+"double_value"+"}", fmt.Sprintf("%v", doubleValue), -1) + localVarPath = strings.Replace(localVarPath, "{"+"int64_value"+"}", fmt.Sprintf("%v", int64Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"uint64_value"+"}", fmt.Sprintf("%v", uint64Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"int32_value"+"}", fmt.Sprintf("%v", int32Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"fixed64_value"+"}", fmt.Sprintf("%v", fixed64Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"fixed32_value"+"}", fmt.Sprintf("%v", fixed32Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"bool_value"+"}", fmt.Sprintf("%v", boolValue), -1) + localVarPath = strings.Replace(localVarPath, 
"{"+"string_value"+"}", fmt.Sprintf("%v", stringValue), -1) + localVarPath = strings.Replace(localVarPath, "{"+"uint32_value"+"}", fmt.Sprintf("%v", uint32Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"sfixed32_value"+"}", fmt.Sprintf("%v", sfixed32Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"sfixed64_value"+"}", fmt.Sprintf("%v", sfixed64Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"sint32_value"+"}", fmt.Sprintf("%v", sint32Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"sint64_value"+"}", fmt.Sprintf("%v", sint64Value), -1) + localVarPath = strings.Replace(localVarPath, "{"+"nonConventionalNameValue"+"}", fmt.Sprintf("%v", nonConventionalNameValue), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ExamplepbABitOfEverything) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Create", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param body + * @return *ExamplepbABitOfEverything + */ +func (a ABitOfEverythingServiceApi) CreateBody(body ExamplepbABitOfEverything) (*ExamplepbABitOfEverything, *APIResponse, error) { + + var localVarHttpMethod 
= strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ExamplepbABitOfEverything) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "CreateBody", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param singleNestedName + * @param body + * @return *ExamplepbABitOfEverything + */ +func (a ABitOfEverythingServiceApi) DeepPathEcho(singleNestedName string, body ExamplepbABitOfEverything) (*ExamplepbABitOfEverything, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/{single_nested.name}" + localVarPath = strings.Replace(localVarPath, "{"+"single_nested.name"+"}", fmt.Sprintf("%v", singleNestedName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var 
localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ExamplepbABitOfEverything) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "DeepPathEcho", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param uuid + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) Delete(uuid string) (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Delete") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/{uuid}" + localVarPath = strings.Replace(localVarPath, "{"+"uuid"+"}", fmt.Sprintf("%v", uuid), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = 
a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Delete", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) ErrorWithDetails() (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/errorwithdetails" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + 
if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "ErrorWithDetails", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param id + * @param body + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) GetMessageWithBody(id string, body ExamplepbBody) (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/withbody/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var 
localVarAPIResponse = &APIResponse{Operation: "GetMessageWithBody", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param uuid + * @param singleNestedName name is nested field. + * @param singleNestedAmount + * @param singleNestedOk - FALSE: FALSE is false. - TRUE: TRUE is true. + * @param floatValue + * @param doubleValue + * @param int64Value + * @param uint64Value + * @param int32Value + * @param fixed64Value + * @param fixed32Value + * @param boolValue + * @param stringValue + * @param bytesValue + * @param uint32Value + * @param enumValue - ZERO: ZERO means 0 - ONE: ONE means 1 + * @param sfixed32Value + * @param sfixed64Value + * @param sint32Value + * @param sint64Value + * @param repeatedStringValue + * @param oneofString + * @param nonConventionalNameValue + * @param timestampValue + * @param repeatedEnumValue repeated enum value. it is comma-separated in query. - ZERO: ZERO means 0 - ONE: ONE means 1 + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) GetQuery(uuid string, singleNestedName string, singleNestedAmount int64, singleNestedOk string, floatValue float32, doubleValue float64, int64Value string, uint64Value string, int32Value int32, fixed64Value string, fixed32Value int64, boolValue bool, stringValue string, bytesValue string, uint32Value int64, enumValue string, sfixed32Value int32, sfixed64Value string, sint32Value int32, sint64Value string, repeatedStringValue []string, oneofString string, nonConventionalNameValue string, timestampValue time.Time, repeatedEnumValue []string) (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/query/{uuid}" + localVarPath = strings.Replace(localVarPath, "{"+"uuid"+"}", fmt.Sprintf("%v", uuid), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + localVarQueryParams.Add("single_nested.name", a.Configuration.APIClient.ParameterToString(singleNestedName, "")) + localVarQueryParams.Add("single_nested.amount", a.Configuration.APIClient.ParameterToString(singleNestedAmount, "")) + localVarQueryParams.Add("single_nested.ok", 
a.Configuration.APIClient.ParameterToString(singleNestedOk, "")) + localVarQueryParams.Add("float_value", a.Configuration.APIClient.ParameterToString(floatValue, "")) + localVarQueryParams.Add("double_value", a.Configuration.APIClient.ParameterToString(doubleValue, "")) + localVarQueryParams.Add("int64_value", a.Configuration.APIClient.ParameterToString(int64Value, "")) + localVarQueryParams.Add("uint64_value", a.Configuration.APIClient.ParameterToString(uint64Value, "")) + localVarQueryParams.Add("int32_value", a.Configuration.APIClient.ParameterToString(int32Value, "")) + localVarQueryParams.Add("fixed64_value", a.Configuration.APIClient.ParameterToString(fixed64Value, "")) + localVarQueryParams.Add("fixed32_value", a.Configuration.APIClient.ParameterToString(fixed32Value, "")) + localVarQueryParams.Add("bool_value", a.Configuration.APIClient.ParameterToString(boolValue, "")) + localVarQueryParams.Add("string_value", a.Configuration.APIClient.ParameterToString(stringValue, "")) + localVarQueryParams.Add("bytes_value", a.Configuration.APIClient.ParameterToString(bytesValue, "")) + localVarQueryParams.Add("uint32_value", a.Configuration.APIClient.ParameterToString(uint32Value, "")) + localVarQueryParams.Add("enum_value", a.Configuration.APIClient.ParameterToString(enumValue, "")) + localVarQueryParams.Add("sfixed32_value", a.Configuration.APIClient.ParameterToString(sfixed32Value, "")) + localVarQueryParams.Add("sfixed64_value", a.Configuration.APIClient.ParameterToString(sfixed64Value, "")) + localVarQueryParams.Add("sint32_value", a.Configuration.APIClient.ParameterToString(sint32Value, "")) + localVarQueryParams.Add("sint64_value", a.Configuration.APIClient.ParameterToString(sint64Value, "")) + var repeatedStringValueCollectionFormat = "csv" + localVarQueryParams.Add("repeated_string_value", a.Configuration.APIClient.ParameterToString(repeatedStringValue, repeatedStringValueCollectionFormat)) + + localVarQueryParams.Add("oneof_string", a.Configuration.APIClient.ParameterToString(oneofString, "")) + localVarQueryParams.Add("nonConventionalNameValue", a.Configuration.APIClient.ParameterToString(nonConventionalNameValue, "")) + localVarQueryParams.Add("timestamp_value", a.Configuration.APIClient.ParameterToString(timestampValue, "")) + var repeatedEnumValueCollectionFormat = "csv" + localVarQueryParams.Add("repeated_enum_value", a.Configuration.APIClient.ParameterToString(repeatedEnumValue, repeatedEnumValueCollectionFormat)) + + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = 
localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "GetQuery", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param uuid + * @return *ExamplepbABitOfEverything + */ +func (a ABitOfEverythingServiceApi) Lookup(uuid string) (*ExamplepbABitOfEverything, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/{uuid}" + localVarPath = strings.Replace(localVarPath, "{"+"uuid"+"}", fmt.Sprintf("%v", uuid), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ExamplepbABitOfEverything) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Lookup", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + 
+/** + * + * + * @param name + * @param body + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) PostWithEmptyBody(name string, body ExamplepbBody) (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/postwithemptybody/{name}" + localVarPath = strings.Replace(localVarPath, "{"+"name"+"}", fmt.Sprintf("%v", name), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "PostWithEmptyBody", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) Timeout() (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/timeout" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody 
interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Timeout", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * + * + * @param uuid + * @param body + * @return *ProtobufEmpty + */ +func (a ABitOfEverythingServiceApi) Update(uuid string, body ExamplepbABitOfEverything) (*ProtobufEmpty, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Put") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/{uuid}" + localVarPath = strings.Replace(localVarPath, "{"+"uuid"+"}", fmt.Sprintf("%v", uuid), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + 
a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ProtobufEmpty) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Update", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_client.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_client.go new file mode 100644 index 00000000..bf3e21a9 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_client.go @@ -0,0 +1,164 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "bytes" + "fmt" + "path/filepath" + "reflect" + "strings" + "net/url" + "io/ioutil" + "github.com/go-resty/resty" +) + +type APIClient struct { + config *Configuration +} + +func (c *APIClient) SelectHeaderContentType(contentTypes []string) string { + + if len(contentTypes) == 0 { + return "" + } + if contains(contentTypes, "application/json") { + return "application/json" + } + return contentTypes[0] // use the first content type specified in 'consumes' +} + +func (c *APIClient) SelectHeaderAccept(accepts []string) string { + + if len(accepts) == 0 { + return "" + } + if contains(accepts, "application/json") { + return "application/json" + } + return strings.Join(accepts, ",") +} + +func contains(haystack []string, needle string) bool { + for _, a := range haystack { + if strings.ToLower(a) == strings.ToLower(needle) { + return true + } + } + return false +} + +func (c *APIClient) CallAPI(path 
string, method string, + postBody interface{}, + headerParams map[string]string, + queryParams url.Values, + formParams map[string]string, + fileName string, + fileBytes []byte) (*resty.Response, error) { + + rClient := c.prepareClient() + request := c.prepareRequest(rClient, postBody, headerParams, queryParams, formParams, fileName, fileBytes) + + switch strings.ToUpper(method) { + case "GET": + response, err := request.Get(path) + return response, err + case "POST": + response, err := request.Post(path) + return response, err + case "PUT": + response, err := request.Put(path) + return response, err + case "PATCH": + response, err := request.Patch(path) + return response, err + case "DELETE": + response, err := request.Delete(path) + return response, err + } + + return nil, fmt.Errorf("invalid method %v", method) +} + +func (c *APIClient) ParameterToString(obj interface{}, collectionFormat string) string { + delimiter := "" + switch collectionFormat { + case "pipes": + delimiter = "|" + case "ssv": + delimiter = " " + case "tsv": + delimiter = "\t" + case "csv": + delimiter = "," + } + + if reflect.TypeOf(obj).Kind() == reflect.Slice { + return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]") + } + + return fmt.Sprintf("%v", obj) +} + +func (c *APIClient) prepareClient() *resty.Client { + + rClient := resty.New() + + rClient.SetDebug(c.config.Debug) + if c.config.Transport != nil { + rClient.SetTransport(c.config.Transport) + } + + if c.config.Timeout != nil { + rClient.SetTimeout(*c.config.Timeout) + } + rClient.SetLogger(ioutil.Discard) + return rClient +} + +func (c *APIClient) prepareRequest( + rClient *resty.Client, + postBody interface{}, + headerParams map[string]string, + queryParams url.Values, + formParams map[string]string, + fileName string, + fileBytes []byte) *resty.Request { + + + request := rClient.R() + request.SetBody(postBody) + + if c.config.UserAgent != "" { + request.SetHeader("User-Agent", c.config.UserAgent) + } + + // add header parameter, if any + if len(headerParams) > 0 { + request.SetHeaders(headerParams) + } + + // add query parameter, if any + if len(queryParams) > 0 { + request.SetMultiValueQueryParams(queryParams) + } + + // add form parameter, if any + if len(formParams) > 0 { + request.SetFormData(formParams) + } + + if len(fileBytes) > 0 && fileName != "" { + _, fileNm := filepath.Split(fileName) + request.SetFileReader("file", fileNm, bytes.NewReader(fileBytes)) + } + return request +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_response.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_response.go new file mode 100644 index 00000000..ee1315f5 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/api_response.go @@ -0,0 +1,44 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "net/http" +) + +type APIResponse struct { + *http.Response `json:"-"` + Message string `json:"message,omitempty"` + // Operation is the name of the swagger operation. + Operation string `json:"operation,omitempty"` + // RequestURL is the request URL. This value is always available, even if the + // embedded *http.Response is nil. 
+	RequestURL string `json:"url,omitempty"`
+	// Method is the HTTP method used for the request. This value is always
+	// available, even if the embedded *http.Response is nil.
+	Method string `json:"method,omitempty"`
+	// Payload holds the contents of the response body (which may be nil or empty).
+	// This is provided here as the raw response.Body() reader will have already
+	// been drained.
+	Payload []byte `json:"-"`
+}
+
+func NewAPIResponse(r *http.Response) *APIResponse {
+
+	response := &APIResponse{Response: r}
+	return response
+}
+
+func NewAPIResponseWithError(errorMessage string) *APIResponse {
+
+	response := &APIResponse{Message: errorMessage}
+	return response
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/camel_case_service_name_api.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/camel_case_service_name_api.go
new file mode 100644
index 00000000..43c92f74
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/camel_case_service_name_api.go
@@ -0,0 +1,110 @@
+/*
+ * A Bit of Everything
+ *
+ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
+ *
+ * OpenAPI spec version: 1.0
+ * Contact: none@example.com
+ * Generated by: https://github.com/swagger-api/swagger-codegen.git
+ */
+
+package abe
+
+import (
+	"net/url"
+	"strings"
+	"encoding/json"
+)
+
+type CamelCaseServiceNameApi struct {
+	Configuration *Configuration
+}
+
+func NewCamelCaseServiceNameApi() *CamelCaseServiceNameApi {
+	configuration := NewConfiguration()
+	return &CamelCaseServiceNameApi{
+		Configuration: configuration,
+	}
+}
+
+func NewCamelCaseServiceNameApiWithBasePath(basePath string) *CamelCaseServiceNameApi {
+	configuration := NewConfiguration()
+	configuration.BasePath = basePath
+
+	return &CamelCaseServiceNameApi{
+		Configuration: configuration,
+	}
+}
+
+/**
+ *
+ *
+ * @return *ProtobufEmpty
+ */
+func (a CamelCaseServiceNameApi) Empty() (*ProtobufEmpty, *APIResponse, error) {
+
+	var localVarHttpMethod = strings.ToUpper("Get")
+	// create path and map variables
+	localVarPath := a.Configuration.BasePath + "/v2/example/empty"
+
+	localVarHeaderParams := make(map[string]string)
+	localVarQueryParams := url.Values{}
+	localVarFormParams := make(map[string]string)
+	var localVarPostBody interface{}
+	var localVarFileName string
+	var localVarFileBytes []byte
+	// authentication '(OAuth2)' required
+	// oauth required
+	if a.Configuration.AccessToken != ""{
+		localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken
+	}
+	// authentication '(BasicAuth)' required
+	// http basic authentication required
+	if a.Configuration.Username != "" || a.Configuration.Password != ""{
+		localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString()
+	}
+	// authentication '(ApiKeyAuth)' required
+	// set key with prefix in header
+	localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key")
+	// add default headers if any
+	for key := range a.Configuration.DefaultHeader {
+		localVarHeaderParams[key] = a.Configuration.DefaultHeader[key]
+	}
+
+	// to determine the Content-Type header
+	localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", }
+
+	// set Content-Type header
+	localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
+	if localVarHttpContentType != "" {
+		localVarHeaderParams["Content-Type"] = localVarHttpContentType
+	}
+	// to determine the Accept header
+	localVarHttpHeaderAccepts := []string{
+		"application/json",
+		"application/x-foo-mime",
+	}
+
+	// set Accept header
+	localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
+	if localVarHttpHeaderAccept != "" {
+		localVarHeaderParams["Accept"] = localVarHttpHeaderAccept
+	}
+	var successPayload = new(ProtobufEmpty)
+	localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)
+
+	var localVarURL, _ = url.Parse(localVarPath)
+	localVarURL.RawQuery = localVarQueryParams.Encode()
+	var localVarAPIResponse = &APIResponse{Operation: "Empty", Method: localVarHttpMethod, RequestURL: localVarURL.String()}
+	if localVarHttpResponse != nil {
+		localVarAPIResponse.Response = localVarHttpResponse.RawResponse
+		localVarAPIResponse.Payload = localVarHttpResponse.Body()
+	}
+
+	if err != nil {
+		return successPayload, localVarAPIResponse, err
+	}
+	err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload)
+	return successPayload, localVarAPIResponse, err
+}
+
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/configuration.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/configuration.go
new file mode 100644
index 00000000..ccc319c3
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/configuration.go
@@ -0,0 +1,67 @@
+/*
+ * A Bit of Everything
+ *
+ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
+ *
+ * OpenAPI spec version: 1.0
+ * Contact: none@example.com
+ * Generated by: https://github.com/swagger-api/swagger-codegen.git
+ */
+
+package abe
+
+import (
+	"encoding/base64"
+	"net/http"
+	"time"
+)
+
+
+type Configuration struct {
+	Username string `json:"userName,omitempty"`
+	Password string `json:"password,omitempty"`
+	APIKeyPrefix map[string]string `json:"APIKeyPrefix,omitempty"`
+	APIKey map[string]string `json:"APIKey,omitempty"`
+	Debug bool `json:"debug,omitempty"`
+	DebugFile string `json:"debugFile,omitempty"`
+	OAuthToken string `json:"oAuthToken,omitempty"`
+	BasePath string `json:"basePath,omitempty"`
+	Host string `json:"host,omitempty"`
+	Scheme string `json:"scheme,omitempty"`
+	AccessToken string `json:"accessToken,omitempty"`
+	DefaultHeader map[string]string `json:"defaultHeader,omitempty"`
+	UserAgent string `json:"userAgent,omitempty"`
+	APIClient *APIClient
+	Transport *http.Transport
+	Timeout *time.Duration `json:"timeout,omitempty"`
+}
+
+func NewConfiguration() *Configuration {
+	cfg := &Configuration{
+		BasePath: "http://localhost",
+		DefaultHeader: make(map[string]string),
+		APIKey: make(map[string]string),
+		APIKeyPrefix: make(map[string]string),
+		UserAgent: "Swagger-Codegen/1.0.0/go",
+		APIClient: &APIClient{},
+	}
+
+	cfg.APIClient.config = cfg
+	return cfg
+}
+
+func (c *Configuration) GetBasicAuthEncodedString() string {
+	return base64.StdEncoding.EncodeToString([]byte(c.Username + ":" + c.Password))
+}
+
+func (c *Configuration) AddDefaultHeader(key string, value string) {
+	c.DefaultHeader[key] = value
+}
+
+func (c *Configuration) GetAPIKeyWithPrefix(APIKeyIdentifier string) string {
+	if c.APIKeyPrefix[APIKeyIdentifier] != "" {
+		return c.APIKeyPrefix[APIKeyIdentifier] + " " + c.APIKey[APIKeyIdentifier]
+	}
+
+	return c.APIKey[APIKeyIdentifier]
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_rpc_api.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_rpc_api.go new file mode 100644 index 00000000..8afbbdd4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_rpc_api.go @@ -0,0 +1,265 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "net/url" + "strings" + "encoding/json" + "fmt" +) + +type EchoRpcApi struct { + Configuration *Configuration +} + +func NewEchoRpcApi() *EchoRpcApi { + configuration := NewConfiguration() + return &EchoRpcApi{ + Configuration: configuration, + } +} + +func NewEchoRpcApiWithBasePath(basePath string) *EchoRpcApi { + configuration := NewConfiguration() + configuration.BasePath = basePath + + return &EchoRpcApi{ + Configuration: configuration, + } +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param value + * @return *SubStringMessage + */ +func (a EchoRpcApi) Echo(value string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/echo/{value}" + localVarPath = strings.Replace(localVarPath, "{"+"value"+"}", fmt.Sprintf("%v", value), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, 
localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param body + * @return *SubStringMessage + */ +func (a EchoRpcApi) Echo2(body string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/echo" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo2", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, 
localVarAPIResponse, err +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param value + * @return *SubStringMessage + */ +func (a EchoRpcApi) Echo3(value string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/echo" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + localVarQueryParams.Add("value", a.Configuration.APIClient.ParameterToString(value, "")) + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo3", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_service_api.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_service_api.go new file mode 100644 index 00000000..56817771 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/echo_service_api.go @@ -0,0 +1,265 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI 
spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "net/url" + "strings" + "encoding/json" + "fmt" +) + +type EchoServiceApi struct { + Configuration *Configuration +} + +func NewEchoServiceApi() *EchoServiceApi { + configuration := NewConfiguration() + return &EchoServiceApi{ + Configuration: configuration, + } +} + +func NewEchoServiceApiWithBasePath(basePath string) *EchoServiceApi { + configuration := NewConfiguration() + configuration.BasePath = basePath + + return &EchoServiceApi{ + Configuration: configuration, + } +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param value + * @return *SubStringMessage + */ +func (a EchoServiceApi) Echo(value string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/a_bit_of_everything/echo/{value}" + localVarPath = strings.Replace(localVarPath, "{"+"value"+"}", fmt.Sprintf("%v", value), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + 
err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param body + * @return *SubStringMessage + */ +func (a EchoServiceApi) Echo2(body string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/echo" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo2", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * Summary: Echo rpc + * Description Echo + * + * @param value + * @return *SubStringMessage + */ +func (a EchoServiceApi) Echo3(value string) (*SubStringMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v2/example/echo" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := 
make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // authentication '(OAuth2)' required + // oauth required + if a.Configuration.AccessToken != ""{ + localVarHeaderParams["Authorization"] = "Bearer " + a.Configuration.AccessToken + } + // authentication '(BasicAuth)' required + // http basic authentication required + if a.Configuration.Username != "" || a.Configuration.Password != ""{ + localVarHeaderParams["Authorization"] = "Basic " + a.Configuration.GetBasicAuthEncodedString() + } + // authentication '(ApiKeyAuth)' required + // set key with prefix in header + localVarHeaderParams["X-API-Key"] = a.Configuration.GetAPIKeyWithPrefix("X-API-Key") + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + localVarQueryParams.Add("value", a.Configuration.APIClient.ParameterToString(value, "")) + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", "application/x-foo-mime", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + "application/x-foo-mime", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(SubStringMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo3", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_a_bit_of_everything.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_a_bit_of_everything.go new file mode 100644 index 00000000..cb6fb3a4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_a_bit_of_everything.go @@ -0,0 +1,74 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +import ( + "time" +) + +type ExamplepbABitOfEverything struct { + + SingleNested ABitOfEverythingNested `json:"single_nested,omitempty"` + + Uuid string `json:"uuid,omitempty"` + + Nested []ABitOfEverythingNested `json:"nested,omitempty"` + + FloatValue float32 `json:"float_value,omitempty"` + + 
DoubleValue float64 `json:"double_value,omitempty"` + + Int64Value string `json:"int64_value,omitempty"` + + Uint64Value string `json:"uint64_value,omitempty"` + + Int32Value int32 `json:"int32_value,omitempty"` + + Fixed64Value string `json:"fixed64_value,omitempty"` + + Fixed32Value int64 `json:"fixed32_value,omitempty"` + + BoolValue bool `json:"bool_value,omitempty"` + + StringValue string `json:"string_value,omitempty"` + + BytesValue string `json:"bytes_value,omitempty"` + + Uint32Value int64 `json:"uint32_value,omitempty"` + + EnumValue ExamplepbNumericEnum `json:"enum_value,omitempty"` + + Sfixed32Value int32 `json:"sfixed32_value,omitempty"` + + Sfixed64Value string `json:"sfixed64_value,omitempty"` + + Sint32Value int32 `json:"sint32_value,omitempty"` + + Sint64Value string `json:"sint64_value,omitempty"` + + RepeatedStringValue []string `json:"repeated_string_value,omitempty"` + + OneofEmpty ProtobufEmpty `json:"oneof_empty,omitempty"` + + OneofString string `json:"oneof_string,omitempty"` + + MapValue map[string]ExamplepbNumericEnum `json:"map_value,omitempty"` + + MappedStringValue map[string]string `json:"mapped_string_value,omitempty"` + + MappedNestedValue map[string]ABitOfEverythingNested `json:"mapped_nested_value,omitempty"` + + NonConventionalNameValue string `json:"nonConventionalNameValue,omitempty"` + + TimestampValue time.Time `json:"timestamp_value,omitempty"` + + RepeatedEnumValue []ExamplepbNumericEnum `json:"repeated_enum_value,omitempty"` +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_body.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_body.go new file mode 100644 index 00000000..13f4c2dc --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_body.go @@ -0,0 +1,16 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +type ExamplepbBody struct { + + Name string `json:"name,omitempty"` +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_numeric_enum.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_numeric_enum.go new file mode 100644 index 00000000..e953bbe3 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/examplepb_numeric_enum.go @@ -0,0 +1,15 @@ +/* + * A Bit of Everything + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * OpenAPI spec version: 1.0 + * Contact: none@example.com + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package abe + +// NumericEnum is one or zero. 
- ZERO: ZERO means 0 - ONE: ONE means 1
+type ExamplepbNumericEnum struct {
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/nested_deep_enum.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/nested_deep_enum.go
new file mode 100644
index 00000000..e5fc17d5
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/nested_deep_enum.go
@@ -0,0 +1,15 @@
+/*
+ * A Bit of Everything
+ *
+ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
+ *
+ * OpenAPI spec version: 1.0
+ * Contact: none@example.com
+ * Generated by: https://github.com/swagger-api/swagger-codegen.git
+ */
+
+package abe
+
+// DeepEnum is one or zero. - FALSE: FALSE is false. - TRUE: TRUE is true.
+type NestedDeepEnum struct {
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/protobuf_empty.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/protobuf_empty.go
new file mode 100644
index 00000000..97c7bf61
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/protobuf_empty.go
@@ -0,0 +1,15 @@
+/*
+ * A Bit of Everything
+ *
+ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
+ *
+ * OpenAPI spec version: 1.0
+ * Contact: none@example.com
+ * Generated by: https://github.com/swagger-api/swagger-codegen.git
+ */
+
+package abe
+
+// service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`.
+type ProtobufEmpty struct {
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/sub_string_message.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/sub_string_message.go
new file mode 100644
index 00000000..2a0874fc
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/abe/sub_string_message.go
@@ -0,0 +1,16 @@
+/*
+ * A Bit of Everything
+ *
+ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
+ *
+ * OpenAPI spec version: 1.0
+ * Contact: none@example.com
+ * Generated by: https://github.com/swagger-api/swagger-codegen.git
+ */
+
+package abe
+
+type SubStringMessage struct {
+
+	Value string `json:"value,omitempty"`
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.gitignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.gitignore
new file mode 100644
index 00000000..2f882691
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.gitignore
@@ -0,0 +1 @@
+/docs
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.swagger-codegen-ignore b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.swagger-codegen-ignore
new file mode 100644
index 00000000..6c7b69a0
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/.swagger-codegen-ignore
@@ -0,0 +1 @@
+.gitignore
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/BUILD.bazel
new file mode 100644
index 00000000..99243f3c
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+package(default_visibility =
["//visibility:public"]) + +go_library( + name = "go_default_library", + srcs = [ + "api_client.go", + "api_response.go", + "configuration.go", + "echo_service_api.go", + "examplepb_simple_message.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo", + deps = ["@com_github_go_resty_resty//:go_default_library"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/EchoServiceApi.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/EchoServiceApi.go deleted file mode 100644 index 0cf5b582..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/EchoServiceApi.go +++ /dev/null @@ -1,145 +0,0 @@ -package echo - -import ( - "strings" - "fmt" - "encoding/json" - "errors" - "github.com/dghubble/sling" -) - -type EchoServiceApi struct { - basePath string -} - -func NewEchoServiceApi() *EchoServiceApi{ - return &EchoServiceApi { - basePath: "http://localhost", - } -} - -func NewEchoServiceApiWithBasePath(basePath string) *EchoServiceApi{ - return &EchoServiceApi { - basePath: basePath, - } -} - -/** - * Echo method receives a simple message and returns it. - * The message posted as the id parameter will also be\nreturned. - * @param id - * @return ExamplepbSimpleMessage - */ -//func (a EchoServiceApi) Echo (id string) (ExamplepbSimpleMessage, error) { -func (a EchoServiceApi) Echo (id string) (ExamplepbSimpleMessage, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v1/example/echo/{id}" - path = strings.Replace(path, "{" + "id" + "}", fmt.Sprintf("%v", id), -1) - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - - - var successPayload = new(ExamplepbSimpleMessage) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} -/** - * EchoBody method receives a simple message and returns it. 
- * - * @param body - * @return ExamplepbSimpleMessage - */ -//func (a EchoServiceApi) EchoBody (body ExamplepbSimpleMessage) (ExamplepbSimpleMessage, error) { -func (a EchoServiceApi) EchoBody (body ExamplepbSimpleMessage) (ExamplepbSimpleMessage, error) { - - _sling := sling.New().Post(a.basePath) - - // create path and map variables - path := "/v1/example/echo_body" - - _sling = _sling.Path(path) - - // accept header - accepts := []string { "application/json" } - for key := range accepts { - _sling = _sling.Set("Accept", accepts[key]) - break // only use the first Accept - } - -// body params - _sling = _sling.BodyJSON(body) - - var successPayload = new(ExamplepbSimpleMessage) - - // We use this map (below) so that any arbitrary error JSON can be handled. - // FIXME: This is in the absence of this Go generator honoring the non-2xx - // response (error) models, which needs to be implemented at some point. - var failurePayload map[string]interface{} - - httpResponse, err := _sling.Receive(successPayload, &failurePayload) - - if err == nil { - // err == nil only means that there wasn't a sub-application-layer error (e.g. no network error) - if failurePayload != nil { - // If the failurePayload is present, there likely was some kind of non-2xx status - // returned (and a JSON payload error present) - var str []byte - str, err = json.Marshal(failurePayload) - if err == nil { // For safety, check for an error marshalling... probably superfluous - // This will return the JSON error body as a string - err = errors.New(string(str)) - } - } else { - // So, there was no network-type error, and nothing in the failure payload, - // but we should still check the status code - if httpResponse == nil { - // This should never happen... - err = errors.New("No HTTP Response received.") - } else if code := httpResponse.StatusCode; 200 > code || code > 299 { - err = errors.New("HTTP Error: " + string(httpResponse.StatusCode)) - } - } - } - - return *successPayload, err -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/ExamplepbSimpleMessage.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/ExamplepbSimpleMessage.go deleted file mode 100644 index ed9ada35..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/ExamplepbSimpleMessage.go +++ /dev/null @@ -1,9 +0,0 @@ -package echo - -import ( -) - -type ExamplepbSimpleMessage struct { - Id string `json:"id,omitempty"` - -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_client.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_client.go new file mode 100644 index 00000000..7a517148 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_client.go @@ -0,0 +1,164 @@ +/* + * Echo Service + * + * Echo Service API consists of a single service which returns a message. 
+ * + * OpenAPI spec version: version not set + * + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package echo + +import ( + "bytes" + "fmt" + "path/filepath" + "reflect" + "strings" + "net/url" + "io/ioutil" + "github.com/go-resty/resty" +) + +type APIClient struct { + config *Configuration +} + +func (c *APIClient) SelectHeaderContentType(contentTypes []string) string { + + if len(contentTypes) == 0 { + return "" + } + if contains(contentTypes, "application/json") { + return "application/json" + } + return contentTypes[0] // use the first content type specified in 'consumes' +} + +func (c *APIClient) SelectHeaderAccept(accepts []string) string { + + if len(accepts) == 0 { + return "" + } + if contains(accepts, "application/json") { + return "application/json" + } + return strings.Join(accepts, ",") +} + +func contains(haystack []string, needle string) bool { + for _, a := range haystack { + if strings.ToLower(a) == strings.ToLower(needle) { + return true + } + } + return false +} + +func (c *APIClient) CallAPI(path string, method string, + postBody interface{}, + headerParams map[string]string, + queryParams url.Values, + formParams map[string]string, + fileName string, + fileBytes []byte) (*resty.Response, error) { + + rClient := c.prepareClient() + request := c.prepareRequest(rClient, postBody, headerParams, queryParams, formParams, fileName, fileBytes) + + switch strings.ToUpper(method) { + case "GET": + response, err := request.Get(path) + return response, err + case "POST": + response, err := request.Post(path) + return response, err + case "PUT": + response, err := request.Put(path) + return response, err + case "PATCH": + response, err := request.Patch(path) + return response, err + case "DELETE": + response, err := request.Delete(path) + return response, err + } + + return nil, fmt.Errorf("invalid method %v", method) +} + +func (c *APIClient) ParameterToString(obj interface{}, collectionFormat string) string { + delimiter := "" + switch collectionFormat { + case "pipes": + delimiter = "|" + case "ssv": + delimiter = " " + case "tsv": + delimiter = "\t" + case "csv": + delimiter = "," + } + + if reflect.TypeOf(obj).Kind() == reflect.Slice { + return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]") + } + + return fmt.Sprintf("%v", obj) +} + +func (c *APIClient) prepareClient() *resty.Client { + + rClient := resty.New() + + rClient.SetDebug(c.config.Debug) + if c.config.Transport != nil { + rClient.SetTransport(c.config.Transport) + } + + if c.config.Timeout != nil { + rClient.SetTimeout(*c.config.Timeout) + } + rClient.SetLogger(ioutil.Discard) + return rClient +} + +func (c *APIClient) prepareRequest( + rClient *resty.Client, + postBody interface{}, + headerParams map[string]string, + queryParams url.Values, + formParams map[string]string, + fileName string, + fileBytes []byte) *resty.Request { + + + request := rClient.R() + request.SetBody(postBody) + + if c.config.UserAgent != "" { + request.SetHeader("User-Agent", c.config.UserAgent) + } + + // add header parameter, if any + if len(headerParams) > 0 { + request.SetHeaders(headerParams) + } + + // add query parameter, if any + if len(queryParams) > 0 { + request.SetMultiValueQueryParams(queryParams) + } + + // add form parameter, if any + if len(formParams) > 0 { + request.SetFormData(formParams) + } + + if len(fileBytes) > 0 && fileName != "" { + _, fileNm := filepath.Split(fileName) + request.SetFileReader("file", fileNm, bytes.NewReader(fileBytes)) + } + return request +} 
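A rough usage sketch of the regenerated client (not authoritative; it assumes a gateway listening on http://localhost:8080, as the integration tests further down in this diff do): the new resty-based APIClient is threaded through a Configuration, and each call now returns the payload, an *APIResponse wrapper, and an error instead of the old two-value form.

package main

import (
	"fmt"
	"log"

	"github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo"
)

func main() {
	// NewEchoServiceApiWithBasePath builds a default Configuration with the given base path.
	cl := echo.NewEchoServiceApiWithBasePath("http://localhost:8080")

	// Echo posts /v1/example/echo/{id} and returns (payload, *APIResponse, error).
	msg, apiResp, err := cl.Echo("foo")
	if err != nil {
		log.Fatalf("Echo failed: %v", err)
	}
	fmt.Printf("id=%s status=%d\n", msg.Id, apiResp.StatusCode)
}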
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_response.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_response.go new file mode 100644 index 00000000..8b0d07c4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/api_response.go @@ -0,0 +1,44 @@ +/* + * Echo Service + * + * Echo Service API consists of a single service which returns a message. + * + * OpenAPI spec version: version not set + * + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package echo + +import ( + "net/http" +) + +type APIResponse struct { + *http.Response `json:"-"` + Message string `json:"message,omitempty"` + // Operation is the name of the swagger operation. + Operation string `json:"operation,omitempty"` + // RequestURL is the request URL. This value is always available, even if the + // embedded *http.Response is nil. + RequestURL string `json:"url,omitempty"` + // Method is the HTTP method used for the request. This value is always + // available, even if the embedded *http.Response is nil. + Method string `json:"method,omitempty"` + // Payload holds the contents of the response body (which may be nil or empty). + // This is provided here as the raw response.Body() reader will have already + // been drained. + Payload []byte `json:"-"` +} + +func NewAPIResponse(r *http.Response) *APIResponse { + + response := &APIResponse{Response: r} + return response +} + +func NewAPIResponseWithError(errorMessage string) *APIResponse { + + response := &APIResponse{Message: errorMessage} + return response +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/configuration.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/configuration.go new file mode 100644 index 00000000..9a75a30a --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/configuration.go @@ -0,0 +1,67 @@ +/* + * Echo Service + * + * Echo Service API consists of a single service which returns a message. 
+ * + * OpenAPI spec version: version not set + * + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package echo + +import ( + "encoding/base64" + "net/http" + "time" +) + + +type Configuration struct { + Username string `json:"userName,omitempty"` + Password string `json:"password,omitempty"` + APIKeyPrefix map[string]string `json:"APIKeyPrefix,omitempty"` + APIKey map[string]string `json:"APIKey,omitempty"` + Debug bool `json:"debug,omitempty"` + DebugFile string `json:"debugFile,omitempty"` + OAuthToken string `json:"oAuthToken,omitempty"` + BasePath string `json:"basePath,omitempty"` + Host string `json:"host,omitempty"` + Scheme string `json:"scheme,omitempty"` + AccessToken string `json:"accessToken,omitempty"` + DefaultHeader map[string]string `json:"defaultHeader,omitempty"` + UserAgent string `json:"userAgent,omitempty"` + APIClient *APIClient + Transport *http.Transport + Timeout *time.Duration `json:"timeout,omitempty"` +} + +func NewConfiguration() *Configuration { + cfg := &Configuration{ + BasePath: "http://localhost", + DefaultHeader: make(map[string]string), + APIKey: make(map[string]string), + APIKeyPrefix: make(map[string]string), + UserAgent: "Swagger-Codegen/1.0.0/go", + APIClient: &APIClient{}, + } + + cfg.APIClient.config = cfg + return cfg +} + +func (c *Configuration) GetBasicAuthEncodedString() string { + return base64.StdEncoding.EncodeToString([]byte(c.Username + ":" + c.Password)) +} + +func (c *Configuration) AddDefaultHeader(key string, value string) { + c.DefaultHeader[key] = value +} + +func (c *Configuration) GetAPIKeyWithPrefix(APIKeyIdentifier string) string { + if c.APIKeyPrefix[APIKeyIdentifier] != "" { + return c.APIKeyPrefix[APIKeyIdentifier] + " " + c.APIKey[APIKeyIdentifier] + } + + return c.APIKey[APIKeyIdentifier] +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/echo_service_api.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/echo_service_api.go new file mode 100644 index 00000000..41316dce --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/echo_service_api.go @@ -0,0 +1,286 @@ +/* + * Echo Service + * + * Echo Service API consists of a single service which returns a message. + * + * OpenAPI spec version: version not set + * + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package echo + +import ( + "net/url" + "strings" + "encoding/json" + "fmt" +) + +type EchoServiceApi struct { + Configuration *Configuration +} + +func NewEchoServiceApi() *EchoServiceApi { + configuration := NewConfiguration() + return &EchoServiceApi{ + Configuration: configuration, + } +} + +func NewEchoServiceApiWithBasePath(basePath string) *EchoServiceApi { + configuration := NewConfiguration() + configuration.BasePath = basePath + + return &EchoServiceApi{ + Configuration: configuration, + } +} + +/** + * Echo method receives a simple message and returns it. + * The message posted as the id parameter will also be returned. 
+ * + * @param id + * @return *ExamplepbSimpleMessage + */ +func (a EchoServiceApi) Echo(id string) (*ExamplepbSimpleMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/echo/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ExamplepbSimpleMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * Echo method receives a simple message and returns it. + * The message posted as the id parameter will also be returned. 
+ * + * @param id + * @param num + * @return *ExamplepbSimpleMessage + */ +func (a EchoServiceApi) Echo2(id string, num string) (*ExamplepbSimpleMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Get") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/echo/{id}/{num}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) + localVarPath = strings.Replace(localVarPath, "{"+"num"+"}", fmt.Sprintf("%v", num), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ExamplepbSimpleMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "Echo2", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * EchoBody method receives a simple message and returns it. 
+ * + * @param body + * @return *ExamplepbSimpleMessage + */ +func (a EchoServiceApi) EchoBody(body ExamplepbSimpleMessage) (*ExamplepbSimpleMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Post") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/echo_body" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + var successPayload = new(ExamplepbSimpleMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "EchoBody", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + +/** + * EchoDelete method receives a simple message and returns it. + * + * @param id Id represents the message identifier. 
+ * @param num + * @return *ExamplepbSimpleMessage + */ +func (a EchoServiceApi) EchoDelete(id string, num string) (*ExamplepbSimpleMessage, *APIResponse, error) { + + var localVarHttpMethod = strings.ToUpper("Delete") + // create path and map variables + localVarPath := a.Configuration.BasePath + "/v1/example/echo_delete" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := make(map[string]string) + var localVarPostBody interface{} + var localVarFileName string + var localVarFileBytes []byte + // add default headers if any + for key := range a.Configuration.DefaultHeader { + localVarHeaderParams[key] = a.Configuration.DefaultHeader[key] + } + localVarQueryParams.Add("id", a.Configuration.APIClient.ParameterToString(id, "")) + localVarQueryParams.Add("num", a.Configuration.APIClient.ParameterToString(num, "")) + + // to determine the Content-Type header + localVarHttpContentTypes := []string{ "application/json", } + + // set Content-Type header + localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + // to determine the Accept header + localVarHttpHeaderAccepts := []string{ + "application/json", + } + + // set Accept header + localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + var successPayload = new(ExamplepbSimpleMessage) + localVarHttpResponse, err := a.Configuration.APIClient.CallAPI(localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + + var localVarURL, _ = url.Parse(localVarPath) + localVarURL.RawQuery = localVarQueryParams.Encode() + var localVarAPIResponse = &APIResponse{Operation: "EchoDelete", Method: localVarHttpMethod, RequestURL: localVarURL.String()} + if localVarHttpResponse != nil { + localVarAPIResponse.Response = localVarHttpResponse.RawResponse + localVarAPIResponse.Payload = localVarHttpResponse.Body() + } + + if err != nil { + return successPayload, localVarAPIResponse, err + } + err = json.Unmarshal(localVarHttpResponse.Body(), &successPayload) + return successPayload, localVarAPIResponse, err +} + diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/examplepb_simple_message.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/examplepb_simple_message.go new file mode 100644 index 00000000..98eb8325 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/clients/echo/examplepb_simple_message.go @@ -0,0 +1,20 @@ +/* + * Echo Service + * + * Echo Service API consists of a single service which returns a message. + * + * OpenAPI spec version: version not set + * + * Generated by: https://github.com/swagger-api/swagger-codegen.git + */ + +package echo + +// SimpleMessage represents a simple message sent to the Echo service. +type ExamplepbSimpleMessage struct { + + // Id represents the message identifier. 
+ Id string `json:"id,omitempty"` + + Num string `json:"num,omitempty"` +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/BUILD.bazel new file mode 100644 index 00000000..0f7c5052 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/BUILD.bazel @@ -0,0 +1,18 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["main.go"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server", + visibility = ["//visibility:private"], + deps = [ + "//examples/gateway:go_default_library", + "@com_github_golang_glog//:go_default_library", + ], +) + +go_binary( + name = "example-gateway-server", + embed = [":go_default_library"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/main.go new file mode 100644 index 00000000..0273ffbb --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-gateway-server/main.go @@ -0,0 +1,37 @@ +/* +Command example-gateway-server is an example reverse-proxy implementation +whose HTTP handler is generated by grpc-gateway. +*/ +package main + +import ( + "context" + "flag" + + "github.com/golang/glog" + "github.com/grpc-ecosystem/grpc-gateway/examples/gateway" +) + +var ( + endpoint = flag.String("endpoint", "localhost:9090", "endpoint of the gRPC service") + network = flag.String("network", "tcp", `one of "tcp" or "unix". Must be consistent to -endpoint`) + swaggerDir = flag.String("swagger_dir", "examples/proto/examplepb", "path to the directory which contains swagger definitions") +) + +func main() { + flag.Parse() + defer glog.Flush() + + ctx := context.Background() + opts := gateway.Options{ + Addr: ":8080", + GRPCServer: gateway.Endpoint{ + Network: *network, + Addr: *endpoint, + }, + SwaggerDir: *swaggerDir, + } + if err := gateway.Run(ctx, opts); err != nil { + glog.Fatal(err) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/BUILD.bazel new file mode 100644 index 00000000..1c66416c --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/BUILD.bazel @@ -0,0 +1,19 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +package(default_visibility = ["//visibility:private"]) + +go_library( + name = "go_default_library", + srcs = ["main.go"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server", + deps = [ + "//examples/server:go_default_library", + "@com_github_golang_glog//:go_default_library", + ], +) + +go_binary( + name = "example-server", + embed = [":go_default_library"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/main.go new file mode 100644 index 00000000..7f6c45f5 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/cmd/example-grpc-server/main.go @@ -0,0 +1,28 @@ +/* +Command example-grpc-server is an example grpc server +to be 
called by example-gateway-server. +*/ +package main + +import ( + "context" + "flag" + + "github.com/golang/glog" + "github.com/grpc-ecosystem/grpc-gateway/examples/server" +) + +var ( + addr = flag.String("addr", ":9090", "endpoint of the gRPC service") + network = flag.String("network", "tcp", "a valid network type which is consistent to -addr") +) + +func main() { + flag.Parse() + defer glog.Flush() + + ctx := context.Background() + if err := server.Run(ctx, *network, *addr); err != nil { + glog.Fatal(err) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.proto deleted file mode 100644 index 91e561e0..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.proto +++ /dev/null @@ -1,137 +0,0 @@ -syntax = "proto3"; -option go_package = "examplepb"; -package grpc.gateway.examples.examplepb; - -import "google/api/annotations.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/duration.proto"; -import "examples/sub/message.proto"; -import "examples/sub2/message.proto"; -import "google/protobuf/timestamp.proto"; - -// Intentionaly complicated message type to cover much features of Protobuf. -// NEXT ID: 27 -message ABitOfEverything { - // Nested is nested type. - message Nested { - // name is nested field. - string name = 1; - uint32 amount = 2; - // DeepEnum is one or zero. - enum DeepEnum { - // FALSE is false. - FALSE = 0; - // TRUE is true. - TRUE = 1; - } - DeepEnum ok = 3; - } - Nested single_nested = 25; - - string uuid = 1; - repeated Nested nested = 2; - float float_value = 3; - double double_value = 4; - int64 int64_value = 5; - uint64 uint64_value = 6; - int32 int32_value = 7; - fixed64 fixed64_value = 8; - fixed32 fixed32_value = 9; - bool bool_value = 10; - string string_value = 11; - // TODO(yugui) add bytes_value - uint32 uint32_value = 13; - NumericEnum enum_value = 14; - sfixed32 sfixed32_value = 15; - sfixed64 sfixed64_value = 16; - sint32 sint32_value = 17; - sint64 sint64_value = 18; - repeated string repeated_string_value = 19; - oneof oneof_value { - google.protobuf.Empty oneof_empty = 20; - string oneof_string = 21; - } - - map map_value = 22; - map mapped_string_value = 23; - map mapped_nested_value = 24; - - string nonConventionalNameValue = 26; - - google.protobuf.Timestamp timestamp_value = 27; - - // repeated enum value. it is comma-separated in query - repeated NumericEnum repeated_enum_value = 28; -} - -// NumericEnum is one or zero. 
-enum NumericEnum { - // ZERO means 0 - ZERO = 0; - // ONE means 1 - ONE = 1; -} - -service ABitOfEverythingService { - rpc Create(ABitOfEverything) returns (ABitOfEverything) { - // TODO add enum_value - option (google.api.http) = { - post: "/v1/example/a_bit_of_everything/{float_value}/{double_value}/{int64_value}/separator/{uint64_value}/{int32_value}/{fixed64_value}/{fixed32_value}/{bool_value}/{string_value=strprefix/*}/{uint32_value}/{sfixed32_value}/{sfixed64_value}/{sint32_value}/{sint64_value}/{nonConventionalNameValue}" - }; - } - rpc CreateBody(ABitOfEverything) returns (ABitOfEverything) { - option (google.api.http) = { - post: "/v1/example/a_bit_of_everything" - body: "*" - }; - } - rpc Lookup(sub2.IdMessage) returns (ABitOfEverything) { - option (google.api.http) = { - get: "/v1/example/a_bit_of_everything/{uuid}" - }; - } - rpc Update(ABitOfEverything) returns (google.protobuf.Empty) { - option (google.api.http) = { - put: "/v1/example/a_bit_of_everything/{uuid}" - body: "*" - }; - } - rpc Delete(sub2.IdMessage) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/example/a_bit_of_everything/{uuid}" - }; - } - rpc GetQuery(ABitOfEverything) returns (google.protobuf.Empty) { - option (google.api.http) = { - get: "/v1/example/a_bit_of_everything/query/{uuid}" - }; - } - rpc Echo(grpc.gateway.examples.sub.StringMessage) returns (grpc.gateway.examples.sub.StringMessage) { - option (google.api.http) = { - get: "/v1/example/a_bit_of_everything/echo/{value}" - additional_bindings { - post: "/v2/example/echo" - body: "value" - } - additional_bindings { - get: "/v2/example/echo" - } - }; - } - rpc DeepPathEcho(ABitOfEverything) returns (ABitOfEverything) { - option (google.api.http) = { - post: "/v1/example/a_bit_of_everything/{single_nested.name}" - body: "*" - }; - } - rpc NoBindings(google.protobuf.Duration) returns (google.protobuf.Empty) {} - rpc Timeout(google.protobuf.Empty) returns (google.protobuf.Empty) { - option (google.api.http) = { - get: "/v2/example/timeout", - }; - } -} - -service AnotherServiceWithNoBindings { - rpc NoBindings(google.protobuf.Empty) returns (google.protobuf.Empty) {} -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.gw.go deleted file mode 100644 index 70395244..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.gw.go +++ /dev/null @@ -1,173 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway -// source: examples/examplepb/echo_service.proto -// DO NOT EDIT! - -/* -Package examplepb is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package examplepb - -import ( - "io" - "net/http" - - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" - "golang.org/x/net/context" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/status" -) - -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray - -func request_EchoService_Echo_0(ctx context.Context, marshaler runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq SimpleMessage - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["id"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") - } - - protoReq.Id, err = runtime.String(val) - - if err != nil { - return nil, metadata, err - } - - msg, err := client.Echo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func request_EchoService_EchoBody_0(ctx context.Context, marshaler runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq SimpleMessage - var metadata runtime.ServerMetadata - - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.EchoBody(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -// RegisterEchoServiceHandlerFromEndpoint is same as RegisterEchoServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterEchoServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterEchoServiceHandler(ctx, mux, conn) -} - -// RegisterEchoServiceHandler registers the http handlers for service EchoService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". 
-func RegisterEchoServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewEchoServiceClient(conn) - - mux.Handle("POST", pattern_EchoService_Echo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) - defer cancel() - if cn, ok := w.(http.CloseNotifier); ok { - go func(done <-chan struct{}, closed <-chan bool) { - select { - case <-done: - case <-closed: - cancel() - } - }(ctx.Done(), cn.CloseNotify()) - } - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_EchoService_Echo_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_EchoService_Echo_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("POST", pattern_EchoService_EchoBody_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) - defer cancel() - if cn, ok := w.(http.CloseNotifier); ok { - go func(done <-chan struct{}, closed <-chan bool) { - select { - case <-done: - case <-closed: - cancel() - } - }(ctx.Done(), cn.CloseNotify()) - } - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_EchoService_EchoBody_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_EchoService_EchoBody_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - return nil -} - -var ( - pattern_EchoService_Echo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v1", "example", "echo", "id"}, "")) - - pattern_EchoService_EchoBody_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "example", "echo_body"}, "")) -) - -var ( - forward_EchoService_Echo_0 = runtime.ForwardResponseMessage - - forward_EchoService_EchoBody_0 = runtime.ForwardResponseMessage -) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/BUILD.bazel new file mode 100644 index 00000000..d076e613 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/BUILD.bazel @@ -0,0 +1,19 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = [ + "doc.go", + "gateway.go", + "handlers.go", + "main.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/gateway", + visibility = ["//visibility:public"], + deps = [ + "//examples/proto/examplepb:go_default_library", + "//runtime:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@org_golang_google_grpc//:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/doc.go new file mode 100644 index 00000000..a9ca8369 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/doc.go @@ -0,0 +1,2 @@ +// Package gateway is an example of grpc-gateway server +package gateway diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/gateway.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/gateway.go new file mode 100644 index 00000000..8430331b --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/gateway.go @@ -0,0 +1,68 @@ +package gateway + +import ( + "context" + "fmt" + "net" + "net/http" + "time" + + "github.com/golang/glog" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" + gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" + "google.golang.org/grpc" +) + +// newGateway returns a new gateway server which translates HTTP into gRPC. +func newGateway(ctx context.Context, network, addr string, opts []gwruntime.ServeMuxOption) (http.Handler, error) { + conn, err := dial(ctx, network, addr) + if err != nil { + return nil, err + } + go func() { + <-ctx.Done() + if err := conn.Close(); err != nil { + glog.Errorf("Failed to close a client connection to the gRPC server: %v", err) + } + }() + + mux := gwruntime.NewServeMux(opts...) + + for _, f := range []func(context.Context, *gwruntime.ServeMux, *grpc.ClientConn) error{ + examplepb.RegisterEchoServiceHandler, + examplepb.RegisterStreamServiceHandler, + examplepb.RegisterABitOfEverythingServiceHandler, + examplepb.RegisterFlowCombinationHandler, + } { + if err := f(ctx, mux, conn); err != nil { + return nil, err + } + } + return mux, nil +} + +func dial(ctx context.Context, network, addr string) (*grpc.ClientConn, error) { + switch network { + case "tcp": + return dialTCP(ctx, addr) + case "unix": + return dialUnix(ctx, addr) + default: + return nil, fmt.Errorf("unsupported network type %q", network) + } +} + +// dialTCP creates a client connection via TCP. +// "addr" must be a valid TCP address with a port number. 
+func dialTCP(ctx context.Context, addr string) (*grpc.ClientConn, error) { + return grpc.DialContext(ctx, addr, grpc.WithInsecure()) +} + +// dialUnix creates a client connection via a unix domain socket. +// "addr" must be a valid path to the socket. +func dialUnix(ctx context.Context, addr string) (*grpc.ClientConn, error) { + d := func(addr string, timeout time.Duration) (net.Conn, error) { + return net.DialTimeout("unix", addr, timeout) + } + return grpc.DialContext(ctx, addr, grpc.WithInsecure(), grpc.WithDialer(d)) +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/handlers.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/handlers.go new file mode 100644 index 00000000..7581125b --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/handlers.go @@ -0,0 +1,48 @@ +package gateway + +import ( + "net/http" + "path" + "strings" + + "github.com/golang/glog" +) + +func swaggerServer(dir string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if !strings.HasSuffix(r.URL.Path, ".swagger.json") { + glog.Errorf("Not Found: %s", r.URL.Path) + http.NotFound(w, r) + return + } + + glog.Infof("Serving %s", r.URL.Path) + p := strings.TrimPrefix(r.URL.Path, "/swagger/") + p = path.Join(dir, p) + http.ServeFile(w, r, p) + } +} + +// allowCORS allows Cross Origin Resoruce Sharing from any origin. +// Don't do this without consideration in production systems. +func allowCORS(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if origin := r.Header.Get("Origin"); origin != "" { + w.Header().Set("Access-Control-Allow-Origin", origin) + if r.Method == "OPTIONS" && r.Header.Get("Access-Control-Request-Method") != "" { + preflightHandler(w, r) + return + } + } + h.ServeHTTP(w, r) + }) +} + +func preflightHandler(w http.ResponseWriter, r *http.Request) { + headers := []string{"Content-Type", "Accept"} + w.Header().Set("Access-Control-Allow-Headers", strings.Join(headers, ",")) + methods := []string{"GET", "HEAD", "POST", "PUT", "DELETE"} + w.Header().Set("Access-Control-Allow-Methods", strings.Join(methods, ",")) + glog.Infof("preflight request for %s", r.URL.Path) + return +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/main.go new file mode 100644 index 00000000..c59fd1f2 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/gateway/main.go @@ -0,0 +1,65 @@ +package gateway + +import ( + "context" + "net/http" + + "github.com/golang/glog" + gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" +) + +// Endpoint describes a gRPC endpoint +type Endpoint struct { + Network, Addr string +} + +// Options is a set of options to be passed to Run +type Options struct { + // Addr is the address to listen + Addr string + + // GRPCServer defines an endpoint of a gRPC service + GRPCServer Endpoint + + // SwaggerDir is a path to a directory from which the server + // serves swagger specs. + SwaggerDir string + + // Mux is a list of options to be passed to the grpc-gateway multiplexer + Mux []gwruntime.ServeMuxOption +} + +// Run starts a HTTP server and blocks while running if successful. +// The server will be shutdown when "ctx" is canceled. 
+func Run(ctx context.Context, opts Options) error { + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + mux := http.NewServeMux() + mux.HandleFunc("/swagger/", swaggerServer(opts.SwaggerDir)) + + gw, err := newGateway(ctx, opts.GRPCServer.Network, opts.GRPCServer.Addr, opts.Mux) + if err != nil { + return err + } + mux.Handle("/", gw) + + s := &http.Server{ + Addr: opts.Addr, + Handler: allowCORS(mux), + } + go func() { + <-ctx.Done() + glog.Infof("Shutting down the http server") + if err := s.Shutdown(context.Background()); err != nil { + glog.Errorf("Failed to shutdown http server: %v", err) + } + }() + + glog.Infof("Starting listening at %s", opts.Addr) + if err := s.ListenAndServe(); err != http.ErrServerClosed { + glog.Errorf("Failed to listen and serve: %v", err) + return err + } + return nil +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/BUILD.bazel new file mode 100644 index 00000000..37ef3bc6 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/BUILD.bazel @@ -0,0 +1,26 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_test") + +go_test( + name = "go_default_xtest", + srcs = [ + "client_test.go", + "integration_test.go", + "main_test.go", + "proto_error_test.go", + ], + deps = [ + "//examples/clients/abe:go_default_library", + "//examples/clients/echo:go_default_library", + "//examples/gateway:go_default_library", + "//examples/proto/examplepb:go_default_library", + "//examples/proto/sub:go_default_library", + "//examples/server:go_default_library", + "//runtime:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//jsonpb:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//ptypes/empty:go_default_library", + "@org_golang_google_genproto//googleapis/rpc/status:go_default_library", + "@org_golang_google_grpc//codes:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/client_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/client_test.go similarity index 93% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/client_test.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/client_test.go index 0574d9fb..1e6b3b77 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/client_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/client_test.go @@ -1,4 +1,4 @@ -package main +package integration_test import ( "reflect" @@ -18,7 +18,7 @@ func TestEchoClient(t *testing.T) { } cl := echo.NewEchoServiceApiWithBasePath("http://localhost:8080") - resp, err := cl.Echo("foo") + resp, _, err := cl.Echo("foo") if err != nil { t.Errorf(`cl.Echo("foo") failed with %v; want success`, err) } @@ -35,7 +35,7 @@ func TestEchoBodyClient(t *testing.T) { cl := echo.NewEchoServiceApiWithBasePath("http://localhost:8080") req := echo.ExamplepbSimpleMessage{Id: "foo"} - resp, err := cl.EchoBody(req) + resp, _, err := cl.EchoBody(req) if err != nil { t.Errorf("cl.EchoBody(%#v) failed with %v; want success", req, err) } @@ -56,7 +56,7 @@ func TestAbitOfEverythingClient(t *testing.T) { } func testABEClientCreate(t *testing.T, cl *abe.ABitOfEverythingServiceApi) { - want := abe.ExamplepbABitOfEverything{ + want := &abe.ExamplepbABitOfEverything{ FloatValue: 1.5, DoubleValue: 2.5, Int64Value: 
"4294967296", @@ -73,7 +73,7 @@ func testABEClientCreate(t *testing.T, cl *abe.ABitOfEverythingServiceApi) { Sint64Value: "4611686018427387903", NonConventionalNameValue: "camelCase", } - resp, err := cl.Create( + resp, _, err := cl.Create( want.FloatValue, want.DoubleValue, want.Int64Value, @@ -136,8 +136,8 @@ func testABEClientCreateBody(t *testing.T, cl *abe.ABitOfEverythingServiceApi) { RepeatedStringValue: []string{"a", "b", "c"}, OneofString: "x", MapValue: map[string]abe.ExamplepbNumericEnum{ - // "a": abe.ExamplepbNumericEnum_ONE, - // "b": abe.ExamplepbNumericEnum_ZERO, + // "a": abe.ExamplepbNumericEnum_ONE, + // "b": abe.ExamplepbNumericEnum_ZERO, }, MappedStringValue: map[string]string{ "a": "x", @@ -148,7 +148,7 @@ func testABEClientCreateBody(t *testing.T, cl *abe.ABitOfEverythingServiceApi) { "b": {Name: "y", Amount: 2}, }, } - resp, err := cl.CreateBody(want) + resp, _, err := cl.CreateBody(want) if err != nil { t.Errorf("cl.CreateBody(%#v) failed with %v; want success", want, err) } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/integration_test.go similarity index 84% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration_test.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/integration_test.go index 710b8249..97b72dba 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/integration_test.go @@ -1,7 +1,8 @@ -package main +package integration_test import ( "bytes" + "context" "encoding/json" "fmt" "io" @@ -17,16 +18,16 @@ import ( "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes/empty" - gw "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" - sub "github.com/grpc-ecosystem/grpc-gateway/examples/sub" + gw "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" + sub "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" "github.com/grpc-ecosystem/grpc-gateway/runtime" - "golang.org/x/net/context" "google.golang.org/grpc/codes" ) type errorBody struct { - Error string `json:"error"` - Code int `json:"code"` + Error string `json:"error"` + Code int `json:"code"` + Details []interface{} `json:"details"` } func TestEcho(t *testing.T) { @@ -40,8 +41,13 @@ func TestEcho(t *testing.T) { } func TestForwardResponseOption(t *testing.T) { + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + go func() { - if err := Run( + if err := runGateway( + ctx, ":8081", runtime.WithForwardResponseOption( func(_ context.Context, w http.ResponseWriter, _ proto.Message) error { @@ -50,7 +56,7 @@ func TestForwardResponseOption(t *testing.T) { }, ), ); err != nil { - t.Errorf("gw.Run() failed with %v; want success", err) + t.Errorf("runGateway() failed with %v; want success", err) return } }() @@ -460,7 +466,7 @@ func testABELookupNotFound(t *testing.T, port int) { var msg errorBody if err := json.Unmarshal(buf, &msg); err != nil { - t.Errorf("jsonpb.UnmarshalString(%s, &msg) failed with %v; want success", buf, err) + t.Errorf("json.Unmarshal(%s, &msg) failed with %v; want success", buf, err) return } @@ -666,6 +672,20 @@ func testAdditionalBindings(t *testing.T, port int) { } return resp }, + func() *http.Response { + r, w := io.Pipe() + go func() { + defer w.Close() + w.Write([]byte(`"hello"`)) + }() + url := 
fmt.Sprintf("http://localhost:%d/v2/example/echo", port) + resp, err := http.Post(url, "application/json", r) + if err != nil { + t.Errorf("http.Post(%q, %q, %q) failed with %v; want success", url, "application/json", `"hello"`, err) + return nil + } + return resp + }, func() *http.Response { url := fmt.Sprintf("http://localhost:%d/v2/example/echo?value=hello", port) resp, err := http.Get(url) @@ -718,11 +738,87 @@ func TestTimeout(t *testing.T) { } defer resp.Body.Close() - if got, want := resp.StatusCode, http.StatusRequestTimeout; got != want { + if got, want := resp.StatusCode, http.StatusGatewayTimeout; got != want { t.Errorf("resp.StatusCode = %d; want %d", got, want) } } +func TestErrorWithDetails(t *testing.T) { + url := "http://localhost:8080/v2/example/errorwithdetails" + resp, err := http.Get(url) + if err != nil { + t.Errorf("http.Get(%q) failed with %v; want success", url, err) + return + } + defer resp.Body.Close() + + buf, err := ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatalf("iotuil.ReadAll(resp.Body) failed with %v; want success", err) + } + + if got, want := resp.StatusCode, http.StatusInternalServerError; got != want { + t.Errorf("resp.StatusCode = %d; want %d", got, want) + } + + var msg errorBody + if err := json.Unmarshal(buf, &msg); err != nil { + t.Fatalf("json.Unmarshal(%s, &msg) failed with %v; want success", buf, err) + } + + if got, want := msg.Code, int(codes.Unknown); got != want { + t.Errorf("msg.Code = %d; want %d", got, want) + } + if got, want := msg.Error, "with details"; got != want { + t.Errorf("msg.Error = %s; want %s", got, want) + } + if got, want := len(msg.Details), 1; got != want { + t.Fatalf("len(msg.Details) = %q; want %q", got, want) + } + + details, ok := msg.Details[0].(map[string]interface{}) + if got, want := ok, true; got != want { + t.Fatalf("msg.Details[0] got type: %T, want %T", msg.Details[0], map[string]interface{}{}) + } + typ, ok := details["@type"].(string) + if got, want := ok, true; got != want { + t.Fatalf("msg.Details[0][\"@type\"] got type: %T, want %T", typ, "") + } + if got, want := details["@type"], "type.googleapis.com/google.rpc.DebugInfo"; got != want { + t.Errorf("msg.Details[\"@type\"] = %q; want %q", got, want) + } + if got, want := details["detail"], "error debug details"; got != want { + t.Errorf("msg.Details[\"detail\"] = %q; want %q", got, want) + } + entries, ok := details["stack_entries"].([]interface{}) + if got, want := ok, true; got != want { + t.Fatalf("msg.Details[0][\"stack_entries\"] got type: %T, want %T", entries, []string{}) + } + entry, ok := entries[0].(string) + if got, want := ok, true; got != want { + t.Fatalf("msg.Details[0][\"stack_entries\"][0] got type: %T, want %T", entry, "") + } + if got, want := entries[0], "foo:1"; got != want { + t.Errorf("msg.Details[\"stack_entries\"][0] = %q; want %q", got, want) + } +} + +func TestPostWithEmptyBody(t *testing.T) { + url := "http://localhost:8080/v2/example/postwithemptybody/name" + rep, err := http.Post(url, "application/json", nil) + + if err != nil { + t.Errorf("http.Post(%q) failed with %v; want success", url, err) + return + } + + if rep.StatusCode != http.StatusOK { + t.Errorf("http.Post(%q) response code is %d; want %d", url, + rep.StatusCode, http.StatusOK) + return + } +} + func TestUnknownPath(t *testing.T) { url := "http://localhost:8080" resp, err := http.Post(url, "application/json", strings.NewReader("{}")) @@ -762,3 +858,23 @@ func TestMethodNotAllowed(t *testing.T) { t.Logf("%s", buf) } } + +func TestInvalidArgument(t 
*testing.T) { + url := "http://localhost:8080/v1/example/echo/myid/not_int64" + resp, err := http.Get(url) + if err != nil { + t.Errorf("http.Get(%q) failed with %v; want success", url, err) + return + } + defer resp.Body.Close() + buf, err := ioutil.ReadAll(resp.Body) + if err != nil { + t.Errorf("iotuil.ReadAll(resp.Body) failed with %v; want success", err) + return + } + + if got, want := resp.StatusCode, http.StatusBadRequest; got != want { + t.Errorf("resp.StatusCode = %d; want %d", got, want) + t.Logf("%s", buf) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/main_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/main_test.go new file mode 100644 index 00000000..2ce46ae4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/main_test.go @@ -0,0 +1,72 @@ +package integration_test + +import ( + "context" + "flag" + "fmt" + "os" + "testing" + "time" + + "github.com/golang/glog" + "github.com/grpc-ecosystem/grpc-gateway/examples/gateway" + server "github.com/grpc-ecosystem/grpc-gateway/examples/server" + gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" +) + +var ( + endpoint = flag.String("endpoint", "localhost:9090", "endpoint of the gRPC service") + network = flag.String("network", "tcp", `one of "tcp" or "unix". Must be consistent to -endpoint`) + swaggerDir = flag.String("swagger_dir", "examples/proto/examplepb", "path to the directory which contains swagger definitions") +) + +func runGateway(ctx context.Context, addr string, opts ...gwruntime.ServeMuxOption) error { + return gateway.Run(ctx, gateway.Options{ + Addr: addr, + GRPCServer: gateway.Endpoint{ + Network: *network, + Addr: *endpoint, + }, + SwaggerDir: *swaggerDir, + Mux: opts, + }) +} + +func runServers(ctx context.Context) <-chan error { + ch := make(chan error, 2) + go func() { + if err := server.Run(ctx, *network, *endpoint); err != nil { + ch <- fmt.Errorf("cannot run grpc service: %v", err) + } + }() + go func() { + if err := runGateway(ctx, ":8080"); err != nil { + ch <- fmt.Errorf("cannot run gateway service: %v", err) + } + }() + return ch +} + +func TestMain(m *testing.M) { + flag.Parse() + defer glog.Flush() + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + errCh := runServers(ctx) + + ch := make(chan int, 1) + go func() { + time.Sleep(100 * time.Millisecond) + ch <- m.Run() + }() + + select { + case err := <-errCh: + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + case status := <-ch: + cancel() + os.Exit(status) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto_error_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/proto_error_test.go similarity index 65% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto_error_test.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/proto_error_test.go index de3b638f..a893da0d 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto_error_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/integration/proto_error_test.go @@ -1,6 +1,7 @@ -package main +package integration_test import ( + "context" "fmt" "io/ioutil" "net/http" @@ -14,20 +15,27 @@ import ( "google.golang.org/grpc/codes" ) -func TestWithProtoErrorHandler(t *testing.T) { - go func() { - if err := Run( - ":8082", - runtime.WithProtoErrorHandler(runtime.DefaultHTTPProtoErrorHandler), - ); err != nil { - t.Errorf("gw.Run() failed with %v; want 
success", err) - return - } - }() +func runServer(ctx context.Context, t *testing.T, port uint16) { + opt := runtime.WithProtoErrorHandler(runtime.DefaultHTTPProtoErrorHandler) + if err := runGateway(ctx, fmt.Sprintf(":%d", port), opt); err != nil { + t.Errorf("runGateway() failed with %v; want success", err) + } +} +func TestWithProtoErrorHandler(t *testing.T) { + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + const port = 8082 + go runServer(ctx, t, port) + + // Waiting for the server's getting available. + // TODO(yugui) find a better way to wait time.Sleep(100 * time.Millisecond) - testEcho(t, 8082, "application/json") - testEchoBody(t, 8082) + + testEcho(t, port, "application/json") + testEchoBody(t, port) } func TestABEWithProtoErrorHandler(t *testing.T) { @@ -36,19 +44,29 @@ func TestABEWithProtoErrorHandler(t *testing.T) { return } - testABECreate(t, 8082) - testABECreateBody(t, 8082) - testABEBulkCreate(t, 8082) - testABELookup(t, 8082) - testABELookupNotFoundWithProtoError(t) - testABEList(t, 8082) - testABEBulkEcho(t, 8082) - testABEBulkEchoZeroLength(t, 8082) - testAdditionalBindings(t, 8082) + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + const port = 8083 + go runServer(ctx, t, port) + // Waiting for the server's getting available. + // TODO(yugui) find a better way to wait + time.Sleep(100 * time.Millisecond) + + testABECreate(t, port) + testABECreateBody(t, port) + testABEBulkCreate(t, port) + testABELookup(t, port) + testABELookupNotFoundWithProtoError(t, port) + testABEList(t, port) + testABEBulkEcho(t, port) + testABEBulkEchoZeroLength(t, port) + testAdditionalBindings(t, port) } -func testABELookupNotFoundWithProtoError(t *testing.T) { - url := "http://localhost:8082/v1/example/a_bit_of_everything" +func testABELookupNotFoundWithProtoError(t *testing.T, port uint16) { + url := fmt.Sprintf("http://localhost:%d/v1/example/a_bit_of_everything", port) uuid := "not_exist" url = fmt.Sprintf("%s/%s", url, uuid) resp, err := http.Get(url) @@ -98,7 +116,18 @@ func testABELookupNotFoundWithProtoError(t *testing.T) { } func TestUnknownPathWithProtoError(t *testing.T) { - url := "http://localhost:8082" + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + const port = 8084 + go runServer(ctx, t, port) + + // Waiting for the server's getting available. + // TODO(yugui) find a better way to wait + time.Sleep(100 * time.Millisecond) + + url := fmt.Sprintf("http://localhost:%d", port) resp, err := http.Post(url, "application/json", strings.NewReader("{}")) if err != nil { t.Errorf("http.Post(%q) failed with %v; want success", url, err) @@ -134,7 +163,18 @@ func TestUnknownPathWithProtoError(t *testing.T) { } func TestMethodNotAllowedWithProtoError(t *testing.T) { - url := "http://localhost:8082/v1/example/echo/myid" + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + const port = 8085 + go runServer(ctx, t, port) + + // Waiting for the server's getting available. 
+ // TODO(yugui) find a better way to wait + time.Sleep(100 * time.Millisecond) + + url := fmt.Sprintf("http://localhost:%d/v1/example/echo/myid", port) resp, err := http.Get(url) if err != nil { t.Errorf("http.Post(%q) failed with %v; want success", url, err) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main.go deleted file mode 100644 index f6a16a80..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main.go +++ /dev/null @@ -1,109 +0,0 @@ -package main - -import ( - "flag" - "net/http" - "path" - "strings" - - "github.com/golang/glog" - "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "golang.org/x/net/context" - "google.golang.org/grpc" -) - -var ( - echoEndpoint = flag.String("echo_endpoint", "localhost:9090", "endpoint of EchoService") - abeEndpoint = flag.String("more_endpoint", "localhost:9090", "endpoint of ABitOfEverythingService") - flowEndpoint = flag.String("flow_endpoint", "localhost:9090", "endpoint of FlowCombination") - - swaggerDir = flag.String("swagger_dir", "examples/examplepb", "path to the directory which contains swagger definitions") -) - -// newGateway returns a new gateway server which translates HTTP into gRPC. -func newGateway(ctx context.Context, opts ...runtime.ServeMuxOption) (http.Handler, error) { - mux := runtime.NewServeMux(opts...) - dialOpts := []grpc.DialOption{grpc.WithInsecure()} - err := examplepb.RegisterEchoServiceHandlerFromEndpoint(ctx, mux, *echoEndpoint, dialOpts) - if err != nil { - return nil, err - } - err = examplepb.RegisterStreamServiceHandlerFromEndpoint(ctx, mux, *abeEndpoint, dialOpts) - if err != nil { - return nil, err - } - err = examplepb.RegisterABitOfEverythingServiceHandlerFromEndpoint(ctx, mux, *abeEndpoint, dialOpts) - if err != nil { - return nil, err - } - err = examplepb.RegisterFlowCombinationHandlerFromEndpoint(ctx, mux, *flowEndpoint, dialOpts) - if err != nil { - return nil, err - } - return mux, nil -} - -func serveSwagger(w http.ResponseWriter, r *http.Request) { - if !strings.HasSuffix(r.URL.Path, ".swagger.json") { - glog.Errorf("Not Found: %s", r.URL.Path) - http.NotFound(w, r) - return - } - - glog.Infof("Serving %s", r.URL.Path) - p := strings.TrimPrefix(r.URL.Path, "/swagger/") - p = path.Join(*swaggerDir, p) - http.ServeFile(w, r, p) -} - -// allowCORS allows Cross Origin Resoruce Sharing from any origin. -// Don't do this without consideration in production systems. -func allowCORS(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if origin := r.Header.Get("Origin"); origin != "" { - w.Header().Set("Access-Control-Allow-Origin", origin) - if r.Method == "OPTIONS" && r.Header.Get("Access-Control-Request-Method") != "" { - preflightHandler(w, r) - return - } - } - h.ServeHTTP(w, r) - }) -} - -func preflightHandler(w http.ResponseWriter, r *http.Request) { - headers := []string{"Content-Type", "Accept"} - w.Header().Set("Access-Control-Allow-Headers", strings.Join(headers, ",")) - methods := []string{"GET", "HEAD", "POST", "PUT", "DELETE"} - w.Header().Set("Access-Control-Allow-Methods", strings.Join(methods, ",")) - glog.Infof("preflight request for %s", r.URL.Path) - return -} - -// Run starts a HTTP server and blocks forever if successful. 
-func Run(address string, opts ...runtime.ServeMuxOption) error { - ctx := context.Background() - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - mux := http.NewServeMux() - mux.HandleFunc("/swagger/", serveSwagger) - - gw, err := newGateway(ctx, opts...) - if err != nil { - return err - } - mux.Handle("/", gw) - - return http.ListenAndServe(address, allowCORS(mux)) -} - -func main() { - flag.Parse() - defer glog.Flush() - - if err := Run(":8080"); err != nil { - glog.Fatal(err) - } -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main_test.go deleted file mode 100644 index 2742c385..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/main_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "os" - "testing" - "time" - - server "github.com/grpc-ecosystem/grpc-gateway/examples/server" -) - -func runServers() <-chan error { - ch := make(chan error, 2) - go func() { - if err := server.Run(); err != nil { - ch <- fmt.Errorf("cannot run grpc service: %v", err) - } - }() - go func() { - if err := Run(":8080"); err != nil { - ch <- fmt.Errorf("cannot run gateway service: %v", err) - } - }() - return ch -} - -func TestMain(m *testing.M) { - flag.Parse() - errCh := runServers() - - ch := make(chan int, 1) - go func() { - time.Sleep(100 * time.Millisecond) - ch <- m.Run() - }() - - select { - case err := <-errCh: - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - case status := <-ch: - os.Exit(status) - } -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/BUILD.bazel new file mode 100644 index 00000000..3b64196f --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/BUILD.bazel @@ -0,0 +1,63 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") +load("@grpc_ecosystem_grpc_gateway//protoc-gen-swagger:defs.bzl", "protoc_gen_swagger") + +package(default_visibility = ["//visibility:public"]) + +# gazelle:exclude a_bit_of_everything.pb.gw.go +# gazelle:exclude echo_service.pb.gw.go +# gazelle:exclude flow_combination.pb.gw.go +# gazelle:exclude stream.pb.gw.go +# gazelle:exclude wrappers.pb.gw.go + +proto_library( + name = "examplepb_proto", + srcs = [ + "a_bit_of_everything.proto", + "echo_service.proto", + "flow_combination.proto", + "stream.proto", + "wrappers.proto", + ], + deps = [ + "//examples/proto/sub:sub_proto", + "//examples/proto/sub2:sub2_proto", + "//protoc-gen-swagger/options:options_proto", + "@com_github_googleapis_googleapis//google/api:api_proto", + "@com_google_protobuf//:duration_proto", + "@com_google_protobuf//:empty_proto", + "@com_google_protobuf//:timestamp_proto", + "@com_google_protobuf//:wrappers_proto", + ], +) + +go_proto_library( + name = "examplepb_go_proto", + compilers = [ + "@io_bazel_rules_go//proto:go_grpc", + "//protoc-gen-grpc-gateway:go_gen_grpc_gateway", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb", + proto = ":examplepb_proto", + deps = [ + "//examples/proto/sub:go_default_library", + "//examples/proto/sub2:go_default_library", + "//protoc-gen-swagger/options:go_default_library", + "@com_github_golang_protobuf//ptypes/duration:go_default_library", + "@com_github_golang_protobuf//ptypes/empty:go_default_library", + 
"@com_github_golang_protobuf//ptypes/timestamp:go_default_library", + "@com_github_golang_protobuf//ptypes/wrappers:go_default_library", + "@com_github_googleapis_googleapis//google/api:go_default_library", + ], +) + +go_library( + name = "go_default_library", + embed = [":examplepb_go_proto"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb", +) + +protoc_gen_swagger( + name = "expamplepb_protoc_gen_swagger", + proto = ":examplepb_proto", +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.go similarity index 54% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.go index 30afc97d..23703e51 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: examples/examplepb/a_bit_of_everything.proto +// source: examples/proto/examplepb/a_bit_of_everything.proto package examplepb @@ -9,9 +9,10 @@ import math "math" import _ "google.golang.org/genproto/googleapis/api/annotations" import google_protobuf1 "github.com/golang/protobuf/ptypes/empty" import google_protobuf2 "github.com/golang/protobuf/ptypes/duration" -import grpc_gateway_examples_sub "github.com/grpc-ecosystem/grpc-gateway/examples/sub" -import sub2 "github.com/grpc-ecosystem/grpc-gateway/examples/sub2" +import grpc_gateway_examples_sub "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" +import sub2 "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2" import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp" +import _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" import ( context "golang.org/x/net/context" @@ -74,28 +75,28 @@ func (ABitOfEverything_Nested_DeepEnum) EnumDescriptor() ([]byte, []int) { } // Intentionaly complicated message type to cover much features of Protobuf. 
-// NEXT ID: 27 +// NEXT ID: 30 type ABitOfEverything struct { - SingleNested *ABitOfEverything_Nested `protobuf:"bytes,25,opt,name=single_nested,json=singleNested" json:"single_nested,omitempty"` - Uuid string `protobuf:"bytes,1,opt,name=uuid" json:"uuid,omitempty"` - Nested []*ABitOfEverything_Nested `protobuf:"bytes,2,rep,name=nested" json:"nested,omitempty"` - FloatValue float32 `protobuf:"fixed32,3,opt,name=float_value,json=floatValue" json:"float_value,omitempty"` - DoubleValue float64 `protobuf:"fixed64,4,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` - Int64Value int64 `protobuf:"varint,5,opt,name=int64_value,json=int64Value" json:"int64_value,omitempty"` - Uint64Value uint64 `protobuf:"varint,6,opt,name=uint64_value,json=uint64Value" json:"uint64_value,omitempty"` - Int32Value int32 `protobuf:"varint,7,opt,name=int32_value,json=int32Value" json:"int32_value,omitempty"` - Fixed64Value uint64 `protobuf:"fixed64,8,opt,name=fixed64_value,json=fixed64Value" json:"fixed64_value,omitempty"` - Fixed32Value uint32 `protobuf:"fixed32,9,opt,name=fixed32_value,json=fixed32Value" json:"fixed32_value,omitempty"` - BoolValue bool `protobuf:"varint,10,opt,name=bool_value,json=boolValue" json:"bool_value,omitempty"` - StringValue string `protobuf:"bytes,11,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` - // TODO(yugui) add bytes_value - Uint32Value uint32 `protobuf:"varint,13,opt,name=uint32_value,json=uint32Value" json:"uint32_value,omitempty"` - EnumValue NumericEnum `protobuf:"varint,14,opt,name=enum_value,json=enumValue,enum=grpc.gateway.examples.examplepb.NumericEnum" json:"enum_value,omitempty"` - Sfixed32Value int32 `protobuf:"fixed32,15,opt,name=sfixed32_value,json=sfixed32Value" json:"sfixed32_value,omitempty"` - Sfixed64Value int64 `protobuf:"fixed64,16,opt,name=sfixed64_value,json=sfixed64Value" json:"sfixed64_value,omitempty"` - Sint32Value int32 `protobuf:"zigzag32,17,opt,name=sint32_value,json=sint32Value" json:"sint32_value,omitempty"` - Sint64Value int64 `protobuf:"zigzag64,18,opt,name=sint64_value,json=sint64Value" json:"sint64_value,omitempty"` - RepeatedStringValue []string `protobuf:"bytes,19,rep,name=repeated_string_value,json=repeatedStringValue" json:"repeated_string_value,omitempty"` + SingleNested *ABitOfEverything_Nested `protobuf:"bytes,25,opt,name=single_nested,json=singleNested" json:"single_nested,omitempty"` + Uuid string `protobuf:"bytes,1,opt,name=uuid" json:"uuid,omitempty"` + Nested []*ABitOfEverything_Nested `protobuf:"bytes,2,rep,name=nested" json:"nested,omitempty"` + FloatValue float32 `protobuf:"fixed32,3,opt,name=float_value,json=floatValue" json:"float_value,omitempty"` + DoubleValue float64 `protobuf:"fixed64,4,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + Int64Value int64 `protobuf:"varint,5,opt,name=int64_value,json=int64Value" json:"int64_value,omitempty"` + Uint64Value uint64 `protobuf:"varint,6,opt,name=uint64_value,json=uint64Value" json:"uint64_value,omitempty"` + Int32Value int32 `protobuf:"varint,7,opt,name=int32_value,json=int32Value" json:"int32_value,omitempty"` + Fixed64Value uint64 `protobuf:"fixed64,8,opt,name=fixed64_value,json=fixed64Value" json:"fixed64_value,omitempty"` + Fixed32Value uint32 `protobuf:"fixed32,9,opt,name=fixed32_value,json=fixed32Value" json:"fixed32_value,omitempty"` + BoolValue bool `protobuf:"varint,10,opt,name=bool_value,json=boolValue" json:"bool_value,omitempty"` + StringValue string 
`protobuf:"bytes,11,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + BytesValue []byte `protobuf:"bytes,29,opt,name=bytes_value,json=bytesValue,proto3" json:"bytes_value,omitempty"` + Uint32Value uint32 `protobuf:"varint,13,opt,name=uint32_value,json=uint32Value" json:"uint32_value,omitempty"` + EnumValue NumericEnum `protobuf:"varint,14,opt,name=enum_value,json=enumValue,enum=grpc.gateway.examples.examplepb.NumericEnum" json:"enum_value,omitempty"` + Sfixed32Value int32 `protobuf:"fixed32,15,opt,name=sfixed32_value,json=sfixed32Value" json:"sfixed32_value,omitempty"` + Sfixed64Value int64 `protobuf:"fixed64,16,opt,name=sfixed64_value,json=sfixed64Value" json:"sfixed64_value,omitempty"` + Sint32Value int32 `protobuf:"zigzag32,17,opt,name=sint32_value,json=sint32Value" json:"sint32_value,omitempty"` + Sint64Value int64 `protobuf:"zigzag64,18,opt,name=sint64_value,json=sint64Value" json:"sint64_value,omitempty"` + RepeatedStringValue []string `protobuf:"bytes,19,rep,name=repeated_string_value,json=repeatedStringValue" json:"repeated_string_value,omitempty"` // Types that are valid to be assigned to OneofValue: // *ABitOfEverything_OneofEmpty // *ABitOfEverything_OneofString @@ -219,6 +220,13 @@ func (m *ABitOfEverything) GetStringValue() string { return "" } +func (m *ABitOfEverything) GetBytesValue() []byte { + if m != nil { + return m.BytesValue + } + return nil +} + func (m *ABitOfEverything) GetUint32Value() uint32 { if m != nil { return m.Uint32Value @@ -428,9 +436,51 @@ func (m *ABitOfEverything_Nested) GetOk() ABitOfEverything_Nested_DeepEnum { return ABitOfEverything_Nested_FALSE } +type Body struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` +} + +func (m *Body) Reset() { *m = Body{} } +func (m *Body) String() string { return proto.CompactTextString(m) } +func (*Body) ProtoMessage() {} +func (*Body) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} } + +func (m *Body) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type MessageWithBody struct { + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Data *Body `protobuf:"bytes,2,opt,name=data" json:"data,omitempty"` +} + +func (m *MessageWithBody) Reset() { *m = MessageWithBody{} } +func (m *MessageWithBody) String() string { return proto.CompactTextString(m) } +func (*MessageWithBody) ProtoMessage() {} +func (*MessageWithBody) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} } + +func (m *MessageWithBody) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *MessageWithBody) GetData() *Body { + if m != nil { + return m.Data + } + return nil +} + func init() { proto.RegisterType((*ABitOfEverything)(nil), "grpc.gateway.examples.examplepb.ABitOfEverything") proto.RegisterType((*ABitOfEverything_Nested)(nil), "grpc.gateway.examples.examplepb.ABitOfEverything.Nested") + proto.RegisterType((*Body)(nil), "grpc.gateway.examples.examplepb.Body") + proto.RegisterType((*MessageWithBody)(nil), "grpc.gateway.examples.examplepb.MessageWithBody") proto.RegisterEnum("grpc.gateway.examples.examplepb.NumericEnum", NumericEnum_name, NumericEnum_value) proto.RegisterEnum("grpc.gateway.examples.examplepb.ABitOfEverything_Nested_DeepEnum", ABitOfEverything_Nested_DeepEnum_name, ABitOfEverything_Nested_DeepEnum_value) } @@ -452,10 +502,20 @@ type ABitOfEverythingServiceClient interface { Update(ctx context.Context, in *ABitOfEverything, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) Delete(ctx 
context.Context, in *sub2.IdMessage, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) GetQuery(ctx context.Context, in *ABitOfEverything, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + // Echo allows posting a StringMessage value. + // + // It also exposes multiple bindings. + // + // This makes it useful when validating that the OpenAPI v2 API + // description exposes documentation correctly on all paths + // defined as additional_bindings in the proto. Echo(ctx context.Context, in *grpc_gateway_examples_sub.StringMessage, opts ...grpc.CallOption) (*grpc_gateway_examples_sub.StringMessage, error) DeepPathEcho(ctx context.Context, in *ABitOfEverything, opts ...grpc.CallOption) (*ABitOfEverything, error) NoBindings(ctx context.Context, in *google_protobuf2.Duration, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) Timeout(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + ErrorWithDetails(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + GetMessageWithBody(ctx context.Context, in *MessageWithBody, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + PostWithEmptyBody(ctx context.Context, in *Body, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) } type aBitOfEverythingServiceClient struct { @@ -556,6 +616,33 @@ func (c *aBitOfEverythingServiceClient) Timeout(ctx context.Context, in *google_ return out, nil } +func (c *aBitOfEverythingServiceClient) ErrorWithDetails(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { + out := new(google_protobuf1.Empty) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.ABitOfEverythingService/ErrorWithDetails", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *aBitOfEverythingServiceClient) GetMessageWithBody(ctx context.Context, in *MessageWithBody, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { + out := new(google_protobuf1.Empty) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.ABitOfEverythingService/GetMessageWithBody", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *aBitOfEverythingServiceClient) PostWithEmptyBody(ctx context.Context, in *Body, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { + out := new(google_protobuf1.Empty) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.ABitOfEverythingService/PostWithEmptyBody", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // Server API for ABitOfEverythingService service type ABitOfEverythingServiceServer interface { @@ -565,10 +652,20 @@ type ABitOfEverythingServiceServer interface { Update(context.Context, *ABitOfEverything) (*google_protobuf1.Empty, error) Delete(context.Context, *sub2.IdMessage) (*google_protobuf1.Empty, error) GetQuery(context.Context, *ABitOfEverything) (*google_protobuf1.Empty, error) + // Echo allows posting a StringMessage value. + // + // It also exposes multiple bindings. + // + // This makes it useful when validating that the OpenAPI v2 API + // description exposes documentation correctly on all paths + // defined as additional_bindings in the proto. 
Echo(context.Context, *grpc_gateway_examples_sub.StringMessage) (*grpc_gateway_examples_sub.StringMessage, error) DeepPathEcho(context.Context, *ABitOfEverything) (*ABitOfEverything, error) NoBindings(context.Context, *google_protobuf2.Duration) (*google_protobuf1.Empty, error) Timeout(context.Context, *google_protobuf1.Empty) (*google_protobuf1.Empty, error) + ErrorWithDetails(context.Context, *google_protobuf1.Empty) (*google_protobuf1.Empty, error) + GetMessageWithBody(context.Context, *MessageWithBody) (*google_protobuf1.Empty, error) + PostWithEmptyBody(context.Context, *Body) (*google_protobuf1.Empty, error) } func RegisterABitOfEverythingServiceServer(s *grpc.Server, srv ABitOfEverythingServiceServer) { @@ -755,6 +852,60 @@ func _ABitOfEverythingService_Timeout_Handler(srv interface{}, ctx context.Conte return interceptor(ctx, in, info, handler) } +func _ABitOfEverythingService_ErrorWithDetails_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(google_protobuf1.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ABitOfEverythingServiceServer).ErrorWithDetails(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.ABitOfEverythingService/ErrorWithDetails", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ABitOfEverythingServiceServer).ErrorWithDetails(ctx, req.(*google_protobuf1.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +func _ABitOfEverythingService_GetMessageWithBody_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MessageWithBody) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ABitOfEverythingServiceServer).GetMessageWithBody(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.ABitOfEverythingService/GetMessageWithBody", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ABitOfEverythingServiceServer).GetMessageWithBody(ctx, req.(*MessageWithBody)) + } + return interceptor(ctx, in, info, handler) +} + +func _ABitOfEverythingService_PostWithEmptyBody_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Body) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ABitOfEverythingServiceServer).PostWithEmptyBody(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.ABitOfEverythingService/PostWithEmptyBody", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ABitOfEverythingServiceServer).PostWithEmptyBody(ctx, req.(*Body)) + } + return interceptor(ctx, in, info, handler) +} + var _ABitOfEverythingService_serviceDesc = grpc.ServiceDesc{ ServiceName: "grpc.gateway.examples.examplepb.ABitOfEverythingService", HandlerType: (*ABitOfEverythingServiceServer)(nil), @@ -799,9 +950,85 @@ var _ABitOfEverythingService_serviceDesc = grpc.ServiceDesc{ MethodName: "Timeout", Handler: _ABitOfEverythingService_Timeout_Handler, }, + { + MethodName: "ErrorWithDetails", + Handler: _ABitOfEverythingService_ErrorWithDetails_Handler, + }, + { + 
MethodName: "GetMessageWithBody", + Handler: _ABitOfEverythingService_GetMessageWithBody_Handler, + }, + { + MethodName: "PostWithEmptyBody", + Handler: _ABitOfEverythingService_PostWithEmptyBody_Handler, + }, }, Streams: []grpc.StreamDesc{}, - Metadata: "examples/examplepb/a_bit_of_everything.proto", + Metadata: "examples/proto/examplepb/a_bit_of_everything.proto", +} + +// Client API for CamelCaseServiceName service + +type CamelCaseServiceNameClient interface { + Empty(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) +} + +type camelCaseServiceNameClient struct { + cc *grpc.ClientConn +} + +func NewCamelCaseServiceNameClient(cc *grpc.ClientConn) CamelCaseServiceNameClient { + return &camelCaseServiceNameClient{cc} +} + +func (c *camelCaseServiceNameClient) Empty(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { + out := new(google_protobuf1.Empty) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.camelCaseServiceName/Empty", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for CamelCaseServiceName service + +type CamelCaseServiceNameServer interface { + Empty(context.Context, *google_protobuf1.Empty) (*google_protobuf1.Empty, error) +} + +func RegisterCamelCaseServiceNameServer(s *grpc.Server, srv CamelCaseServiceNameServer) { + s.RegisterService(&_CamelCaseServiceName_serviceDesc, srv) +} + +func _CamelCaseServiceName_Empty_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(google_protobuf1.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CamelCaseServiceNameServer).Empty(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.camelCaseServiceName/Empty", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CamelCaseServiceNameServer).Empty(ctx, req.(*google_protobuf1.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +var _CamelCaseServiceName_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.gateway.examples.examplepb.camelCaseServiceName", + HandlerType: (*CamelCaseServiceNameServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Empty", + Handler: _CamelCaseServiceName_Empty_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "examples/proto/examplepb/a_bit_of_everything.proto", } // Client API for AnotherServiceWithNoBindings service @@ -865,93 +1092,138 @@ var _AnotherServiceWithNoBindings_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "examples/examplepb/a_bit_of_everything.proto", + Metadata: "examples/proto/examplepb/a_bit_of_everything.proto", } -func init() { proto.RegisterFile("examples/examplepb/a_bit_of_everything.proto", fileDescriptor1) } +func init() { proto.RegisterFile("examples/proto/examplepb/a_bit_of_everything.proto", fileDescriptor1) } var fileDescriptor1 = []byte{ - // 1297 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0xd8, 0x89, 0x13, 0x3f, 0xc7, 0x89, 0x33, 0x69, 0x53, 0xd7, 0x2d, 0x64, 0x71, 0x01, - 0xad, 0x42, 0xb5, 0xab, 0xba, 0x15, 0x6a, 0x23, 0x41, 0x95, 0x34, 0x86, 0x22, 0xda, 0xb4, 0xdd, - 0xfe, 0x41, 0x8a, 0x5a, 0xac, 0xb5, 0x3d, 0xb6, 0x57, 0xf1, 0xee, 0x2c, 
0xbb, 0xb3, 0x26, 0x96, - 0x31, 0x07, 0x0e, 0x5c, 0x38, 0x72, 0xef, 0x05, 0x09, 0x71, 0xe1, 0xc8, 0x19, 0xbe, 0x03, 0x5f, - 0x81, 0x03, 0x1f, 0x03, 0xed, 0xcc, 0xec, 0x76, 0xd7, 0x89, 0xe5, 0x26, 0x45, 0xbd, 0xed, 0xcc, - 0x7b, 0xef, 0xf7, 0x7b, 0x7f, 0xe6, 0xbd, 0x99, 0x85, 0xab, 0xe4, 0xc8, 0xb4, 0xdd, 0x3e, 0xf1, - 0x75, 0xf9, 0xe1, 0x36, 0x75, 0xb3, 0xd1, 0xb4, 0x58, 0x83, 0x76, 0x1a, 0x64, 0x40, 0xbc, 0x21, - 0xeb, 0x59, 0x4e, 0x57, 0x73, 0x3d, 0xca, 0x28, 0xde, 0xec, 0x7a, 0x6e, 0x4b, 0xeb, 0x9a, 0x8c, - 0x7c, 0x6b, 0x0e, 0xb5, 0xc8, 0x54, 0x8b, 0x4d, 0x2b, 0x97, 0xbb, 0x94, 0x76, 0xfb, 0x44, 0x37, - 0x5d, 0x4b, 0x37, 0x1d, 0x87, 0x32, 0x93, 0x59, 0xd4, 0xf1, 0x85, 0x79, 0xe5, 0x92, 0x94, 0xf2, - 0x55, 0x33, 0xe8, 0xe8, 0xc4, 0x76, 0xd9, 0x50, 0x0a, 0xdf, 0x9d, 0x14, 0xb6, 0x03, 0x8f, 0x5b, - 0x4b, 0x79, 0x25, 0xf6, 0xd4, 0x0f, 0x9a, 0xba, 0x4d, 0x7c, 0xdf, 0xec, 0x92, 0x08, 0x38, 0x29, - 0xab, 0x4d, 0x08, 0x37, 0x27, 0x81, 0x99, 0x65, 0x13, 0x9f, 0x99, 0xb6, 0x2b, 0x14, 0xaa, 0x7f, - 0xad, 0x42, 0x69, 0x67, 0xd7, 0x62, 0x0f, 0x3a, 0xf5, 0x38, 0x60, 0xfc, 0x02, 0x8a, 0xbe, 0xe5, - 0x74, 0xfb, 0xa4, 0xe1, 0x10, 0x9f, 0x91, 0x76, 0xf9, 0xa2, 0x82, 0xd4, 0x42, 0xed, 0xa6, 0x36, - 0x23, 0x05, 0xda, 0x24, 0x92, 0xb6, 0xcf, 0xed, 0x8d, 0x65, 0x01, 0x27, 0x56, 0x18, 0xc3, 0x7c, - 0x10, 0x58, 0xed, 0x32, 0x52, 0x90, 0x9a, 0x37, 0xf8, 0x37, 0x7e, 0x08, 0x39, 0xc9, 0x95, 0x51, - 0xb2, 0x6f, 0xc4, 0x25, 0x71, 0xf0, 0x26, 0x14, 0x3a, 0x7d, 0x6a, 0xb2, 0xc6, 0xc0, 0xec, 0x07, - 0xa4, 0x9c, 0x55, 0x90, 0x9a, 0x31, 0x80, 0x6f, 0x3d, 0x0b, 0x77, 0xf0, 0x7b, 0xb0, 0xdc, 0xa6, - 0x41, 0xb3, 0x4f, 0xa4, 0xc6, 0xbc, 0x82, 0x54, 0x64, 0x14, 0xc4, 0x9e, 0x50, 0xd9, 0x84, 0x82, - 0xe5, 0xb0, 0x8f, 0x6f, 0x48, 0x8d, 0x05, 0x05, 0xa9, 0x59, 0x03, 0xf8, 0x56, 0x8c, 0x11, 0x24, - 0x35, 0x72, 0x0a, 0x52, 0xe7, 0x8d, 0x42, 0x90, 0x50, 0x11, 0x18, 0xd7, 0x6b, 0x52, 0x63, 0x51, - 0x41, 0xea, 0x02, 0xc7, 0xb8, 0x5e, 0x13, 0x0a, 0x57, 0xa0, 0xd8, 0xb1, 0x8e, 0x48, 0x3b, 0x06, - 0x59, 0x52, 0x90, 0x9a, 0x33, 0x96, 0xe5, 0x66, 0x5a, 0x29, 0xc6, 0xc9, 0x2b, 0x48, 0x5d, 0x94, - 0x4a, 0x11, 0xd2, 0x3b, 0x00, 0x4d, 0x4a, 0xfb, 0x52, 0x03, 0x14, 0xa4, 0x2e, 0x19, 0xf9, 0x70, - 0x27, 0x76, 0xd6, 0x67, 0x9e, 0xe5, 0x74, 0xa5, 0x42, 0x81, 0xe7, 0xbf, 0x20, 0xf6, 0x52, 0xf1, - 0xc4, 0x2c, 0x45, 0x05, 0xa9, 0x45, 0x11, 0x4f, 0x44, 0xf2, 0x25, 0x00, 0x71, 0x02, 0x5b, 0x2a, - 0xac, 0x28, 0x48, 0x5d, 0xa9, 0x5d, 0x9d, 0x59, 0xad, 0xfd, 0xc0, 0x26, 0x9e, 0xd5, 0xaa, 0x3b, - 0x81, 0x6d, 0xe4, 0x43, 0x7b, 0x01, 0xf6, 0x01, 0xac, 0xf8, 0xe9, 0xb8, 0x56, 0x15, 0xa4, 0xae, - 0x1a, 0x45, 0x3f, 0x15, 0x58, 0xac, 0x16, 0xe7, 0xa8, 0xa4, 0x20, 0xb5, 0x14, 0xa9, 0x25, 0xaa, - 0xe1, 0x27, 0xbd, 0x5f, 0x53, 0x90, 0xba, 0x66, 0x14, 0xfc, 0x84, 0xf7, 0x52, 0x25, 0xc6, 0xc1, - 0x0a, 0x52, 0xb1, 0x50, 0x89, 0x50, 0x6a, 0x70, 0xde, 0x23, 0x2e, 0x31, 0x19, 0x69, 0x37, 0x52, - 0xf9, 0x5a, 0x57, 0xb2, 0x6a, 0xde, 0x58, 0x8f, 0x84, 0x8f, 0x13, 0x79, 0xbb, 0x05, 0x05, 0xea, - 0x90, 0x70, 0x6c, 0x84, 0x5d, 0x5d, 0x3e, 0xc7, 0xfb, 0x65, 0x43, 0x13, 0xdd, 0xa7, 0x45, 0xdd, - 0xa7, 0xd5, 0x43, 0xe9, 0xdd, 0x39, 0x03, 0xb8, 0x32, 0x5f, 0xe1, 0x2b, 0xb0, 0x2c, 0x4c, 0x05, - 0x57, 0xf9, 0x7c, 0x58, 0x95, 0xbb, 0x73, 0x86, 0x00, 0x14, 0x24, 0xf8, 0x39, 0xe4, 0x6d, 0xd3, - 0x95, 0x7e, 0x6c, 0xf0, 0x0e, 0xb9, 0x7d, 0xfa, 0x0e, 0xb9, 0x6f, 0xba, 0xdc, 0xdd, 0xba, 0xc3, - 0xbc, 0xa1, 0xb1, 0x64, 0xcb, 0x25, 0x3e, 0x82, 0x75, 0xdb, 0x74, 0xdd, 0xc9, 0x78, 0x2f, 0x70, - 0x9e, 0xbb, 0x67, 0xe2, 0x71, 0x53, 0xf9, 0x11, 0x84, 0x6b, 0xf6, 0xe4, 0x7e, 0x82, 0x59, 0x74, - 
0xad, 0x64, 0x2e, 0xbf, 0x19, 0xb3, 0x98, 0x04, 0xc7, 0x99, 0x13, 0xfb, 0x78, 0x1b, 0xca, 0x0e, - 0x75, 0xee, 0x50, 0x67, 0x40, 0x9c, 0x70, 0xd2, 0x9a, 0xfd, 0x7d, 0xd3, 0x16, 0x6d, 0x5f, 0xae, - 0xf0, 0xc6, 0x98, 0x2a, 0xc7, 0x77, 0x60, 0x35, 0x9e, 0xa3, 0xd2, 0xe3, 0x4b, 0xbc, 0xe2, 0x95, - 0x63, 0x15, 0x7f, 0x12, 0xe9, 0x19, 0x2b, 0xb1, 0x89, 0x00, 0x79, 0x0e, 0xf1, 0x49, 0x6a, 0x24, - 0x1a, 0xea, 0xb2, 0x92, 0x3d, 0x75, 0x43, 0xad, 0x45, 0x40, 0xf5, 0xa8, 0xb1, 0x2a, 0xbf, 0x21, - 0xc8, 0xbd, 0x1a, 0xb7, 0x8e, 0x69, 0x93, 0x68, 0xdc, 0x86, 0xdf, 0x78, 0x03, 0x72, 0xa6, 0x4d, - 0x03, 0x87, 0x95, 0x33, 0xbc, 0xc3, 0xe5, 0x0a, 0x3f, 0x82, 0x0c, 0x3d, 0xe4, 0xb3, 0x72, 0xa5, - 0xb6, 0x73, 0xd6, 0x11, 0xac, 0xed, 0x11, 0xe2, 0x72, 0xc7, 0x32, 0xf4, 0xb0, 0xba, 0x09, 0x4b, - 0xd1, 0x1a, 0xe7, 0x61, 0xe1, 0xb3, 0x9d, 0x7b, 0x8f, 0xeb, 0xa5, 0x39, 0xbc, 0x04, 0xf3, 0x4f, - 0x8c, 0xa7, 0xf5, 0x12, 0xaa, 0x58, 0x50, 0x4c, 0x1d, 0x4c, 0x5c, 0x82, 0xec, 0x21, 0x19, 0x4a, - 0x7f, 0xc3, 0x4f, 0xbc, 0x0b, 0x0b, 0x22, 0x3b, 0x99, 0x33, 0x8c, 0x1b, 0x61, 0xba, 0x9d, 0xb9, - 0x89, 0x2a, 0x7b, 0xb0, 0x71, 0xf2, 0xd9, 0x3c, 0x81, 0xf3, 0x5c, 0x92, 0x33, 0x9f, 0x44, 0xf9, - 0x3e, 0x42, 0x99, 0x3c, 0x67, 0x27, 0xa0, 0xec, 0x27, 0x51, 0xde, 0xe4, 0x5a, 0x7b, 0xc5, 0xbf, - 0x5b, 0x8c, 0x86, 0x0d, 0xdf, 0xda, 0x52, 0xa0, 0x90, 0x08, 0x37, 0x4c, 0xec, 0x41, 0xdd, 0x78, - 0x50, 0x9a, 0xc3, 0x8b, 0x90, 0x7d, 0xb0, 0x5f, 0x2f, 0xa1, 0xda, 0xbf, 0xcb, 0x70, 0x61, 0x12, - 0xf7, 0x31, 0xf1, 0x06, 0x56, 0x8b, 0xe0, 0x97, 0x59, 0xc8, 0xdd, 0xf1, 0xc2, 0xd3, 0x83, 0xaf, - 0x9d, 0xda, 0xb9, 0xca, 0xe9, 0x4d, 0xaa, 0xbf, 0x67, 0x7e, 0xf8, 0xfb, 0x9f, 0x9f, 0x33, 0xbf, - 0x66, 0xaa, 0xbf, 0x64, 0xf4, 0xc1, 0xb5, 0xe8, 0xed, 0x75, 0xd2, 0xcb, 0x4b, 0x1f, 0x25, 0x6e, - 0xf0, 0xb1, 0x3e, 0x4a, 0x5e, 0xd7, 0x63, 0x7d, 0x94, 0x98, 0xe3, 0x63, 0xdd, 0x27, 0xae, 0xe9, - 0x99, 0x8c, 0x7a, 0xfa, 0x28, 0x48, 0x09, 0x46, 0x89, 0x1b, 0x61, 0xac, 0x8f, 0x52, 0xd7, 0x48, - 0xb4, 0x4e, 0xc8, 0x5f, 0x5d, 0xa0, 0x63, 0x7d, 0x94, 0x1c, 0x87, 0x9f, 0xf8, 0xcc, 0x73, 0x3d, - 0xd2, 0xb1, 0x8e, 0xf4, 0xad, 0xb1, 0x20, 0x49, 0x98, 0xf9, 0x93, 0x38, 0xfe, 0x24, 0x91, 0x3f, - 0x61, 0x90, 0x76, 0x72, 0xda, 0xac, 0x19, 0xe3, 0x97, 0x08, 0x40, 0x14, 0x68, 0x97, 0xb6, 0x87, - 0x6f, 0xa9, 0x48, 0x5b, 0xbc, 0x46, 0xef, 0x57, 0x37, 0x67, 0x54, 0x68, 0x1b, 0x6d, 0xe1, 0xef, - 0x20, 0x77, 0x8f, 0xd2, 0xc3, 0xc0, 0xc5, 0xab, 0x5a, 0xf8, 0x04, 0xd5, 0xbe, 0x68, 0xdf, 0x17, - 0x8f, 0xd0, 0xb3, 0x30, 0x6b, 0x9c, 0x59, 0xc5, 0x1f, 0xce, 0x3c, 0x1b, 0xe1, 0xbb, 0x71, 0x8c, - 0x7f, 0x44, 0x90, 0x7b, 0xea, 0xb6, 0xcf, 0x78, 0x7e, 0xa7, 0x5c, 0xd1, 0xd5, 0x6b, 0xdc, 0x8b, - 0x8f, 0x2a, 0xaf, 0xe9, 0x45, 0x98, 0x06, 0x13, 0x72, 0x7b, 0xa4, 0x4f, 0x18, 0x39, 0x9e, 0x86, - 0x69, 0x2c, 0x32, 0xd6, 0xad, 0xd7, 0x8d, 0xf5, 0x27, 0x04, 0x4b, 0x9f, 0x13, 0xf6, 0x28, 0x20, - 0xde, 0xf0, 0xff, 0x8c, 0xf6, 0x06, 0xf7, 0x43, 0xc3, 0x57, 0x67, 0xf9, 0xf1, 0x4d, 0xc8, 0x1c, - 0x79, 0xf3, 0x27, 0x82, 0xf9, 0x7a, 0xab, 0x47, 0xb1, 0x3a, 0xc5, 0x13, 0x3f, 0x68, 0x6a, 0x62, - 0xd0, 0x46, 0x89, 0x78, 0x6d, 0xcd, 0x6a, 0x8b, 0xbb, 0xf4, 0x62, 0xb6, 0x4b, 0xa4, 0xd5, 0xa3, - 0xfa, 0x48, 0xb4, 0xd1, 0xc1, 0xc5, 0x6a, 0x49, 0x1f, 0xd4, 0x62, 0xfd, 0x50, 0xb6, 0x2d, 0x06, - 0xe7, 0x01, 0xc6, 0xc7, 0x44, 0xf8, 0x0f, 0x04, 0xcb, 0xe1, 0xdd, 0xf4, 0xd0, 0x64, 0x3d, 0x1e, - 0xc9, 0xdb, 0x69, 0xae, 0xdb, 0x3c, 0xb6, 0x5b, 0xd5, 0x1b, 0x33, 0xcb, 0x9e, 0xfa, 0x0b, 0xd3, - 0xc2, 0x9b, 0x9b, 0x1f, 0xb5, 0x1d, 0x80, 0x7d, 0xba, 0x6b, 0x39, 0x6d, 0xcb, 0xe9, 0xfa, 0xf8, - 0xe2, 0xb1, 0xaa, 0xee, 
0xc9, 0xbf, 0xc7, 0xa9, 0x05, 0x9f, 0xc3, 0xcf, 0x60, 0x31, 0x7c, 0x9a, - 0xd0, 0x80, 0xe1, 0x29, 0x4a, 0x53, 0x8d, 0x2f, 0x71, 0xf7, 0xcf, 0xe3, 0xf5, 0x64, 0x3e, 0x99, - 0x00, 0xab, 0x7d, 0x0d, 0x97, 0x77, 0x1c, 0xca, 0x7a, 0xc4, 0x93, 0x17, 0xcc, 0x57, 0x16, 0xeb, - 0x25, 0x9c, 0xfd, 0x34, 0xe5, 0xfa, 0x69, 0xa9, 0xe7, 0x76, 0x0b, 0x07, 0xf9, 0x38, 0xb3, 0xcd, - 0x1c, 0x17, 0x5f, 0xff, 0x2f, 0x00, 0x00, 0xff, 0xff, 0xab, 0xe5, 0x92, 0x0d, 0xc9, 0x0f, 0x00, - 0x00, + // 2019 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xcd, 0x6f, 0xdb, 0xc8, + 0x15, 0xf7, 0x48, 0xb2, 0x22, 0x3d, 0xf9, 0x43, 0x1e, 0xe7, 0xc3, 0x51, 0x9c, 0xf5, 0x44, 0xc9, + 0xb6, 0x8c, 0x37, 0x22, 0x37, 0xca, 0xa2, 0xd8, 0x08, 0x68, 0xb7, 0xb2, 0xad, 0x4d, 0x82, 0x6c, + 0x9c, 0x84, 0xd9, 0x4d, 0x83, 0x34, 0x5b, 0x83, 0x22, 0x47, 0x12, 0x13, 0x91, 0xc3, 0x92, 0x43, + 0x27, 0xaa, 0xeb, 0x16, 0xed, 0x02, 0x2d, 0xf6, 0x54, 0xc0, 0xbd, 0xef, 0xa5, 0x40, 0xd1, 0x4b, + 0x0f, 0x3d, 0xf4, 0x54, 0xa0, 0x3d, 0xf6, 0xd0, 0x63, 0x81, 0xde, 0x0b, 0xf4, 0xd4, 0x63, 0xff, + 0x82, 0x62, 0x86, 0xa4, 0x42, 0xc9, 0x16, 0x1c, 0x39, 0x8b, 0xbd, 0x24, 0x9c, 0x99, 0xdf, 0x7b, + 0xef, 0xf7, 0xde, 0xbc, 0xf7, 0xe6, 0xc9, 0x50, 0xa7, 0xaf, 0x0c, 0xc7, 0xeb, 0xd3, 0x40, 0xf3, + 0x7c, 0xc6, 0x99, 0x16, 0x2f, 0xbd, 0xb6, 0x66, 0xec, 0xb4, 0x6d, 0xbe, 0xc3, 0x3a, 0x3b, 0x74, + 0x97, 0xfa, 0x03, 0xde, 0xb3, 0xdd, 0xae, 0x2a, 0x31, 0x78, 0xad, 0xeb, 0x7b, 0xa6, 0xda, 0x35, + 0x38, 0x7d, 0x69, 0x0c, 0xd4, 0x44, 0x81, 0x3a, 0x14, 0xad, 0xac, 0x76, 0x19, 0xeb, 0xf6, 0xa9, + 0x66, 0x78, 0xb6, 0x66, 0xb8, 0x2e, 0xe3, 0x06, 0xb7, 0x99, 0x1b, 0x44, 0xe2, 0x95, 0x0b, 0xf1, + 0xa9, 0x5c, 0xb5, 0xc3, 0x8e, 0x46, 0x1d, 0x8f, 0x0f, 0xe2, 0xc3, 0x77, 0xc6, 0x0f, 0xad, 0xd0, + 0x97, 0xd2, 0xf1, 0x39, 0x19, 0xe3, 0x1b, 0x84, 0x6d, 0xcd, 0xa1, 0x41, 0x60, 0x74, 0x69, 0x8c, + 0xb8, 0x74, 0x18, 0x51, 0x1f, 0x83, 0xac, 0x8d, 0x1b, 0xe1, 0xb6, 0x43, 0x03, 0x6e, 0x38, 0x5e, + 0x0c, 0xb8, 0x26, 0xff, 0x33, 0x6b, 0x5d, 0xea, 0xd6, 0x82, 0x97, 0x46, 0xb7, 0x4b, 0x7d, 0x8d, + 0x79, 0xd2, 0x89, 0xc3, 0x0e, 0x55, 0xff, 0x5a, 0x86, 0x72, 0x73, 0xc3, 0xe6, 0xf7, 0x3b, 0xad, + 0x61, 0xa8, 0xf0, 0xe7, 0x30, 0x1f, 0xd8, 0x6e, 0xb7, 0x4f, 0x77, 0x5c, 0x1a, 0x70, 0x6a, 0xad, + 0x9c, 0x27, 0x48, 0x29, 0xd5, 0x3f, 0x54, 0x8f, 0x09, 0x9e, 0x3a, 0xae, 0x49, 0xdd, 0x96, 0xf2, + 0xfa, 0x5c, 0xa4, 0x2e, 0x5a, 0x61, 0x0c, 0xb9, 0x30, 0xb4, 0xad, 0x15, 0x44, 0x90, 0x52, 0xd4, + 0xe5, 0x37, 0x7e, 0x00, 0xf9, 0xd8, 0x56, 0x86, 0x64, 0xdf, 0xca, 0x56, 0xac, 0x07, 0xaf, 0x41, + 0xa9, 0xd3, 0x67, 0x06, 0xdf, 0xd9, 0x35, 0xfa, 0x21, 0x5d, 0xc9, 0x12, 0xa4, 0x64, 0x74, 0x90, + 0x5b, 0x8f, 0xc5, 0x0e, 0xbe, 0x04, 0x73, 0x16, 0x0b, 0xdb, 0x7d, 0x1a, 0x23, 0x72, 0x04, 0x29, + 0x48, 0x2f, 0x45, 0x7b, 0x11, 0x64, 0x0d, 0x4a, 0xb6, 0xcb, 0xbf, 0xf3, 0x41, 0x8c, 0x98, 0x25, + 0x48, 0xc9, 0xea, 0x20, 0xb7, 0x86, 0x3a, 0xc2, 0x34, 0x22, 0x4f, 0x90, 0x92, 0xd3, 0x4b, 0x61, + 0x0a, 0x12, 0xe9, 0xb8, 0x51, 0x8f, 0x11, 0xa7, 0x08, 0x52, 0x66, 0xa5, 0x8e, 0x1b, 0xf5, 0x08, + 0x70, 0x19, 0xe6, 0x3b, 0xf6, 0x2b, 0x6a, 0x0d, 0x95, 0x14, 0x08, 0x52, 0xf2, 0xfa, 0x5c, 0xbc, + 0x39, 0x0a, 0x1a, 0xea, 0x29, 0x12, 0xa4, 0x9c, 0x8a, 0x41, 0x89, 0xa6, 0x8b, 0x00, 0x6d, 0xc6, + 0xfa, 0x31, 0x02, 0x08, 0x52, 0x0a, 0x7a, 0x51, 0xec, 0x0c, 0xc9, 0x06, 0xdc, 0xb7, 0xdd, 0x6e, + 0x0c, 0x28, 0xc9, 0xf8, 0x97, 0xa2, 0xbd, 0x21, 0xd9, 0xf6, 0x80, 0xd3, 0x20, 0x46, 0x5c, 0x24, + 0x48, 0x99, 0xd3, 0x41, 0x6e, 0x8d, 0x38, 0x3c, 0xa4, 0x31, 0x4f, 0x90, 0x32, 0x1f, 0x39, 
0x9c, + 0xb0, 0xb8, 0x0b, 0x40, 0xdd, 0xd0, 0x89, 0x01, 0x0b, 0x04, 0x29, 0x0b, 0xf5, 0x6b, 0xc7, 0x5e, + 0xe7, 0x76, 0xe8, 0x50, 0xdf, 0x36, 0x5b, 0x6e, 0xe8, 0xe8, 0x45, 0x21, 0x1f, 0x29, 0x7b, 0x17, + 0x16, 0x82, 0x51, 0xc7, 0x17, 0x09, 0x52, 0x16, 0xf5, 0xf9, 0x60, 0xc4, 0xf3, 0x21, 0x6c, 0x18, + 0xc4, 0x32, 0x41, 0x4a, 0x39, 0x81, 0xa5, 0xae, 0x2b, 0x48, 0xb3, 0x5f, 0x22, 0x48, 0x59, 0xd2, + 0x4b, 0x41, 0x8a, 0x7d, 0x0c, 0x19, 0xea, 0xc1, 0x04, 0x29, 0x38, 0x82, 0x24, 0x5a, 0xea, 0x70, + 0xc6, 0xa7, 0x1e, 0x35, 0x38, 0xb5, 0x76, 0x46, 0x02, 0xba, 0x4c, 0xb2, 0x4a, 0x51, 0x5f, 0x4e, + 0x0e, 0x1f, 0xa5, 0x02, 0x7b, 0x13, 0x4a, 0xcc, 0xa5, 0xa2, 0x23, 0x89, 0x86, 0xb1, 0x72, 0x5a, + 0x16, 0xd4, 0x59, 0x35, 0x2a, 0x66, 0x35, 0x29, 0x66, 0xb5, 0x25, 0x4e, 0x6f, 0xcf, 0xe8, 0x20, + 0xc1, 0x72, 0x85, 0x2f, 0xc3, 0x5c, 0x24, 0x1a, 0xd9, 0x5a, 0x39, 0x23, 0xae, 0xed, 0xf6, 0x8c, + 0x1e, 0x29, 0x8c, 0x8c, 0xe0, 0x67, 0x50, 0x74, 0x0c, 0x2f, 0xe6, 0x71, 0x56, 0x96, 0xd0, 0x47, + 0xd3, 0x97, 0xd0, 0x3d, 0xc3, 0x93, 0x74, 0x5b, 0x2e, 0xf7, 0x07, 0x7a, 0xc1, 0x89, 0x97, 0xf8, + 0x15, 0x2c, 0x3b, 0x86, 0xe7, 0x8d, 0xfb, 0x7b, 0x4e, 0xda, 0xb9, 0x7d, 0x22, 0x3b, 0xde, 0x48, + 0x7c, 0x22, 0x83, 0x4b, 0xce, 0xf8, 0x7e, 0xca, 0x72, 0x54, 0xd6, 0xb1, 0xe5, 0x95, 0xb7, 0xb3, + 0x1c, 0xb5, 0x8a, 0xc3, 0x96, 0x53, 0xfb, 0xb8, 0x01, 0x2b, 0x2e, 0x73, 0x37, 0x99, 0xbb, 0x4b, + 0x5d, 0xd1, 0x31, 0x8d, 0xfe, 0xb6, 0xe1, 0x44, 0x7d, 0x61, 0xa5, 0x22, 0x2b, 0x67, 0xe2, 0x39, + 0xde, 0x84, 0xc5, 0x61, 0x5b, 0x8e, 0x19, 0x5f, 0x90, 0x37, 0x5e, 0x39, 0x74, 0xe3, 0x9f, 0x26, + 0x38, 0x7d, 0x61, 0x28, 0x12, 0x29, 0x79, 0x06, 0xc3, 0x4c, 0xda, 0x49, 0x15, 0xd4, 0x2a, 0xc9, + 0x4e, 0x5d, 0x50, 0x4b, 0x89, 0xa2, 0x56, 0x52, 0x58, 0x95, 0x3f, 0x20, 0xc8, 0xbf, 0xee, 0xc7, + 0xae, 0xe1, 0xd0, 0xa4, 0x1f, 0x8b, 0x6f, 0x7c, 0x16, 0xf2, 0x86, 0xc3, 0x42, 0x97, 0xaf, 0x64, + 0x64, 0x85, 0xc7, 0x2b, 0xfc, 0x10, 0x32, 0xec, 0x85, 0x6c, 0xa6, 0x0b, 0xf5, 0xe6, 0x49, 0x7b, + 0xb4, 0xba, 0x45, 0xa9, 0x27, 0x89, 0x65, 0xd8, 0x8b, 0xea, 0x1a, 0x14, 0x92, 0x35, 0x2e, 0xc2, + 0xec, 0xc7, 0xcd, 0x4f, 0x1e, 0xb5, 0xca, 0x33, 0xb8, 0x00, 0xb9, 0x4f, 0xf5, 0xcf, 0x5a, 0x65, + 0x54, 0xb1, 0x61, 0x7e, 0x24, 0x31, 0x71, 0x19, 0xb2, 0x2f, 0xe8, 0x20, 0xe6, 0x2b, 0x3e, 0xf1, + 0x06, 0xcc, 0x46, 0xd1, 0xc9, 0x9c, 0xa0, 0xdd, 0x44, 0xa2, 0x8d, 0xcc, 0x87, 0xa8, 0xb2, 0x05, + 0x67, 0x8f, 0xce, 0xcd, 0x23, 0x6c, 0x9e, 0x4e, 0xdb, 0x2c, 0xa6, 0xb5, 0xfc, 0x2c, 0xd1, 0x32, + 0x9e, 0x67, 0x47, 0x68, 0xd9, 0x4e, 0x6b, 0x79, 0x9b, 0x77, 0xef, 0xb5, 0xfd, 0xc6, 0x0f, 0x0f, + 0x9a, 0x4f, 0xd6, 0x1f, 0xc3, 0x95, 0x8f, 0x6d, 0xd7, 0x22, 0x2c, 0xe4, 0xc4, 0x61, 0x3e, 0x25, + 0x46, 0x5b, 0x7c, 0x1e, 0x7a, 0xec, 0xd5, 0x1e, 0xe7, 0x5e, 0xd0, 0xd0, 0xb4, 0xae, 0xcd, 0x7b, + 0x61, 0x5b, 0x35, 0x99, 0xa3, 0x09, 0x0e, 0x35, 0x6a, 0xb2, 0x60, 0x10, 0x70, 0x1a, 0x2f, 0x63, + 0x4a, 0x1b, 0xf3, 0x49, 0x27, 0x93, 0xf6, 0xaa, 0x15, 0xc8, 0x6d, 0x30, 0x6b, 0x70, 0x54, 0x12, + 0x55, 0x9f, 0xc1, 0xe2, 0xbd, 0x68, 0x78, 0xf9, 0x81, 0xcd, 0x7b, 0x12, 0xb6, 0x00, 0x99, 0xe1, + 0xcb, 0x9f, 0xb1, 0x2d, 0x7c, 0x13, 0x72, 0x96, 0xc1, 0x8d, 0xd8, 0xfb, 0x77, 0x8f, 0xf5, 0x5e, + 0x28, 0xd1, 0xa5, 0xc8, 0x3a, 0x81, 0x52, 0xea, 0x16, 0x45, 0xbe, 0x3c, 0x6d, 0xe9, 0xf7, 0xcb, + 0x33, 0xf8, 0x14, 0x64, 0xef, 0x6f, 0xb7, 0xca, 0xa8, 0xfe, 0xa7, 0x65, 0x38, 0x37, 0xee, 0xef, + 0x23, 0xea, 0xef, 0xda, 0x26, 0xc5, 0x5f, 0x65, 0x21, 0xbf, 0xe9, 0x8b, 0xa2, 0xc0, 0xd7, 0xa7, + 0x8e, 0x79, 0x65, 0x7a, 0x91, 0xea, 0x1f, 0x33, 0xbf, 0xfc, 0xe7, 0x7f, 0x7e, 0x9b, 0xf9, 0x7d, + 0xa6, 0xfa, 0xbb, 
0x8c, 0xb6, 0x7b, 0x3d, 0x99, 0x56, 0x8f, 0x9a, 0x55, 0xb5, 0xbd, 0xd4, 0xe4, + 0xb2, 0xaf, 0xed, 0xa5, 0xc7, 0x94, 0x7d, 0x6d, 0x2f, 0xf5, 0x3c, 0xed, 0x6b, 0x01, 0xf5, 0x0c, + 0xdf, 0xe0, 0xcc, 0xd7, 0xf6, 0xc2, 0x91, 0x83, 0xbd, 0xd4, 0x43, 0xb7, 0xaf, 0xed, 0x8d, 0xbc, + 0x8e, 0xc9, 0x3a, 0x75, 0xfe, 0x7a, 0x70, 0xd8, 0xd7, 0xf6, 0xd2, 0x5d, 0xfe, 0xbb, 0x01, 0xf7, + 0x3d, 0x9f, 0x76, 0xec, 0x57, 0xda, 0xfa, 0x7e, 0x64, 0x24, 0x25, 0x16, 0x8c, 0xeb, 0x09, 0xc6, + 0x0d, 0x05, 0x63, 0x02, 0xa3, 0x24, 0x27, 0xb5, 0xd0, 0x7d, 0xfc, 0x15, 0x02, 0x88, 0x2e, 0x48, + 0x26, 0xce, 0x37, 0x73, 0x49, 0xeb, 0xf2, 0x8e, 0xae, 0x54, 0xd7, 0x8e, 0xb9, 0xa1, 0x06, 0x5a, + 0xc7, 0x3f, 0x85, 0xfc, 0x27, 0x8c, 0xbd, 0x08, 0x3d, 0xbc, 0xa8, 0x8a, 0x41, 0x5d, 0xbd, 0x63, + 0xc5, 0xd9, 0x7e, 0x12, 0xcb, 0xaa, 0xb4, 0xac, 0xe0, 0x6f, 0x1d, 0x9b, 0x1b, 0x62, 0x5e, 0xde, + 0xc7, 0xbf, 0x42, 0x90, 0xff, 0xcc, 0xb3, 0x4e, 0x98, 0xbf, 0x13, 0x26, 0x8f, 0xea, 0x75, 0xc9, + 0xe2, 0xbd, 0xca, 0x1b, 0xb2, 0x10, 0x61, 0xf8, 0x0d, 0x82, 0xfc, 0x16, 0xed, 0x53, 0x4e, 0x0f, + 0xc7, 0x61, 0x92, 0x99, 0x67, 0x07, 0xcd, 0xf7, 0xda, 0x57, 0x61, 0x01, 0xa0, 0xe9, 0xd9, 0x77, + 0xe9, 0xa0, 0x19, 0xf2, 0x1e, 0x9e, 0x81, 0x73, 0x90, 0xbf, 0x2f, 0x3e, 0xeb, 0x78, 0x1e, 0x72, + 0x3e, 0x35, 0x2c, 0x98, 0x7d, 0xe9, 0xdb, 0x9c, 0x46, 0xa1, 0x59, 0x7f, 0xd3, 0xd0, 0xfc, 0x1b, + 0x41, 0xe1, 0x16, 0xe5, 0x0f, 0x43, 0xea, 0x0f, 0xbe, 0xce, 0xe0, 0x7c, 0x89, 0x0e, 0x9a, 0x7a, + 0x75, 0x1b, 0x56, 0x8f, 0xea, 0xab, 0x43, 0x83, 0x53, 0xf6, 0xd3, 0x27, 0x48, 0x7a, 0xa7, 0xe2, + 0x6b, 0xc7, 0x79, 0xf7, 0x63, 0xa1, 0x3e, 0xf1, 0xf1, 0xef, 0x19, 0xc8, 0xb5, 0xcc, 0x1e, 0xc3, + 0xca, 0x04, 0xff, 0x82, 0xb0, 0xad, 0x46, 0x8f, 0x58, 0x72, 0x19, 0x6f, 0x8c, 0xac, 0xfe, 0x17, + 0x1d, 0x34, 0xbf, 0x40, 0x30, 0x47, 0xcd, 0x1e, 0x23, 0x41, 0xd4, 0x30, 0xa1, 0x20, 0x57, 0xbe, + 0x67, 0xe2, 0xa5, 0x47, 0xa1, 0xe3, 0x18, 0xfe, 0xa0, 0x41, 0x5a, 0xf1, 0x56, 0xa5, 0xbc, 0x45, + 0x03, 0xd3, 0xb7, 0xe5, 0xcf, 0x4c, 0xb9, 0x5b, 0xdd, 0x02, 0x3c, 0x1a, 0x26, 0xc9, 0x76, 0xca, + 0xe0, 0xc8, 0xd0, 0x7c, 0x7e, 0x7c, 0x68, 0x04, 0x35, 0x6d, 0x2f, 0xea, 0x29, 0x4f, 0xcf, 0x57, + 0xcb, 0xda, 0x6e, 0x7d, 0x88, 0x17, 0x67, 0x8d, 0xe8, 0x71, 0x7c, 0x8a, 0xf1, 0xa1, 0x23, 0xfc, + 0x67, 0x04, 0x73, 0x62, 0xfe, 0x78, 0x60, 0xf0, 0x9e, 0xe4, 0xf8, 0xcd, 0x74, 0x9a, 0x8f, 0xa4, + 0x6f, 0x37, 0xab, 0x1f, 0x1c, 0x9b, 0xd4, 0x23, 0x3f, 0xc5, 0x55, 0xf1, 0xb0, 0xca, 0xba, 0x6b, + 0x02, 0x6c, 0xb3, 0x0d, 0xdb, 0xb5, 0x6c, 0xb7, 0x1b, 0xe0, 0xf3, 0x87, 0x72, 0x76, 0x2b, 0xfe, + 0xe3, 0xc3, 0xc4, 0x74, 0x9e, 0xc1, 0x8f, 0xe1, 0x94, 0x18, 0x3f, 0x59, 0xc8, 0xf1, 0x04, 0xd0, + 0x44, 0xe1, 0x0b, 0x92, 0xfe, 0x19, 0xbc, 0x9c, 0x8e, 0x27, 0x8f, 0x95, 0xf5, 0xa0, 0xdc, 0xf2, + 0x7d, 0xe6, 0x8b, 0x57, 0x7f, 0x8b, 0x72, 0xc3, 0xee, 0x07, 0x53, 0x1b, 0xb8, 0x22, 0x0d, 0xbc, + 0x83, 0x57, 0x47, 0x2e, 0x4c, 0x68, 0x7d, 0x69, 0xf3, 0x9e, 0x15, 0x6b, 0xfd, 0x35, 0x02, 0x7c, + 0x8b, 0xf2, 0xf1, 0x29, 0xe3, 0xfd, 0x63, 0xef, 0x63, 0x4c, 0x62, 0x22, 0x8d, 0x6f, 0x4b, 0x1a, + 0x97, 0xaa, 0xe7, 0xd3, 0x34, 0x04, 0x83, 0x36, 0xb3, 0x06, 0xda, 0x9e, 0xe8, 0x81, 0x72, 0x1a, + 0xc1, 0x5f, 0x20, 0x58, 0x7a, 0xc0, 0x02, 0x2e, 0x34, 0x4a, 0x51, 0x49, 0xe4, 0xcd, 0x06, 0x9a, + 0x89, 0xd6, 0x35, 0x69, 0xfd, 0x6a, 0xf5, 0x4a, 0xda, 0xba, 0xc7, 0x02, 0x2e, 0x18, 0xc8, 0x5f, + 0x92, 0x11, 0x8d, 0x24, 0x29, 0x2a, 0x7f, 0x43, 0x07, 0xcd, 0xbf, 0x20, 0xdc, 0x99, 0x30, 0xf5, + 0x10, 0x2b, 0x55, 0xa6, 0xb5, 0x1a, 0x79, 0xd9, 0xb3, 0xcd, 0x1e, 0x09, 0x7a, 0x2c, 0xec, 0x5b, + 0xc4, 0x65, 0x9c, 0xb4, 0x29, 0x09, 0x03, 
0x6a, 0x11, 0xdb, 0x25, 0x5e, 0xdf, 0x30, 0x29, 0x61, + 0x1d, 0xc2, 0x7b, 0x94, 0x58, 0xcc, 0x0c, 0x1d, 0xea, 0x46, 0x7f, 0x3b, 0x22, 0x26, 0x73, 0xc4, + 0xe2, 0x52, 0xe5, 0x21, 0xac, 0x1d, 0xd5, 0x0b, 0x45, 0x19, 0x25, 0x73, 0xd6, 0x94, 0x15, 0x5f, + 0x7f, 0x0e, 0xa7, 0x4d, 0xc3, 0xa1, 0xfd, 0x4d, 0x23, 0xa0, 0xb1, 0x0e, 0x31, 0x14, 0x60, 0x1d, + 0x66, 0xa3, 0x9f, 0xc3, 0xd3, 0x26, 0xd2, 0x79, 0x19, 0xc3, 0x65, 0xbc, 0x34, 0x92, 0x48, 0xe2, + 0xa8, 0xfe, 0x23, 0x58, 0x6d, 0xba, 0x8c, 0xf7, 0xa8, 0x1f, 0x5b, 0x12, 0x97, 0x97, 0x2a, 0xaa, + 0xef, 0x8d, 0x94, 0xd8, 0xb4, 0x86, 0x67, 0x36, 0x7e, 0x31, 0x7b, 0xd0, 0xfc, 0x5f, 0x0e, 0x73, + 0x58, 0x6e, 0x92, 0x0d, 0x9b, 0x8b, 0x60, 0xa6, 0x3a, 0xc0, 0x13, 0x38, 0xdd, 0xd5, 0x1f, 0x6c, + 0xd6, 0x6e, 0x45, 0x9e, 0x13, 0xcf, 0x67, 0xcf, 0xa9, 0xc9, 0xa7, 0x8d, 0x58, 0xa5, 0xec, 0x32, + 0x97, 0x7e, 0x3f, 0xf6, 0x4c, 0xa0, 0xeb, 0xd9, 0xeb, 0xea, 0xfb, 0xeb, 0x59, 0x94, 0xc9, 0xd5, + 0xcb, 0x86, 0xe7, 0xf5, 0x6d, 0x53, 0x5e, 0x9b, 0xf6, 0x3c, 0x60, 0x6e, 0xfd, 0x6c, 0x7a, 0xe7, + 0x55, 0xad, 0xc3, 0x58, 0xcd, 0xb1, 0x1d, 0xda, 0x38, 0x84, 0x6c, 0x4c, 0x40, 0x3e, 0xfd, 0x32, + 0x03, 0x8b, 0x50, 0xdc, 0x30, 0x02, 0xdb, 0x94, 0xef, 0x76, 0xa6, 0x80, 0xe0, 0xe2, 0xc8, 0x4b, + 0xbe, 0x58, 0xc8, 0x54, 0x8a, 0x4f, 0x6a, 0xcd, 0x07, 0x77, 0x6a, 0x77, 0xe9, 0x80, 0x64, 0xe0, + 0x5f, 0x68, 0xf8, 0xb2, 0xff, 0x03, 0x15, 0xb2, 0x4a, 0xae, 0x7e, 0x39, 0x71, 0x32, 0xc5, 0x5b, + 0x63, 0x46, 0xc8, 0x7b, 0x9a, 0xf8, 0x87, 0xf9, 0xf6, 0x4f, 0x68, 0x63, 0x6d, 0x32, 0x88, 0xb3, + 0x17, 0xd4, 0xdd, 0xf8, 0x39, 0x54, 0xa2, 0x41, 0x01, 0xe3, 0x5b, 0xbe, 0xe1, 0xf2, 0x80, 0x88, + 0x05, 0x31, 0x4c, 0x93, 0x06, 0x01, 0xac, 0xc6, 0xe3, 0x03, 0x5e, 0x8e, 0x0f, 0xe5, 0x2a, 0x39, + 0xdd, 0x84, 0x59, 0xc3, 0x72, 0x6c, 0x17, 0x37, 0x46, 0x44, 0x5d, 0x6b, 0x04, 0x46, 0x38, 0x23, + 0x12, 0x66, 0x07, 0x5c, 0xf4, 0xd3, 0x5d, 0x4a, 0x6c, 0xb7, 0xc3, 0x7c, 0x47, 0x86, 0xa5, 0xbd, + 0x06, 0xf3, 0xe9, 0x50, 0xcc, 0x8c, 0x8f, 0x34, 0xed, 0xab, 0x13, 0x87, 0x9a, 0x71, 0xa8, 0x7f, + 0x07, 0xce, 0xdd, 0x7b, 0x5d, 0x43, 0xe9, 0xd4, 0x98, 0x36, 0x25, 0x9e, 0x16, 0x87, 0xfd, 0xa5, + 0x9d, 0x97, 0x59, 0x79, 0xe3, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x60, 0xd8, 0x68, 0xe8, 0x2d, + 0x17, 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.gw.go similarity index 66% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.gw.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.gw.go index 5e5415b5..f863b09a 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.pb.gw.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.pb.gw.go @@ -1,6 +1,5 @@ -// Code generated by protoc-gen-grpc-gateway -// source: examples/examplepb/a_bit_of_everything.proto -// DO NOT EDIT! +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: examples/proto/examplepb/a_bit_of_everything.proto /* Package examplepb is a reverse proxy. 
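The regenerated gateway file keeps its role as a reverse proxy that translates RESTful JSON calls into gRPC. For orientation, a minimal sketch of how the generated registration helper is typically wired up, mirroring the deleted examples/main.go; the listen address and backend endpoint are placeholders:

package main

import (
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"golang.org/x/net/context"
	"google.golang.org/grpc"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// The ServeMux maps the HTTP bindings declared in the proto onto gRPC
	// calls against the backend assumed to listen on localhost:9090.
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}
	if err := examplepb.RegisterABitOfEverythingServiceHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		panic(err)
	}
	_ = http.ListenAndServe(":8080", mux)
}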
@@ -15,8 +14,8 @@ import ( "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes/empty" - "github.com/grpc-ecosystem/grpc-gateway/examples/sub" - "github.com/grpc-ecosystem/grpc-gateway/examples/sub2" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/grpc-ecosystem/grpc-gateway/utilities" "golang.org/x/net/context" @@ -55,7 +54,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.FloatValue, err = runtime.Float32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "float_value", err) } val, ok = pathParams["double_value"] @@ -66,7 +65,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.DoubleValue, err = runtime.Float64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "double_value", err) } val, ok = pathParams["int64_value"] @@ -77,7 +76,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Int64Value, err = runtime.Int64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "int64_value", err) } val, ok = pathParams["uint64_value"] @@ -88,7 +87,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Uint64Value, err = runtime.Uint64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uint64_value", err) } val, ok = pathParams["int32_value"] @@ -99,7 +98,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Int32Value, err = runtime.Int32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "int32_value", err) } val, ok = pathParams["fixed64_value"] @@ -110,7 +109,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Fixed64Value, err = runtime.Uint64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fixed64_value", err) } val, ok = pathParams["fixed32_value"] @@ -121,7 +120,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Fixed32Value, err = runtime.Uint32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fixed32_value", err) } val, ok = pathParams["bool_value"] @@ -132,7 +131,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.BoolValue, err = runtime.Bool(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "bool_value", err) } val, ok = pathParams["string_value"] @@ -143,7 +142,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.StringValue, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "string_value", err) } val, ok = pathParams["uint32_value"] @@ -154,7 +153,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Uint32Value, err = runtime.Uint32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uint32_value", err) } val, ok = pathParams["sfixed32_value"] @@ -165,7 +164,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Sfixed32Value, err = runtime.Int32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "sfixed32_value", err) } val, ok = pathParams["sfixed64_value"] @@ -176,7 +175,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Sfixed64Value, err = runtime.Int64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "sfixed64_value", err) } val, ok = pathParams["sint32_value"] @@ -187,7 +186,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Sint32Value, err = runtime.Int32(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "sint32_value", err) } val, ok = pathParams["sint64_value"] @@ -198,7 +197,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.Sint64Value, err = runtime.Int64(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "sint64_value", err) } val, ok = pathParams["nonConventionalNameValue"] @@ -209,7 +208,7 @@ func request_ABitOfEverythingService_Create_0(ctx context.Context, marshaler run protoReq.NonConventionalNameValue, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "nonConventionalNameValue", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_ABitOfEverythingService_Create_0); err != nil { @@ -225,7 +224,7 @@ func request_ABitOfEverythingService_CreateBody_0(ctx context.Context, marshaler var protoReq ABitOfEverything var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -253,7 +252,7 @@ func request_ABitOfEverythingService_Lookup_0(ctx context.Context, marshaler run protoReq.Uuid, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uuid", err) } msg, err := client.Lookup(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -265,7 +264,7 @@ func request_ABitOfEverythingService_Update_0(ctx context.Context, marshaler run var protoReq ABitOfEverything var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && 
err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -284,7 +283,7 @@ func request_ABitOfEverythingService_Update_0(ctx context.Context, marshaler run protoReq.Uuid, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uuid", err) } msg, err := client.Update(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -311,7 +310,7 @@ func request_ABitOfEverythingService_Delete_0(ctx context.Context, marshaler run protoReq.Uuid, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uuid", err) } msg, err := client.Delete(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -342,7 +341,7 @@ func request_ABitOfEverythingService_GetQuery_0(ctx context.Context, marshaler r protoReq.Uuid, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uuid", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_ABitOfEverythingService_GetQuery_0); err != nil { @@ -373,7 +372,7 @@ func request_ABitOfEverythingService_Echo_0(ctx context.Context, marshaler runti protoReq.Value, err = runtime.StringP(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "value", err) } msg, err := client.Echo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -385,7 +384,7 @@ func request_ABitOfEverythingService_Echo_1(ctx context.Context, marshaler runti var protoReq sub.StringMessage var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Value); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Value); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -415,7 +414,7 @@ func request_ABitOfEverythingService_DeepPathEcho_0(ctx context.Context, marshal var protoReq ABitOfEverything var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -434,7 +433,7 @@ func request_ABitOfEverythingService_DeepPathEcho_0(ctx context.Context, marshal err = runtime.PopulateFieldFromPath(&protoReq, "single_nested.name", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "single_nested.name", err) } msg, err := client.DeepPathEcho(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -451,6 +450,86 @@ func request_ABitOfEverythingService_Timeout_0(ctx context.Context, marshaler ru } +func request_ABitOfEverythingService_ErrorWithDetails_0(ctx context.Context, marshaler runtime.Marshaler, client ABitOfEverythingServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq empty.Empty + var metadata runtime.ServerMetadata + + msg, err := client.ErrorWithDetails(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_ABitOfEverythingService_GetMessageWithBody_0(ctx context.Context, marshaler runtime.Marshaler, client ABitOfEverythingServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq MessageWithBody + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Data); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + msg, err := client.GetMessageWithBody(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_ABitOfEverythingService_PostWithEmptyBody_0(ctx context.Context, marshaler runtime.Marshaler, client ABitOfEverythingServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq Body + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") + } + + protoReq.Name, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) + } + + msg, err := client.PostWithEmptyBody(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_CamelCaseServiceName_Empty_0(ctx context.Context, marshaler runtime.Marshaler, client CamelCaseServiceNameClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq empty.Empty + var metadata runtime.ServerMetadata + + msg, err := client.Empty(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + // RegisterABitOfEverythingServiceHandlerFromEndpoint is same as RegisterABitOfEverythingServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterABitOfEverythingServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { @@ -479,10 +558,18 @@ func RegisterABitOfEverythingServiceHandlerFromEndpoint(ctx context.Context, mux // RegisterABitOfEverythingServiceHandler registers the http handlers for service ABitOfEverythingService to "mux". // The handlers forward requests to the grpc endpoint over "conn". 
func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewABitOfEverythingServiceClient(conn) + return RegisterABitOfEverythingServiceHandlerClient(ctx, mux, NewABitOfEverythingServiceClient(conn)) +} + +// RegisterABitOfEverythingServiceHandler registers the http handlers for service ABitOfEverythingService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "ABitOfEverythingServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ABitOfEverythingServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "ABitOfEverythingServiceClient" to call the correct interceptors. +func RegisterABitOfEverythingServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ABitOfEverythingServiceClient) error { mux.Handle("POST", pattern_ABitOfEverythingService_Create_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -511,7 +598,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("POST", pattern_ABitOfEverythingService_CreateBody_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -540,7 +627,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("GET", pattern_ABitOfEverythingService_Lookup_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -569,7 +656,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("PUT", pattern_ABitOfEverythingService_Update_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -598,7 +685,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("DELETE", pattern_ABitOfEverythingService_Delete_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -627,7 +714,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("GET", pattern_ABitOfEverythingService_GetQuery_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -656,7 +743,7 @@ 
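The hunks above turn the conn-based registration into a thin wrapper around the new RegisterABitOfEverythingServiceHandlerClient and derive each handler's context from req.Context(). A minimal sketch of wiring the gateway to an arbitrary client implementation follows; the newGatewayMux name and the customClient argument are assumptions made for illustration, not part of the generated code.

package main

import (
	"github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"golang.org/x/net/context"
)

// newGatewayMux registers the generated HTTP handlers against any
// ABitOfEverythingServiceClient, for example an in-process stub. With a
// client that bypasses the normal gRPC flow, invoking interceptors is the
// caller's responsibility, as the generated doc comment notes.
func newGatewayMux(ctx context.Context, customClient examplepb.ABitOfEverythingServiceClient) (*runtime.ServeMux, error) {
	mux := runtime.NewServeMux()
	if err := examplepb.RegisterABitOfEverythingServiceHandlerClient(ctx, mux, customClient); err != nil {
		return nil, err
	}
	return mux, nil
}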
func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("GET", pattern_ABitOfEverythingService_Echo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -685,7 +772,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("POST", pattern_ABitOfEverythingService_Echo_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -714,7 +801,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("GET", pattern_ABitOfEverythingService_Echo_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -743,7 +830,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("POST", pattern_ABitOfEverythingService_DeepPathEcho_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -772,7 +859,7 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) mux.Handle("GET", pattern_ABitOfEverythingService_Timeout_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -800,6 +887,93 @@ func RegisterABitOfEverythingServiceHandler(ctx context.Context, mux *runtime.Se }) + mux.Handle("GET", pattern_ABitOfEverythingService_ErrorWithDetails_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ABitOfEverythingService_ErrorWithDetails_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_ABitOfEverythingService_ErrorWithDetails_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_ABitOfEverythingService_GetMessageWithBody_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ABitOfEverythingService_GetMessageWithBody_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_ABitOfEverythingService_GetMessageWithBody_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_ABitOfEverythingService_PostWithEmptyBody_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ABitOfEverythingService_PostWithEmptyBody_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_ABitOfEverythingService_PostWithEmptyBody_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + return nil } @@ -825,6 +999,12 @@ var ( pattern_ABitOfEverythingService_DeepPathEcho_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v1", "example", "a_bit_of_everything", "single_nested.name"}, "")) pattern_ABitOfEverythingService_Timeout_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v2", "example", "timeout"}, "")) + + pattern_ABitOfEverythingService_ErrorWithDetails_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v2", "example", "errorwithdetails"}, "")) + + pattern_ABitOfEverythingService_GetMessageWithBody_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v2", "example", "withbody", "id"}, "")) + + pattern_ABitOfEverythingService_PostWithEmptyBody_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v2", "example", "postwithemptybody", "name"}, "")) ) var ( @@ -849,4 +1029,88 @@ var ( forward_ABitOfEverythingService_DeepPathEcho_0 = runtime.ForwardResponseMessage forward_ABitOfEverythingService_Timeout_0 = runtime.ForwardResponseMessage + + forward_ABitOfEverythingService_ErrorWithDetails_0 = runtime.ForwardResponseMessage + + forward_ABitOfEverythingService_GetMessageWithBody_0 = runtime.ForwardResponseMessage + + forward_ABitOfEverythingService_PostWithEmptyBody_0 = runtime.ForwardResponseMessage +) + +// RegisterCamelCaseServiceNameHandlerFromEndpoint is same as RegisterCamelCaseServiceNameHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterCamelCaseServiceNameHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterCamelCaseServiceNameHandler(ctx, mux, conn) +} + +// RegisterCamelCaseServiceNameHandler registers the http handlers for service CamelCaseServiceName to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterCamelCaseServiceNameHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterCamelCaseServiceNameHandlerClient(ctx, mux, NewCamelCaseServiceNameClient(conn)) +} + +// RegisterCamelCaseServiceNameHandler registers the http handlers for service CamelCaseServiceName to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "CamelCaseServiceNameClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "CamelCaseServiceNameClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "CamelCaseServiceNameClient" to call the correct interceptors. 
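RegisterCamelCaseServiceNameHandlerFromEndpoint dials the backend itself and ties the connection lifetime to ctx. A brief sketch of serving the gateway through this entry point follows; the listen address, the endpoint value, and the use of grpc.WithInsecure are assumptions for illustration.

package main

import (
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"golang.org/x/net/context"
	"google.golang.org/grpc"
)

// serveGateway dials endpoint, registers the generated routes, and serves HTTP.
// The gRPC connection is closed when ctx is done, as implemented above.
func serveGateway(ctx context.Context, endpoint string) error {
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}
	if err := examplepb.RegisterCamelCaseServiceNameHandlerFromEndpoint(ctx, mux, endpoint, opts); err != nil {
		return err
	}
	return http.ListenAndServe(":8080", mux)
}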
+func RegisterCamelCaseServiceNameHandlerClient(ctx context.Context, mux *runtime.ServeMux, client CamelCaseServiceNameClient) error { + + mux.Handle("GET", pattern_CamelCaseServiceName_Empty_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_CamelCaseServiceName_Empty_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_CamelCaseServiceName_Empty_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_CamelCaseServiceName_Empty_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v2", "example", "empty"}, "")) +) + +var ( + forward_CamelCaseServiceName_Empty_0 = runtime.ForwardResponseMessage ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.proto new file mode 100644 index 00000000..6eaa7d8f --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.proto @@ -0,0 +1,316 @@ +syntax = "proto3"; +option go_package = "examplepb"; +package grpc.gateway.examples.examplepb; + +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/duration.proto"; +import "examples/proto/sub/message.proto"; +import "examples/proto/sub2/message.proto"; +import "google/protobuf/timestamp.proto"; +import "protoc-gen-swagger/options/annotations.proto"; + +option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { + info: { + title: "A Bit of Everything"; + version: "1.0"; + contact: { + name: "gRPC-Gateway project"; + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + email: "none@example.com"; + }; + }; + // Overwriting host entry breaks tests, so this is not done here. 
+ external_docs: { + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + description: "More about gRPC-Gateway"; + } + schemes: HTTP; + schemes: HTTPS; + schemes: WSS; + consumes: "application/json"; + consumes: "application/x-foo-mime"; + produces: "application/json"; + produces: "application/x-foo-mime"; + security_definitions: { + security: { + key: "BasicAuth"; + value: { + type: TYPE_BASIC; + } + } + security: { + key: "ApiKeyAuth"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "X-API-Key"; + } + } + security: { + key: "OAuth2"; + value: { + type: TYPE_OAUTH2; + flow: FLOW_ACCESS_CODE; + authorization_url: "https://example.com/oauth/authorize"; + token_url: "https://example.com/oauth/token"; + scopes: { + scope: { + key: "read"; + value: "Grants read access"; + } + scope: { + key: "write"; + value: "Grants write access"; + } + scope: { + key: "admin"; + value: "Grants read and write access to administrative information"; + } + } + } + } + } + security: { + security_requirement: { + key: "BasicAuth"; + value: {}; + } + security_requirement: { + key: "ApiKeyAuth"; + value: {}; + } + } + security: { + security_requirement: { + key: "OAuth2"; + value: { + scope: "read"; + scope: "write"; + } + } + security_requirement: { + key: "ApiKeyAuth"; + value: {}; + } + } +}; + + +// Intentionaly complicated message type to cover much features of Protobuf. +// NEXT ID: 30 +message ABitOfEverything { + option (grpc.gateway.protoc_gen_swagger.options.openapiv2_schema) = { + external_docs: { + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + description: "Find out more about ABitOfEverything"; + } + }; + + // Nested is nested type. + message Nested { + // name is nested field. + string name = 1; + uint32 amount = 2; + // DeepEnum is one or zero. + enum DeepEnum { + // FALSE is false. + FALSE = 0; + // TRUE is true. + TRUE = 1; + } + DeepEnum ok = 3; + } + Nested single_nested = 25; + + string uuid = 1; + repeated Nested nested = 2; + float float_value = 3; + double double_value = 4; + int64 int64_value = 5; + uint64 uint64_value = 6; + int32 int32_value = 7; + fixed64 fixed64_value = 8; + fixed32 fixed32_value = 9; + bool bool_value = 10; + string string_value = 11; + bytes bytes_value = 29; + uint32 uint32_value = 13; + NumericEnum enum_value = 14; + sfixed32 sfixed32_value = 15; + sfixed64 sfixed64_value = 16; + sint32 sint32_value = 17; + sint64 sint64_value = 18; + repeated string repeated_string_value = 19; + oneof oneof_value { + google.protobuf.Empty oneof_empty = 20; + string oneof_string = 21; + } + + map map_value = 22; + map mapped_string_value = 23; + map mapped_nested_value = 24; + + string nonConventionalNameValue = 26; + + google.protobuf.Timestamp timestamp_value = 27; + + // repeated enum value. it is comma-separated in query + repeated NumericEnum repeated_enum_value = 28; +} + +message Body { + string name = 1; +} + +message MessageWithBody { + string id = 1; + Body data = 2; +} + + +// NumericEnum is one or zero. +enum NumericEnum { + // ZERO means 0 + ZERO = 0; + // ONE means 1 + ONE = 1; +} + +// ABitOfEverything service is used to validate that APIs with complicated +// proto messages and URL templates are still processed correctly. +service ABitOfEverythingService { + + option (grpc.gateway.protoc_gen_swagger.options.openapiv2_tag) = { + description: "ABitOfEverythingService description -- which should not be used in place of the documentation comment!" 
+ external_docs: { + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + description: "Find out more about EchoService"; + } + }; + + rpc Create(ABitOfEverything) returns (ABitOfEverything) { + // TODO add enum_value + option (google.api.http) = { + post: "/v1/example/a_bit_of_everything/{float_value}/{double_value}/{int64_value}/separator/{uint64_value}/{int32_value}/{fixed64_value}/{fixed32_value}/{bool_value}/{string_value=strprefix/*}/{uint32_value}/{sfixed32_value}/{sfixed64_value}/{sint32_value}/{sint64_value}/{nonConventionalNameValue}" + }; + } + rpc CreateBody(ABitOfEverything) returns (ABitOfEverything) { + option (google.api.http) = { + post: "/v1/example/a_bit_of_everything" + body: "*" + }; + } + rpc Lookup(sub2.IdMessage) returns (ABitOfEverything) { + option (google.api.http) = { + get: "/v1/example/a_bit_of_everything/{uuid}" + }; + } + rpc Update(ABitOfEverything) returns (google.protobuf.Empty) { + option (google.api.http) = { + put: "/v1/example/a_bit_of_everything/{uuid}" + body: "*" + }; + } + rpc Delete(sub2.IdMessage) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/example/a_bit_of_everything/{uuid}" + }; + option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { + security: { + security_requirement: { + key: "ApiKeyAuth"; + value: {} + } + security_requirement: { + key: "OAuth2"; + value: { + scope: "read"; + scope: "write"; + } + } + } + }; + } + rpc GetQuery(ABitOfEverything) returns (google.protobuf.Empty) { + option (google.api.http) = { + get: "/v1/example/a_bit_of_everything/query/{uuid}" + }; + option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { + deprecated: true // For testing purposes. + external_docs: { + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + description: "Find out more about GetQuery"; + } + }; + } + // Echo allows posting a StringMessage value. + // + // It also exposes multiple bindings. + // + // This makes it useful when validating that the OpenAPI v2 API + // description exposes documentation correctly on all paths + // defined as additional_bindings in the proto. 
+ rpc Echo(grpc.gateway.examples.sub.StringMessage) returns (grpc.gateway.examples.sub.StringMessage) { + option (google.api.http) = { + get: "/v1/example/a_bit_of_everything/echo/{value}" + additional_bindings { + post: "/v2/example/echo" + body: "value" + } + additional_bindings { + get: "/v2/example/echo" + } + }; + option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { + description: "Description Echo"; + summary: "Summary: Echo rpc"; + tags: "echo service"; + tags: "echo rpc"; + external_docs: { + url: "https://github.com/grpc-ecosystem/grpc-gateway"; + description: "Find out more Echo"; + } + }; + } + rpc DeepPathEcho(ABitOfEverything) returns (ABitOfEverything) { + option (google.api.http) = { + post: "/v1/example/a_bit_of_everything/{single_nested.name}" + body: "*" + }; + } + rpc NoBindings(google.protobuf.Duration) returns (google.protobuf.Empty) {} + rpc Timeout(google.protobuf.Empty) returns (google.protobuf.Empty) { + option (google.api.http) = { + get: "/v2/example/timeout", + }; + } + rpc ErrorWithDetails(google.protobuf.Empty) returns (google.protobuf.Empty) { + option (google.api.http) = { + get: "/v2/example/errorwithdetails", + }; + } + rpc GetMessageWithBody(MessageWithBody) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/example/withbody/{id}", + body: "data" + }; + } + rpc PostWithEmptyBody(Body) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/example/postwithemptybody/{name}", + body: "*" + }; + } +} +// camelCase and lowercase service names are valid but not recommended (use TitleCase instead) +service camelCaseServiceName { + rpc Empty(google.protobuf.Empty) returns (google.protobuf.Empty) { + option (google.api.http) = { + get: "/v2/example/empty", + }; + } +} +service AnotherServiceWithNoBindings { + rpc NoBindings(google.protobuf.Empty) returns (google.protobuf.Empty) {} +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.swagger.json b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.swagger.json similarity index 77% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.swagger.json rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.swagger.json index 052b256b..aa7f2479 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/a_bit_of_everything.swagger.json +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/a_bit_of_everything.swagger.json @@ -1,18 +1,26 @@ { "swagger": "2.0", "info": { - "title": "examples/examplepb/a_bit_of_everything.proto", - "version": "version not set" + "title": "A Bit of Everything", + "version": "1.0", + "contact": { + "name": "gRPC-Gateway project", + "url": "https://github.com/grpc-ecosystem/grpc-gateway", + "email": "none@example.com" + } }, "schemes": [ "http", - "https" + "https", + "wss" ], "consumes": [ - "application/json" + "application/json", + "application/x-foo-mime" ], "produces": [ - "application/json" + "application/json", + "application/x-foo-mime" ], "paths": { "/v1/example/a_bit_of_everything": { @@ -43,6 +51,8 @@ }, "/v1/example/a_bit_of_everything/echo/{value}": { "get": { + "summary": "Summary: Echo rpc", + "description": "Description Echo", "operationId": "Echo", "responses": { "200": { @@ -61,8 +71,13 @@ } ], "tags": [ - "ABitOfEverythingService" - ] + "echo service", + "echo rpc" + ], + "externalDocs": { + 
"description": "Find out more Echo", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" + } } }, "/v1/example/a_bit_of_everything/query/{uuid}": { @@ -171,9 +186,15 @@ "required": false, "type": "string" }, + { + "name": "bytes_value", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, { "name": "uint32_value", - "description": "TODO(yugui) add bytes_value.", "in": "query", "required": false, "type": "integer", @@ -264,7 +285,12 @@ ], "tags": [ "ABitOfEverythingService" - ] + ], + "deprecated": true, + "externalDocs": { + "description": "Find out more about GetQuery", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" + } } }, "/v1/example/a_bit_of_everything/{float_value}/{double_value}/{int64_value}/separator/{uint64_value}/{int32_value}/{fixed64_value}/{fixed32_value}/{bool_value}/{string_value}/{uint32_value}/{sfixed32_value}/{sfixed64_value}/{sint32_value}/{sint64_value}/{nonConventionalNameValue}": { @@ -463,6 +489,15 @@ ], "tags": [ "ABitOfEverythingService" + ], + "security": [ + { + "ApiKeyAuth": [], + "OAuth2": [ + "read", + "write" + ] + } ] }, "put": { @@ -498,7 +533,9 @@ }, "/v2/example/echo": { "get": { - "operationId": "Echo", + "summary": "Summary: Echo rpc", + "description": "Description Echo", + "operationId": "Echo3", "responses": { "200": { "description": "", @@ -516,11 +553,18 @@ } ], "tags": [ - "ABitOfEverythingService" - ] + "echo service", + "echo rpc" + ], + "externalDocs": { + "description": "Find out more Echo", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" + } }, "post": { - "operationId": "Echo", + "summary": "Summary: Echo rpc", + "description": "Description Echo", + "operationId": "Echo2", "responses": { "200": { "description": "", @@ -539,6 +583,75 @@ } } ], + "tags": [ + "echo service", + "echo rpc" + ], + "externalDocs": { + "description": "Find out more Echo", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" + } + } + }, + "/v2/example/empty": { + "get": { + "operationId": "Empty", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "tags": [ + "camelCaseServiceName" + ] + } + }, + "/v2/example/errorwithdetails": { + "get": { + "operationId": "ErrorWithDetails", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "tags": [ + "ABitOfEverythingService" + ] + } + }, + "/v2/example/postwithemptybody/{name}": { + "post": { + "operationId": "PostWithEmptyBody", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "parameters": [ + { + "name": "name", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/examplepbBody" + } + } + ], "tags": [ "ABitOfEverythingService" ] @@ -559,6 +672,38 @@ "ABitOfEverythingService" ] } + }, + "/v2/example/withbody/{id}": { + "post": { + "operationId": "GetMessageWithBody", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/protobufEmpty" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/examplepbBody" + } + } + ], + "tags": [ + "ABitOfEverythingService" + ] + } } }, "definitions": { @@ -638,10 +783,13 @@ "string_value": { "type": "string" }, + "bytes_value": { + "type": 
"string", + "format": "byte" + }, "uint32_value": { "type": "integer", - "format": "int64", - "title": "TODO(yugui) add bytes_value" + "format": "int64" }, "enum_value": { "$ref": "#/definitions/examplepbNumericEnum" @@ -707,7 +855,19 @@ "title": "repeated enum value. it is comma-separated in query" } }, - "title": "Intentionaly complicated message type to cover much features of Protobuf.\nNEXT ID: 27" + "title": "Intentionaly complicated message type to cover much features of Protobuf.\nNEXT ID: 30", + "externalDocs": { + "description": "Find out more about ABitOfEverything", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" + } + }, + "examplepbBody": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + } }, "examplepbNumericEnum": { "type": "string", @@ -731,5 +891,43 @@ } } } + }, + "securityDefinitions": { + "ApiKeyAuth": { + "type": "apiKey", + "name": "X-API-Key", + "in": "header" + }, + "BasicAuth": { + "type": "basic" + }, + "OAuth2": { + "type": "oauth2", + "flow": "accessCode", + "authorizationUrl": "https://example.com/oauth/authorize", + "tokenUrl": "https://example.com/oauth/token", + "scopes": { + "admin": "Grants read and write access to administrative information", + "read": "Grants read access", + "write": "Grants write access" + } + } + }, + "security": [ + { + "ApiKeyAuth": [], + "BasicAuth": [] + }, + { + "ApiKeyAuth": [], + "OAuth2": [ + "read", + "write" + ] + } + ], + "externalDocs": { + "description": "More about gRPC-Gateway", + "url": "https://github.com/grpc-ecosystem/grpc-gateway" } } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.go similarity index 59% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.go index 44775837..a5d16e85 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: examples/examplepb/echo_service.proto +// source: examples/proto/examplepb/echo_service.proto /* Package examplepb is a generated protocol buffer package. @@ -10,19 +10,23 @@ Echo Service API consists of a single service which returns a message. It is generated from these files: - examples/examplepb/echo_service.proto - examples/examplepb/a_bit_of_everything.proto - examples/examplepb/stream.proto - examples/examplepb/flow_combination.proto + examples/proto/examplepb/echo_service.proto + examples/proto/examplepb/a_bit_of_everything.proto + examples/proto/examplepb/stream.proto + examples/proto/examplepb/flow_combination.proto + examples/proto/examplepb/wrappers.proto It has these top-level messages: SimpleMessage ABitOfEverything + Body + MessageWithBody EmptyProto NonEmptyProto UnaryProto NestedProto SingleNestedProto + Wrappers */ package examplepb @@ -50,7 +54,8 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // SimpleMessage represents a simple message sent to the Echo service. type SimpleMessage struct { // Id represents the message identifier. 
- Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Num int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` } func (m *SimpleMessage) Reset() { *m = SimpleMessage{} } @@ -65,6 +70,13 @@ func (m *SimpleMessage) GetId() string { return "" } +func (m *SimpleMessage) GetNum() int64 { + if m != nil { + return m.Num + } + return 0 +} + func init() { proto.RegisterType((*SimpleMessage)(nil), "grpc.gateway.examples.examplepb.SimpleMessage") } @@ -87,6 +99,8 @@ type EchoServiceClient interface { Echo(ctx context.Context, in *SimpleMessage, opts ...grpc.CallOption) (*SimpleMessage, error) // EchoBody method receives a simple message and returns it. EchoBody(ctx context.Context, in *SimpleMessage, opts ...grpc.CallOption) (*SimpleMessage, error) + // EchoDelete method receives a simple message and returns it. + EchoDelete(ctx context.Context, in *SimpleMessage, opts ...grpc.CallOption) (*SimpleMessage, error) } type echoServiceClient struct { @@ -115,6 +129,15 @@ func (c *echoServiceClient) EchoBody(ctx context.Context, in *SimpleMessage, opt return out, nil } +func (c *echoServiceClient) EchoDelete(ctx context.Context, in *SimpleMessage, opts ...grpc.CallOption) (*SimpleMessage, error) { + out := new(SimpleMessage) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.EchoService/EchoDelete", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // Server API for EchoService service type EchoServiceServer interface { @@ -125,6 +148,8 @@ type EchoServiceServer interface { Echo(context.Context, *SimpleMessage) (*SimpleMessage, error) // EchoBody method receives a simple message and returns it. EchoBody(context.Context, *SimpleMessage) (*SimpleMessage, error) + // EchoDelete method receives a simple message and returns it. 
+ EchoDelete(context.Context, *SimpleMessage) (*SimpleMessage, error) } func RegisterEchoServiceServer(s *grpc.Server, srv EchoServiceServer) { @@ -167,6 +192,24 @@ func _EchoService_EchoBody_Handler(srv interface{}, ctx context.Context, dec fun return interceptor(ctx, in, info, handler) } +func _EchoService_EchoDelete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleMessage) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EchoServiceServer).EchoDelete(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.EchoService/EchoDelete", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EchoServiceServer).EchoDelete(ctx, req.(*SimpleMessage)) + } + return interceptor(ctx, in, info, handler) +} + var _EchoService_serviceDesc = grpc.ServiceDesc{ ServiceName: "grpc.gateway.examples.examplepb.EchoService", HandlerType: (*EchoServiceServer)(nil), @@ -179,28 +222,35 @@ var _EchoService_serviceDesc = grpc.ServiceDesc{ MethodName: "EchoBody", Handler: _EchoService_EchoBody_Handler, }, + { + MethodName: "EchoDelete", + Handler: _EchoService_EchoDelete_Handler, + }, }, Streams: []grpc.StreamDesc{}, - Metadata: "examples/examplepb/echo_service.proto", + Metadata: "examples/proto/examplepb/echo_service.proto", } -func init() { proto.RegisterFile("examples/examplepb/echo_service.proto", fileDescriptor0) } +func init() { proto.RegisterFile("examples/proto/examplepb/echo_service.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 229 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4d, 0xad, 0x48, 0xcc, - 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x87, 0x32, 0x0a, 0x92, 0xf4, 0x53, 0x93, 0x33, 0xf2, 0xe3, 0x8b, - 0x53, 0x8b, 0xca, 0x32, 0x93, 0x53, 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0xe4, 0xd3, 0x8b, - 0x0a, 0x92, 0xf5, 0xd2, 0x13, 0x4b, 0x52, 0xcb, 0x13, 0x2b, 0xf5, 0x60, 0x7a, 0xf4, 0xe0, 0x7a, - 0xa4, 0x64, 0xd2, 0xf3, 0xf3, 0xd3, 0x73, 0x52, 0xf5, 0x13, 0x0b, 0x32, 0xf5, 0x13, 0xf3, 0xf2, - 0xf2, 0x4b, 0x12, 0x4b, 0x32, 0xf3, 0xf3, 0x8a, 0x21, 0xda, 0x95, 0xe4, 0xb9, 0x78, 0x83, 0x33, - 0x41, 0x2a, 0x7d, 0x53, 0x8b, 0x8b, 0x13, 0xd3, 0x53, 0x85, 0xf8, 0xb8, 0x98, 0x32, 0x53, 0x24, - 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0x98, 0x32, 0x53, 0x8c, 0x96, 0x30, 0x71, 0x71, 0xbb, 0x26, - 0x67, 0xe4, 0x07, 0x43, 0x6c, 0x15, 0x6a, 0x65, 0xe4, 0x62, 0x01, 0xf1, 0x85, 0xf4, 0xf4, 0x08, - 0xd8, 0xac, 0x87, 0x62, 0xb0, 0x14, 0x89, 0xea, 0x95, 0x64, 0x9b, 0x2e, 0x3f, 0x99, 0xcc, 0x24, - 0xae, 0x24, 0xaa, 0x5f, 0x66, 0x08, 0x0b, 0x02, 0x70, 0x00, 0xe8, 0x57, 0x67, 0xa6, 0xd4, 0x0a, - 0xf5, 0x30, 0x72, 0x71, 0x80, 0xdc, 0xe1, 0x94, 0x9f, 0x52, 0x49, 0x73, 0xb7, 0x28, 0x80, 0xdd, - 0x22, 0x85, 0xe9, 0x96, 0xf8, 0xa4, 0xfc, 0x94, 0x4a, 0x2b, 0x46, 0x2d, 0x27, 0xee, 0x28, 0x4e, - 0xb8, 0xe6, 0x24, 0x36, 0x70, 0xd8, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x26, 0x96, 0x37, - 0xac, 0xc3, 0x01, 0x00, 0x00, + // 288 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4e, 0xad, 0x48, 0xcc, + 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0x87, 0x72, 0x0b, 0x92, 0xf4, + 0x53, 0x93, 0x33, 0xf2, 0xe3, 0x8b, 0x53, 0x8b, 0xca, 0x32, 0x93, 0x53, 0xf5, 0xc0, 0x92, 0x42, + 0xf2, 0xe9, 0x45, 0x05, 0xc9, 0x7a, 0xe9, 0x89, 0x25, 0xa9, 
0xe5, 0x89, 0x95, 0x7a, 0x30, 0x9d, + 0x7a, 0x70, 0x3d, 0x52, 0x32, 0xe9, 0xf9, 0xf9, 0xe9, 0x39, 0xa9, 0xfa, 0x89, 0x05, 0x99, 0xfa, + 0x89, 0x79, 0x79, 0xf9, 0x25, 0x89, 0x25, 0x99, 0xf9, 0x79, 0xc5, 0x10, 0xed, 0x4a, 0x86, 0x5c, + 0xbc, 0xc1, 0x99, 0x20, 0x95, 0xbe, 0xa9, 0xc5, 0xc5, 0x89, 0xe9, 0xa9, 0x42, 0x7c, 0x5c, 0x4c, + 0x99, 0x29, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x4c, 0x99, 0x29, 0x42, 0x02, 0x5c, 0xcc, + 0x79, 0xa5, 0xb9, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x20, 0xa6, 0xd1, 0x65, 0x66, 0x2e, + 0x6e, 0xd7, 0xe4, 0x8c, 0xfc, 0x60, 0x88, 0x3b, 0x84, 0x96, 0x30, 0x72, 0xb1, 0x80, 0xf8, 0x42, + 0x7a, 0x7a, 0x04, 0xdc, 0xa2, 0x87, 0x62, 0x95, 0x14, 0x89, 0xea, 0x95, 0x6c, 0x9a, 0x2e, 0x3f, + 0x99, 0xcc, 0x64, 0xa6, 0x24, 0xaa, 0x5f, 0x66, 0x08, 0x0b, 0x14, 0x70, 0x90, 0xe8, 0x57, 0x67, + 0xa6, 0xd4, 0x46, 0xc9, 0x0a, 0x49, 0x63, 0x95, 0xd0, 0xaf, 0xce, 0x2b, 0xcd, 0xad, 0x15, 0xea, + 0x61, 0xe4, 0xe2, 0x00, 0x39, 0xd3, 0x29, 0x3f, 0xa5, 0x92, 0xe6, 0x4e, 0x55, 0x00, 0x3b, 0x55, + 0x0a, 0xd3, 0xa9, 0xf1, 0x49, 0xf9, 0x29, 0x95, 0x56, 0x8c, 0x5a, 0x42, 0xbd, 0x8c, 0x5c, 0x5c, + 0x20, 0xe7, 0xb8, 0xa4, 0xe6, 0xa4, 0x96, 0xa4, 0xd2, 0xdc, 0x41, 0xf2, 0x60, 0x07, 0x49, 0x6a, + 0x89, 0x63, 0x38, 0x28, 0x05, 0xec, 0x00, 0x27, 0xee, 0x28, 0x4e, 0xb8, 0xde, 0x24, 0x36, 0x70, + 0xe2, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x46, 0x53, 0x05, 0xe1, 0x8a, 0x02, 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.gw.go new file mode 100644 index 00000000..c281543b --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.pb.gw.go @@ -0,0 +1,309 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: examples/proto/examplepb/echo_service.proto + +/* +Package examplepb is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
+*/ +package examplepb + +import ( + "io" + "net/http" + + "github.com/golang/protobuf/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/utilities" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray + +var ( + filter_EchoService_Echo_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_EchoService_Echo_0(ctx context.Context, marshaler runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SimpleMessage + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_EchoService_Echo_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.Echo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_EchoService_Echo_1(ctx context.Context, marshaler runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SimpleMessage + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + val, ok = pathParams["num"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "num") + } + + protoReq.Num, err = runtime.Int64(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "num", err) + } + + msg, err := client.Echo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_EchoService_EchoBody_0(ctx context.Context, marshaler runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SimpleMessage + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.EchoBody(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_EchoService_EchoDelete_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_EchoService_EchoDelete_0(ctx context.Context, marshaler 
runtime.Marshaler, client EchoServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SimpleMessage + var metadata runtime.ServerMetadata + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_EchoService_EchoDelete_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.EchoDelete(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +// RegisterEchoServiceHandlerFromEndpoint is same as RegisterEchoServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterEchoServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterEchoServiceHandler(ctx, mux, conn) +} + +// RegisterEchoServiceHandler registers the http handlers for service EchoService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterEchoServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterEchoServiceHandlerClient(ctx, mux, NewEchoServiceClient(conn)) +} + +// RegisterEchoServiceHandler registers the http handlers for service EchoService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "EchoServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "EchoServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "EchoServiceClient" to call the correct interceptors. +func RegisterEchoServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client EchoServiceClient) error { + + mux.Handle("POST", pattern_EchoService_Echo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_EchoService_Echo_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_EchoService_Echo_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_EchoService_Echo_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_EchoService_Echo_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_EchoService_Echo_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_EchoService_EchoBody_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_EchoService_EchoBody_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_EchoService_EchoBody_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_EchoService_EchoDelete_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_EchoService_EchoDelete_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_EchoService_EchoDelete_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + return nil +} + +var ( + pattern_EchoService_Echo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v1", "example", "echo", "id"}, "")) + + pattern_EchoService_Echo_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"v1", "example", "echo", "id", "num"}, "")) + + pattern_EchoService_EchoBody_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "example", "echo_body"}, "")) + + pattern_EchoService_EchoDelete_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "example", "echo_delete"}, "")) +) + +var ( + forward_EchoService_Echo_0 = runtime.ForwardResponseMessage + + forward_EchoService_Echo_1 = runtime.ForwardResponseMessage + + forward_EchoService_EchoBody_0 = runtime.ForwardResponseMessage + + forward_EchoService_EchoDelete_0 = runtime.ForwardResponseMessage +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.proto similarity index 76% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.proto rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.proto index 44555e85..506917c1 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.proto +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.proto @@ -13,6 +13,7 @@ import "google/api/annotations.proto"; message SimpleMessage { // Id represents the message identifier. string id = 1; + int64 num = 2; } // Echo service responds to incoming echo requests. @@ -24,6 +25,9 @@ service EchoService { rpc Echo(SimpleMessage) returns (SimpleMessage) { option (google.api.http) = { post: "/v1/example/echo/{id}" + additional_bindings { + get: "/v1/example/echo/{id}/{num}" + } }; } // EchoBody method receives a simple message and returns it. @@ -33,4 +37,10 @@ service EchoService { body: "*" }; } + // EchoDelete method receives a simple message and returns it. 
+ rpc EchoDelete(SimpleMessage) returns (SimpleMessage) { + option (google.api.http) = { + delete: "/v1/example/echo_delete" + }; + } } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.swagger.json b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.swagger.json similarity index 53% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.swagger.json rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.swagger.json index 9380472a..f76f9289 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/echo_service.swagger.json +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/echo_service.swagger.json @@ -42,6 +42,39 @@ ] } }, + "/v1/example/echo/{id}/{num}": { + "get": { + "summary": "Echo method receives a simple message and returns it.", + "description": "The message posted as the id parameter will also be\nreturned.", + "operationId": "Echo2", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/examplepbSimpleMessage" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "num", + "in": "path", + "required": true, + "type": "string", + "format": "int64" + } + ], + "tags": [ + "EchoService" + ] + } + }, "/v1/example/echo_body": { "post": { "summary": "EchoBody method receives a simple message and returns it.", @@ -68,6 +101,39 @@ "EchoService" ] } + }, + "/v1/example/echo_delete": { + "delete": { + "summary": "EchoDelete method receives a simple message and returns it.", + "operationId": "EchoDelete", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/examplepbSimpleMessage" + } + } + }, + "parameters": [ + { + "name": "id", + "description": "Id represents the message identifier.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "num", + "in": "query", + "required": false, + "type": "string", + "format": "int64" + } + ], + "tags": [ + "EchoService" + ] + } } }, "definitions": { @@ -77,6 +143,10 @@ "id": { "type": "string", "description": "Id represents the message identifier." + }, + "num": { + "type": "string", + "format": "int64" } }, "description": "SimpleMessage represents a simple message sent to the Echo service." diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.go similarity index 83% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.go index 421c9a87..66e6193c 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: examples/examplepb/flow_combination.proto +// source: examples/proto/examplepb/flow_combination.proto package examplepb @@ -671,52 +671,52 @@ var _FlowCombination_serviceDesc = grpc.ServiceDesc{ ServerStreams: true, }, }, - Metadata: "examples/examplepb/flow_combination.proto", + Metadata: "examples/proto/examplepb/flow_combination.proto", } -func init() { proto.RegisterFile("examples/examplepb/flow_combination.proto", fileDescriptor3) } +func init() { proto.RegisterFile("examples/proto/examplepb/flow_combination.proto", fileDescriptor3) } var fileDescriptor3 = []byte{ - // 656 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x96, 0x3f, 0x8f, 0x12, 0x4f, - 0x18, 0xc7, 0xf3, 0x70, 0xc9, 0x2f, 0xb9, 0xe1, 0xfe, 0x70, 0xcb, 0x2f, 0x08, 0x1c, 0x1e, 0x77, - 0xe3, 0x25, 0xe2, 0xbf, 0x5d, 0x82, 0xd5, 0x51, 0x9e, 0xd1, 0x92, 0x5c, 0xb8, 0xd8, 0x6c, 0x63, - 0x66, 0x87, 0x15, 0x48, 0x60, 0x67, 0x6e, 0x77, 0x0d, 0x5e, 0x08, 0x31, 0xb1, 0xb1, 0xb4, 0xf0, - 0x05, 0x58, 0x5a, 0xf9, 0x06, 0xec, 0xac, 0x6c, 0x4c, 0x2c, 0x4c, 0xec, 0xec, 0xec, 0x7c, 0x13, - 0x66, 0x67, 0x66, 0x77, 0x58, 0x05, 0x37, 0x18, 0xb1, 0xdb, 0x99, 0x79, 0x9e, 0x67, 0x3e, 0xf3, - 0x7d, 0xbe, 0x0f, 0x01, 0xdd, 0x70, 0x9f, 0x92, 0x31, 0x1f, 0xb9, 0x81, 0xa5, 0x3e, 0xb8, 0x63, - 0x3d, 0x1e, 0xb1, 0xc9, 0x23, 0xca, 0xc6, 0xce, 0xd0, 0x23, 0xe1, 0x90, 0x79, 0x26, 0xf7, 0x59, - 0xc8, 0x8c, 0x7a, 0xdf, 0xe7, 0xd4, 0xec, 0x93, 0xd0, 0x9d, 0x90, 0x4b, 0x33, 0xce, 0x33, 0x93, - 0xbc, 0x6a, 0xad, 0xcf, 0x58, 0x7f, 0xe4, 0x5a, 0x84, 0x0f, 0x2d, 0xe2, 0x79, 0x2c, 0x14, 0xd9, - 0x81, 0x4c, 0xc7, 0x5b, 0x08, 0xdd, 0x1f, 0xf3, 0xf0, 0xf2, 0x4c, 0xac, 0x4e, 0xd0, 0x76, 0x87, - 0x79, 0x7a, 0xc3, 0xd8, 0x42, 0x40, 0xca, 0x70, 0x08, 0x8d, 0xcd, 0x2e, 0x90, 0x68, 0xe5, 0x94, - 0x73, 0x72, 0xe5, 0x44, 0x2b, 0x5a, 0xde, 0x90, 0x2b, 0x8a, 0x0f, 0x10, 0x7a, 0xe8, 0x11, 0x5f, - 0xe5, 0x15, 0xd0, 0x46, 0x10, 0xfa, 0x2a, 0x33, 0xfa, 0xc4, 0x3d, 0x94, 0xef, 0xb8, 0x41, 0xe8, - 0xf6, 0x64, 0xc0, 0x49, 0x5c, 0x38, 0xdf, 0xba, 0x65, 0x66, 0x3c, 0xc1, 0xd4, 0x85, 0xb3, 0x28, - 0x3a, 0x68, 0xef, 0x7c, 0xe8, 0xf5, 0x47, 0xee, 0xdf, 0xb9, 0xab, 0xf5, 0x71, 0x17, 0xed, 0x3e, - 0x18, 0xb1, 0xc9, 0x3d, 0xad, 0xbb, 0xf1, 0x0c, 0xe5, 0xbb, 0x9c, 0x0a, 0x91, 0xba, 0x9c, 0x1a, - 0xd9, 0x25, 0xb5, 0x9e, 0xd5, 0x55, 0x82, 0x71, 0xe9, 0xf9, 0xe7, 0x6f, 0xaf, 0x72, 0x05, 0xbc, - 0x63, 0xf9, 0x9c, 0x5a, 0x6e, 0x74, 0x10, 0x7d, 0x19, 0x2f, 0x00, 0xed, 0xc4, 0x04, 0xe7, 0xa1, - 0xef, 0x92, 0xf1, 0x1a, 0x21, 0x2a, 0x02, 0xa2, 0x88, 0xf7, 0xe6, 0x20, 0x02, 0x71, 0x69, 0x13, - 0x04, 0x89, 0x24, 0xf8, 0x07, 0x72, 0x68, 0x12, 0x79, 0xbf, 0x56, 0xa4, 0x01, 0xc6, 0x4b, 0x40, - 0x7b, 0x73, 0x24, 0x6b, 0x97, 0xa5, 0x26, 0x60, 0x4a, 0xf8, 0xff, 0x34, 0x8c, 0x5c, 0x34, 0xa0, - 0x09, 0xc6, 0xdb, 0x1c, 0x42, 0x5d, 0x4e, 0x4f, 0x59, 0x4f, 0xe8, 0x62, 0x66, 0x56, 0x4f, 0x4d, - 0xde, 0x6a, 0x34, 0xef, 0x41, 0xe0, 0xbc, 0x03, 0xbc, 0x2d, 0xda, 0xe4, 0xb0, 0x9e, 0x10, 0xa6, - 0x0d, 0x37, 0xed, 0x7d, 0x5c, 0x11, 0x7b, 0x9c, 0x84, 0x03, 0x6b, 0x4a, 0x66, 0xd6, 0xd4, 0x99, - 0x59, 0x53, 0x3a, 0x8b, 0x36, 0xed, 0xd8, 0x5c, 0x17, 0x4f, 0x5c, 0x5f, 0x64, 0xd8, 0x75, 0x5c, - 0xd5, 0x25, 0x52, 0x39, 0xa2, 0x1e, 0xb5, 0xcb, 0xb8, 0xa8, 0x03, 0x92, 0xbc, 0xe8, 0xe4, 0x08, - 0xd7, 0x16, 0xa4, 0xa6, 0x42, 0x2a, 0xf8, 0x4a, 0x1a, 0x26, 0x39, 0x35, 0x5e, 0x03, 0x2a, 0x75, - 0x39, 0x3d, 0x23, 0xe1, 0x60, 0x7e, 0x84, 0x23, 0xed, 0x5a, 0x99, 0x5a, 0xfc, 0x32, 0xf4, 0xab, - 0xe9, 0x77, 0x2c, 0xe4, 0x3b, 0x50, 0xfc, 0x11, 0xdc, 0x1d, 0x4f, 0xd4, 
0xb2, 0xa6, 0xc4, 0x0c, - 0x42, 0x5f, 0x3c, 0xde, 0xf8, 0x0a, 0xa8, 0xa0, 0x08, 0x35, 0xdb, 0xed, 0xec, 0xbe, 0xfe, 0x29, - 0x95, 0x27, 0xa8, 0x06, 0xf8, 0x70, 0x29, 0xd5, 0x5c, 0x5b, 0x32, 0xe0, 0x93, 0xe6, 0x2c, 0x39, - 0x6f, 0x03, 0x35, 0x3e, 0xe4, 0xd0, 0xb6, 0x72, 0xac, 0x9a, 0x9f, 0xb5, 0x9a, 0xf6, 0x8b, 0x34, - 0xed, 0x27, 0xc0, 0x05, 0x6d, 0x1b, 0x39, 0x40, 0x91, 0x6f, 0xe7, 0x1f, 0x94, 0xf2, 0xad, 0x0c, - 0xb1, 0xe3, 0x9f, 0x24, 0xe9, 0x20, 0xb5, 0x89, 0xf1, 0xd5, 0x25, 0xee, 0x8d, 0x0b, 0x53, 0x7b, - 0x1f, 0x97, 0x7e, 0x36, 0xb0, 0x3e, 0x3c, 0xc6, 0xf5, 0xa5, 0x1e, 0xd6, 0x51, 0x35, 0x35, 0x24, - 0x0b, 0x03, 0x9a, 0x60, 0xbc, 0x01, 0x54, 0x59, 0xe0, 0x65, 0xa5, 0xea, 0xda, 0xed, 0x7c, 0x5d, - 0x08, 0x7b, 0xa4, 0x9e, 0xb2, 0xa8, 0xe3, 0x09, 0xe9, 0x77, 0x40, 0xc5, 0x94, 0xa7, 0x15, 0xe3, - 0x1a, 0x6d, 0x3d, 0x11, 0x74, 0x17, 0xf8, 0xda, 0x6f, 0x6d, 0xad, 0xc5, 0xce, 0x7e, 0x47, 0xd2, - 0xb5, 0xe5, 0x21, 0x6d, 0xa0, 0x4d, 0x38, 0xcd, 0xdb, 0x9b, 0x09, 0x92, 0xf3, 0x9f, 0xf8, 0x07, - 0x74, 0xf7, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0xaf, 0x85, 0xaf, 0x3c, 0x6d, 0x09, 0x00, 0x00, + // 655 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x96, 0xbf, 0x8f, 0x12, 0x41, + 0x14, 0xc7, 0xf3, 0xb8, 0xc4, 0xe4, 0x86, 0xfb, 0xc1, 0x2d, 0x06, 0x81, 0xc3, 0xe3, 0x6e, 0xbc, + 0x44, 0xe2, 0x8f, 0x5d, 0x82, 0xd5, 0x51, 0x9e, 0xd1, 0x92, 0x5c, 0xb8, 0xd8, 0x6c, 0x63, 0x66, + 0x87, 0x15, 0x48, 0x60, 0x67, 0x6e, 0x77, 0x0d, 0x5e, 0x08, 0x31, 0xb1, 0xb1, 0xb4, 0xf0, 0x0f, + 0xb0, 0xb4, 0xf2, 0x1f, 0xb0, 0xb3, 0xb2, 0x31, 0xb1, 0x30, 0xb1, 0xb3, 0xb3, 0xf3, 0x9f, 0x30, + 0xfb, 0xf6, 0xc7, 0xb0, 0x0a, 0x6e, 0x30, 0x62, 0xb7, 0x6f, 0xe6, 0xbd, 0x37, 0x9f, 0xf9, 0xce, + 0xf7, 0x11, 0x88, 0x61, 0x3f, 0x63, 0x63, 0x39, 0xb2, 0x3d, 0x43, 0xba, 0xc2, 0x17, 0x71, 0x28, + 0x2d, 0xe3, 0xc9, 0x48, 0x4c, 0x1e, 0x73, 0x31, 0xb6, 0x86, 0x0e, 0xf3, 0x87, 0xc2, 0xd1, 0x31, + 0x41, 0xab, 0xf7, 0x5d, 0xc9, 0xf5, 0x3e, 0xf3, 0xed, 0x09, 0xbb, 0xd4, 0xe3, 0x6a, 0x3d, 0xa9, + 0xab, 0xd6, 0xfa, 0x42, 0xf4, 0x47, 0xb6, 0xc1, 0xe4, 0xd0, 0x60, 0x8e, 0x23, 0x7c, 0xac, 0xf6, + 0xc2, 0x72, 0xba, 0x45, 0xc8, 0x83, 0xb1, 0xf4, 0x2f, 0xcf, 0x30, 0x3a, 0x21, 0xdb, 0x1d, 0xe1, + 0xa8, 0x05, 0x6d, 0x8b, 0x00, 0x2b, 0xc3, 0x21, 0x34, 0x36, 0xbb, 0xc0, 0x82, 0xc8, 0x2a, 0xe7, + 0xc2, 0xc8, 0x0a, 0x22, 0x5e, 0xde, 0x08, 0x23, 0x4e, 0x0f, 0x08, 0x79, 0xe4, 0x30, 0x37, 0xaa, + 0x2b, 0x90, 0x0d, 0xcf, 0x77, 0xa3, 0xca, 0xe0, 0x93, 0xf6, 0x48, 0xbe, 0x63, 0x7b, 0xbe, 0xdd, + 0x0b, 0x13, 0x4e, 0xe2, 0xc6, 0xf9, 0xd6, 0x6d, 0x3d, 0xe3, 0x0a, 0xba, 0x6a, 0x9c, 0x45, 0xd1, + 0x21, 0x7b, 0xe7, 0x43, 0xa7, 0x3f, 0xb2, 0xff, 0xcd, 0x59, 0xad, 0x4f, 0xbb, 0x64, 0xf7, 0xe1, + 0x48, 0x4c, 0xee, 0x2b, 0xdd, 0xb5, 0xe7, 0x24, 0xdf, 0x95, 0x1c, 0x45, 0xea, 0x4a, 0xae, 0x65, + 0xb7, 0x54, 0x7a, 0x56, 0x57, 0x49, 0xa6, 0xa5, 0x17, 0x5f, 0xbe, 0xbf, 0xce, 0x15, 0xe8, 0x8e, + 0xe1, 0x4a, 0x6e, 0xd8, 0xc1, 0x46, 0xf0, 0xa5, 0xbd, 0x04, 0xb2, 0x13, 0x13, 0x9c, 0xfb, 0xae, + 0xcd, 0xc6, 0x6b, 0x84, 0xa8, 0x20, 0x44, 0x91, 0xee, 0xcd, 0x41, 0x78, 0x78, 0x68, 0x13, 0x90, + 0x24, 0x24, 0xf8, 0x0f, 0x72, 0x28, 0x92, 0xf0, 0x7c, 0xa5, 0x48, 0x03, 0xb4, 0x57, 0x40, 0xf6, + 0xe6, 0x48, 0xd6, 0x2e, 0x4b, 0x0d, 0x61, 0x4a, 0xf4, 0x6a, 0x1a, 0x26, 0x0c, 0x1a, 0xd0, 0x04, + 0xed, 0x5d, 0x8e, 0x90, 0xae, 0xe4, 0xa7, 0xa2, 0x87, 0xba, 0xe8, 0x99, 0xdd, 0x53, 0x93, 0xb7, + 0x1a, 0xcd, 0x07, 0x40, 0x9c, 0xf7, 0x40, 0xb7, 0xf1, 0x99, 0x2c, 0xd1, 0x43, 0x61, 0xda, 0x70, + 0xcb, 0xdc, 0xa7, 0x15, 0x5c, 0x93, 0xcc, 0x1f, 
0x18, 0x53, 0x36, 0x33, 0xa6, 0xd6, 0xcc, 0x98, + 0xf2, 0x59, 0xb0, 0x68, 0xc6, 0xe6, 0xba, 0x78, 0x6a, 0xbb, 0x58, 0x61, 0xd6, 0x69, 0x55, 0xb5, + 0x48, 0xd5, 0x60, 0x3f, 0x6e, 0x96, 0x69, 0x51, 0x25, 0x24, 0x75, 0xc1, 0xce, 0x11, 0xad, 0x2d, + 0x28, 0x4d, 0xa5, 0x54, 0xe8, 0xb5, 0x34, 0x4c, 0xb2, 0xab, 0xbd, 0x01, 0x52, 0xea, 0x4a, 0x7e, + 0xc6, 0xfc, 0xc1, 0xfc, 0x08, 0x07, 0xda, 0xb5, 0x32, 0xb5, 0xf8, 0x6d, 0xe8, 0x57, 0xd3, 0xef, + 0x18, 0xe5, 0x3b, 0x88, 0xf8, 0x03, 0xb8, 0xbb, 0x0e, 0xf6, 0x32, 0xa6, 0x4c, 0xf7, 0x7c, 0x17, + 0x2f, 0xaf, 0x7d, 0x03, 0x52, 0x88, 0x08, 0x15, 0xdb, 0x9d, 0xec, 0x77, 0xfd, 0x5b, 0x2a, 0x07, + 0xa9, 0x06, 0xf4, 0x70, 0x29, 0xd5, 0xdc, 0xb3, 0x64, 0xc0, 0x27, 0x8f, 0xb3, 0x64, 0xbf, 0x0d, + 0x5c, 0xfb, 0x98, 0x23, 0xdb, 0x91, 0x63, 0xa3, 0xf9, 0x59, 0xab, 0x69, 0xbf, 0x86, 0xa6, 0xfd, + 0x0c, 0xb4, 0xa0, 0x6c, 0x13, 0x0e, 0x50, 0xe0, 0xdb, 0xf9, 0x0b, 0xa5, 0x7c, 0x1b, 0xa6, 0x98, + 0xf1, 0x4f, 0x52, 0xe8, 0xa0, 0x68, 0x91, 0xd2, 0xeb, 0x4b, 0xdc, 0x1b, 0x37, 0xe6, 0xe6, 0x3e, + 0x2d, 0xfd, 0x6a, 0x60, 0xb5, 0x79, 0x4c, 0xeb, 0x4b, 0x3d, 0xac, 0xb2, 0x6a, 0xd1, 0x90, 0x2c, + 0x4c, 0x68, 0x82, 0xf6, 0x16, 0x48, 0x65, 0x81, 0x97, 0x23, 0x55, 0xd7, 0x6e, 0xe7, 0x9b, 0x28, + 0xec, 0x51, 0x74, 0x95, 0x45, 0x2f, 0x9e, 0x90, 0xfe, 0x00, 0x52, 0x4c, 0x79, 0x3a, 0x62, 0x5c, + 0xa3, 0xad, 0x27, 0x48, 0x77, 0x41, 0x6f, 0xfc, 0xd1, 0xd6, 0x4a, 0xec, 0xec, 0x7b, 0x24, 0xaf, + 0xb6, 0x3c, 0xa5, 0x0d, 0xbc, 0x09, 0xa7, 0x79, 0x73, 0x33, 0x41, 0xb2, 0xae, 0xe0, 0x3f, 0xa0, + 0x7b, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x36, 0xcd, 0x7d, 0x1f, 0x73, 0x09, 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.gw.go similarity index 92% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.gw.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.gw.go index a18997ce..9bc32889 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.pb.gw.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.pb.gw.go @@ -1,6 +1,5 @@ -// Code generated by protoc-gen-grpc-gateway -// source: examples/examplepb/flow_combination.proto -// DO NOT EDIT! +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: examples/proto/examplepb/flow_combination.proto /* Package examplepb is a reverse proxy. 
@@ -106,7 +105,7 @@ func request_FlowCombination_StreamEmptyStream_0(ctx context.Context, marshaler dec := marshaler.NewDecoder(req.Body) handleSend := func() error { var protoReq EmptyProto - err = dec.Decode(&protoReq) + err := dec.Decode(&protoReq) if err == io.EOF { return err } @@ -114,7 +113,7 @@ func request_FlowCombination_StreamEmptyStream_0(ctx context.Context, marshaler grpclog.Printf("Failed to decode request: %v", err) return err } - if err = stream.Send(&protoReq); err != nil { + if err := stream.Send(&protoReq); err != nil { grpclog.Printf("Failed to send request: %v", err) return err } @@ -152,7 +151,7 @@ func request_FlowCombination_RpcBodyRpc_0(ctx context.Context, marshaler runtime var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -180,7 +179,7 @@ func request_FlowCombination_RpcBodyRpc_1(ctx context.Context, marshaler runtime protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } val, ok = pathParams["b"] @@ -191,7 +190,7 @@ func request_FlowCombination_RpcBodyRpc_1(ctx context.Context, marshaler runtime protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } val, ok = pathParams["c"] @@ -202,7 +201,7 @@ func request_FlowCombination_RpcBodyRpc_1(ctx context.Context, marshaler runtime protoReq.C, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "c", err) } msg, err := client.RpcBodyRpc(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -231,7 +230,7 @@ func request_FlowCombination_RpcBodyRpc_3(ctx context.Context, marshaler runtime var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -250,7 +249,7 @@ func request_FlowCombination_RpcBodyRpc_3(ctx context.Context, marshaler runtime protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } val, ok = pathParams["b"] @@ -261,7 +260,7 @@ func request_FlowCombination_RpcBodyRpc_3(ctx context.Context, marshaler runtime protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } msg, err := client.RpcBodyRpc(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) @@ -277,7 +276,7 @@ func request_FlowCombination_RpcBodyRpc_4(ctx context.Context, marshaler runtime var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { 
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -298,7 +297,7 @@ func request_FlowCombination_RpcBodyRpc_5(ctx context.Context, marshaler runtime var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -317,7 +316,7 @@ func request_FlowCombination_RpcBodyRpc_5(ctx context.Context, marshaler runtime protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcBodyRpc_5); err != nil { @@ -352,7 +351,7 @@ func request_FlowCombination_RpcBodyRpc_6(ctx context.Context, marshaler runtime protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcBodyRpc_6); err != nil { @@ -387,7 +386,7 @@ func request_FlowCombination_RpcPathSingleNestedRpc_0(ctx context.Context, marsh err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathSingleNestedRpc_0); err != nil { @@ -407,7 +406,7 @@ func request_FlowCombination_RpcPathNestedRpc_0(ctx context.Context, marshaler r var protoReq NestedProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -426,7 +425,7 @@ func request_FlowCombination_RpcPathNestedRpc_0(ctx context.Context, marshaler r err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } val, ok = pathParams["b"] @@ -437,7 +436,7 @@ func request_FlowCombination_RpcPathNestedRpc_0(ctx context.Context, marshaler r protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedRpc_0); err != nil { @@ -472,7 +471,7 @@ func request_FlowCombination_RpcPathNestedRpc_1(ctx context.Context, marshaler r err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedRpc_1); err != nil { @@ -492,7 +491,7 @@ func request_FlowCombination_RpcPathNestedRpc_2(ctx context.Context, marshaler r var protoReq NestedProto var metadata 
runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -511,7 +510,7 @@ func request_FlowCombination_RpcPathNestedRpc_2(ctx context.Context, marshaler r err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedRpc_2); err != nil { @@ -527,7 +526,7 @@ func request_FlowCombination_RpcBodyStream_0(ctx context.Context, marshaler runt var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -563,7 +562,7 @@ func request_FlowCombination_RpcBodyStream_1(ctx context.Context, marshaler runt protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } val, ok = pathParams["b"] @@ -574,7 +573,7 @@ func request_FlowCombination_RpcBodyStream_1(ctx context.Context, marshaler runt protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } val, ok = pathParams["c"] @@ -585,7 +584,7 @@ func request_FlowCombination_RpcBodyStream_1(ctx context.Context, marshaler runt protoReq.C, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "c", err) } stream, err := client.RpcBodyStream(ctx, &protoReq) @@ -630,7 +629,7 @@ func request_FlowCombination_RpcBodyStream_3(ctx context.Context, marshaler runt var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -649,7 +648,7 @@ func request_FlowCombination_RpcBodyStream_3(ctx context.Context, marshaler runt protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } val, ok = pathParams["b"] @@ -660,7 +659,7 @@ func request_FlowCombination_RpcBodyStream_3(ctx context.Context, marshaler runt protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } stream, err := client.RpcBodyStream(ctx, &protoReq) @@ -684,7 +683,7 @@ func request_FlowCombination_RpcBodyStream_4(ctx context.Context, marshaler runt var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, 
metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -713,7 +712,7 @@ func request_FlowCombination_RpcBodyStream_5(ctx context.Context, marshaler runt var protoReq NonEmptyProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -732,7 +731,7 @@ func request_FlowCombination_RpcBodyStream_5(ctx context.Context, marshaler runt protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcBodyStream_5); err != nil { @@ -775,7 +774,7 @@ func request_FlowCombination_RpcBodyStream_6(ctx context.Context, marshaler runt protoReq.A, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcBodyStream_6); err != nil { @@ -818,7 +817,7 @@ func request_FlowCombination_RpcPathSingleNestedStream_0(ctx context.Context, ma err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathSingleNestedStream_0); err != nil { @@ -846,7 +845,7 @@ func request_FlowCombination_RpcPathNestedStream_0(ctx context.Context, marshale var protoReq NestedProto var metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -865,7 +864,7 @@ func request_FlowCombination_RpcPathNestedStream_0(ctx context.Context, marshale err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } val, ok = pathParams["b"] @@ -876,7 +875,7 @@ func request_FlowCombination_RpcPathNestedStream_0(ctx context.Context, marshale protoReq.B, err = runtime.String(val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "b", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedStream_0); err != nil { @@ -919,7 +918,7 @@ func request_FlowCombination_RpcPathNestedStream_1(ctx context.Context, marshale err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedStream_1); err != nil { @@ -947,7 +946,7 @@ func request_FlowCombination_RpcPathNestedStream_2(ctx context.Context, marshale var protoReq NestedProto var 
metadata runtime.ServerMetadata - if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.C); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -966,7 +965,7 @@ func request_FlowCombination_RpcPathNestedStream_2(ctx context.Context, marshale err = runtime.PopulateFieldFromPath(&protoReq, "a.str", val) if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "a.str", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_FlowCombination_RpcPathNestedStream_2); err != nil { @@ -1014,10 +1013,18 @@ func RegisterFlowCombinationHandlerFromEndpoint(ctx context.Context, mux *runtim // RegisterFlowCombinationHandler registers the http handlers for service FlowCombination to "mux". // The handlers forward requests to the grpc endpoint over "conn". func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewFlowCombinationClient(conn) + return RegisterFlowCombinationHandlerClient(ctx, mux, NewFlowCombinationClient(conn)) +} + +// RegisterFlowCombinationHandler registers the http handlers for service FlowCombination to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "FlowCombinationClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "FlowCombinationClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "FlowCombinationClient" to call the correct interceptors. +func RegisterFlowCombinationHandlerClient(ctx context.Context, mux *runtime.ServeMux, client FlowCombinationClient) error { mux.Handle("POST", pattern_FlowCombination_RpcEmptyRpc_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1046,7 +1053,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcEmptyStream_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1075,7 +1082,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_StreamEmptyRpc_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1104,7 +1111,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_StreamEmptyStream_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) 
{ @@ -1133,7 +1140,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1162,7 +1169,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1191,7 +1198,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1220,7 +1227,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1249,7 +1256,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_4, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1278,7 +1285,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_5, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1307,7 +1314,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyRpc_6, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1336,7 +1343,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathSingleNestedRpc_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ 
-1365,7 +1372,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedRpc_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1394,7 +1401,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedRpc_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1423,7 +1430,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedRpc_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1452,7 +1459,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1481,7 +1488,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1510,7 +1517,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1539,7 +1546,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1568,7 +1575,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_4, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed 
<-chan bool) { @@ -1597,7 +1604,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_5, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1626,7 +1633,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcBodyStream_6, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1655,7 +1662,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathSingleNestedStream_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1684,7 +1691,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedStream_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1713,7 +1720,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedStream_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -1742,7 +1749,7 @@ func RegisterFlowCombinationHandler(ctx context.Context, mux *runtime.ServeMux, }) mux.Handle("POST", pattern_FlowCombination_RpcPathNestedStream_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.proto similarity index 100% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/flow_combination.proto rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/flow_combination.proto diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.go similarity index 77% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.go index 71066ccd..1f1a74bf 100644 
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: examples/examplepb/stream.proto +// source: examples/proto/examplepb/stream.proto package examplepb @@ -8,7 +8,7 @@ import fmt "fmt" import math "math" import _ "google.golang.org/genproto/googleapis/api/annotations" import google_protobuf1 "github.com/golang/protobuf/ptypes/empty" -import grpc_gateway_examples_sub "github.com/grpc-ecosystem/grpc-gateway/examples/sub" +import grpc_gateway_examples_sub "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" import ( context "golang.org/x/net/context" @@ -248,31 +248,31 @@ var _StreamService_serviceDesc = grpc.ServiceDesc{ ClientStreams: true, }, }, - Metadata: "examples/examplepb/stream.proto", + Metadata: "examples/proto/examplepb/stream.proto", } -func init() { proto.RegisterFile("examples/examplepb/stream.proto", fileDescriptor2) } +func init() { proto.RegisterFile("examples/proto/examplepb/stream.proto", fileDescriptor2) } var fileDescriptor2 = []byte{ - // 314 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0xbf, 0x4a, 0x43, 0x31, - 0x14, 0xc6, 0xb9, 0x2a, 0xa2, 0x11, 0x97, 0x0c, 0x0e, 0x51, 0x28, 0x16, 0xc1, 0x2a, 0x92, 0xb4, - 0xba, 0xb9, 0x59, 0xe9, 0xa6, 0x38, 0x74, 0x73, 0x29, 0xc9, 0xe5, 0x34, 0x0d, 0xbd, 0xf7, 0x26, - 0x24, 0xe7, 0x56, 0x0b, 0x4e, 0x8e, 0xae, 0x7d, 0x11, 0xdf, 0xc5, 0x57, 0xf0, 0x41, 0xa4, 0xf7, - 0xdf, 0xd4, 0xd2, 0xba, 0x25, 0x9c, 0x2f, 0xf9, 0x7e, 0xe7, 0x47, 0x5a, 0xf0, 0x2e, 0x53, 0x97, - 0x40, 0x10, 0xd5, 0xc1, 0x29, 0x11, 0xd0, 0x83, 0x4c, 0xb9, 0xf3, 0x16, 0x2d, 0x6d, 0x69, 0xef, - 0x62, 0xae, 0x25, 0xc2, 0x9b, 0x9c, 0xf3, 0x3a, 0xcd, 0x9b, 0x34, 0x3b, 0xd3, 0xd6, 0xea, 0x04, - 0x84, 0x74, 0x46, 0xc8, 0x2c, 0xb3, 0x28, 0xd1, 0xd8, 0x2c, 0x94, 0xcf, 0xd9, 0x69, 0x35, 0x2d, - 0x6e, 0x2a, 0x1f, 0x0b, 0x48, 0x1d, 0xce, 0xab, 0xe1, 0xcd, 0x8a, 0x72, 0x39, 0x52, 0x06, 0x47, - 0x76, 0x3c, 0x82, 0x19, 0xf8, 0x39, 0x4e, 0x4c, 0xa6, 0xab, 0x34, 0x6b, 0xd2, 0x21, 0x57, 0x22, - 0x85, 0x10, 0xa4, 0x86, 0x72, 0x76, 0xfb, 0xbd, 0x4b, 0x8e, 0x87, 0x05, 0xf6, 0x10, 0xfc, 0xcc, - 0xc4, 0x40, 0xbf, 0x22, 0x42, 0xfa, 0x79, 0x32, 0x7d, 0xf4, 0x20, 0x11, 0x68, 0x8f, 0x6f, 0xd8, - 0x83, 0x3f, 0xf4, 0x0d, 0xbe, 0x8c, 0x07, 0x4d, 0x2b, 0x3b, 0xe1, 0x25, 0x3b, 0xaf, 0xd9, 0xf9, - 0x60, 0xc9, 0xde, 0x16, 0x9f, 0x3f, 0xbf, 0x8b, 0x9d, 0xab, 0xf6, 0x85, 0x98, 0xf5, 0x6a, 0xf0, - 0x55, 0xd8, 0x42, 0xe5, 0xc9, 0xf4, 0x3e, 0xba, 0xee, 0x44, 0xf4, 0x83, 0xec, 0x3d, 0x99, 0x80, - 0x74, 0xcd, 0x97, 0xec, 0xff, 0x74, 0xed, 0xcb, 0x82, 0xe2, 0x9c, 0xb6, 0x36, 0x50, 0x74, 0x23, - 0xba, 0x88, 0xc8, 0xc1, 0x52, 0xc5, 0x20, 0x9e, 0x58, 0xda, 0x59, 0x53, 0x15, 0x72, 0xc5, 0x87, - 0xe8, 0x4d, 0xa6, 0x9f, 0x4b, 0xb3, 0x6c, 0xeb, 0xe4, 0xf6, 0x46, 0x20, 0x9e, 0xd8, 0xc2, 0x48, - 0x37, 0xea, 0x1f, 0xbd, 0x1e, 0x36, 0xeb, 0xa9, 0xfd, 0x42, 0xc8, 0xdd, 0x5f, 0x00, 0x00, 0x00, - 0xff, 0xff, 0xbc, 0x52, 0x49, 0x85, 0x8f, 0x02, 0x00, 0x00, + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x86, 0x15, 0x40, 0x08, 0x8c, 0x58, 0x3c, 0x30, 0x04, 0xa4, 0x42, 0x05, 0xa2, 0x30, 0xd8, + 0x6d, 0xd9, 0xd8, 0x28, 0xea, 0x06, 0x62, 0xe8, 0xc6, 0x52, 0xd9, 0xd1, 0xd5, 0xb5, 0x9a, 0xc4, + 0x96, 0x7d, 0x29, 0x54, 0x62, 0x62, 0x64, 0xed, 0x8b, 0xf0, 
0x2e, 0xbc, 0x02, 0x0f, 0x82, 0xea, + 0xa4, 0x1d, 0x10, 0x51, 0xcb, 0x78, 0xbe, 0xff, 0xf7, 0xfd, 0xdf, 0x4f, 0x2e, 0xe0, 0x55, 0x64, + 0x36, 0x05, 0xcf, 0xad, 0x33, 0x68, 0x78, 0x35, 0x5a, 0xc9, 0x3d, 0x3a, 0x10, 0x19, 0x0b, 0xcf, + 0xb4, 0xa1, 0x9c, 0x4d, 0x98, 0x12, 0x08, 0x2f, 0x62, 0xc6, 0x96, 0x1e, 0xb6, 0x52, 0xc7, 0x27, + 0xca, 0x18, 0x95, 0x02, 0x17, 0x56, 0x73, 0x91, 0xe7, 0x06, 0x05, 0x6a, 0x93, 0xfb, 0xd2, 0x1e, + 0x1f, 0x57, 0xdb, 0x30, 0xc9, 0x62, 0xc4, 0x21, 0xb3, 0x38, 0xab, 0x96, 0xdd, 0xda, 0x08, 0x62, + 0x28, 0x35, 0x0e, 0xcd, 0x68, 0x08, 0x53, 0x70, 0x33, 0x1c, 0xeb, 0x5c, 0x55, 0x9e, 0xd3, 0x5f, + 0x1e, 0x5f, 0x48, 0x9e, 0x81, 0xf7, 0x42, 0x41, 0xa9, 0xe8, 0x7e, 0x6e, 0x93, 0xc3, 0x41, 0x40, + 0x18, 0x80, 0x9b, 0xea, 0x04, 0xe8, 0x47, 0x44, 0x48, 0xaf, 0x48, 0x27, 0xf7, 0x0e, 0x04, 0x02, + 0xed, 0xb0, 0x35, 0x4c, 0xec, 0xae, 0xa7, 0xf1, 0x69, 0xd4, 0x5f, 0xdd, 0x8e, 0x8f, 0x58, 0xc9, + 0xc1, 0x96, 0x1c, 0xac, 0xbf, 0xe0, 0x68, 0xf2, 0xf7, 0xaf, 0xef, 0xf9, 0xd6, 0x55, 0xf3, 0x9c, + 0x4f, 0x3b, 0xcb, 0xf8, 0x7f, 0x85, 0xe7, 0xb2, 0x48, 0x27, 0xb7, 0xd1, 0x75, 0x2b, 0xa2, 0x6f, + 0x64, 0xe7, 0x41, 0x7b, 0xa4, 0x35, 0x5f, 0xc6, 0xff, 0x4f, 0xd7, 0xbc, 0x0c, 0x29, 0xce, 0x68, + 0x63, 0x4d, 0x8a, 0x76, 0x44, 0xe7, 0x11, 0xd9, 0x5b, 0x54, 0xd1, 0x4f, 0xc6, 0x86, 0xb6, 0x6a, + 0x4e, 0xf9, 0x42, 0xb2, 0x01, 0x3a, 0x9d, 0xab, 0xc7, 0xb2, 0xd9, 0x78, 0x63, 0xe5, 0xe6, 0x8d, + 0x40, 0x32, 0x36, 0xa1, 0x91, 0x76, 0xd4, 0x3b, 0x78, 0xde, 0x5f, 0xe1, 0xc9, 0xdd, 0x50, 0xc8, + 0xcd, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x63, 0x7a, 0xd9, 0xa1, 0x02, 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.gw.go similarity index 88% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.gw.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.gw.go index b72b1d15..642d59a2 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.pb.gw.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.pb.gw.go @@ -1,6 +1,5 @@ -// Code generated by protoc-gen-grpc-gateway -// source: examples/examplepb/stream.proto -// DO NOT EDIT! +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: examples/proto/examplepb/stream.proto /* Package examplepb is a reverse proxy. @@ -15,7 +14,7 @@ import ( "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes/empty" - "github.com/grpc-ecosystem/grpc-gateway/examples/sub" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/grpc-ecosystem/grpc-gateway/utilities" "golang.org/x/net/context" @@ -99,7 +98,7 @@ func request_StreamService_BulkEcho_0(ctx context.Context, marshaler runtime.Mar dec := marshaler.NewDecoder(req.Body) handleSend := func() error { var protoReq sub.StringMessage - err = dec.Decode(&protoReq) + err := dec.Decode(&protoReq) if err == io.EOF { return err } @@ -107,7 +106,7 @@ func request_StreamService_BulkEcho_0(ctx context.Context, marshaler runtime.Mar grpclog.Printf("Failed to decode request: %v", err) return err } - if err = stream.Send(&protoReq); err != nil { + if err := stream.Send(&protoReq); err != nil { grpclog.Printf("Failed to send request: %v", err) return err } @@ -169,10 +168,18 @@ func RegisterStreamServiceHandlerFromEndpoint(ctx context.Context, mux *runtime. 
// RegisterStreamServiceHandler registers the http handlers for service StreamService to "mux". // The handlers forward requests to the grpc endpoint over "conn". func RegisterStreamServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := NewStreamServiceClient(conn) + return RegisterStreamServiceHandlerClient(ctx, mux, NewStreamServiceClient(conn)) +} + +// RegisterStreamServiceHandler registers the http handlers for service StreamService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "StreamServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "StreamServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "StreamServiceClient" to call the correct interceptors. +func RegisterStreamServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client StreamServiceClient) error { mux.Handle("POST", pattern_StreamService_BulkCreate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -201,7 +208,7 @@ func RegisterStreamServiceHandler(ctx context.Context, mux *runtime.ServeMux, co }) mux.Handle("GET", pattern_StreamService_List_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { @@ -230,7 +237,7 @@ func RegisterStreamServiceHandler(ctx context.Context, mux *runtime.ServeMux, co }) mux.Handle("POST", pattern_StreamService_BulkEcho_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := context.WithCancel(req.Context()) defer cancel() if cn, ok := w.(http.CloseNotifier); ok { go func(done <-chan struct{}, closed <-chan bool) { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.proto similarity index 88% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.proto rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.proto index 573a2f4d..66e59dde 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/examplepb/stream.proto +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/stream.proto @@ -4,8 +4,8 @@ package grpc.gateway.examples.examplepb; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; -import "examples/examplepb/a_bit_of_everything.proto"; -import "examples/sub/message.proto"; +import "examples/proto/examplepb/a_bit_of_everything.proto"; +import "examples/proto/sub/message.proto"; // Defines some more operations to be added to ABitOfEverythingService service StreamService { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.go new file mode 100644 index 00000000..be6f14bf --- /dev/null +++ 
b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.go @@ -0,0 +1,179 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: examples/proto/examplepb/wrappers.proto + +package examplepb + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import google_protobuf5 "github.com/golang/protobuf/ptypes/wrappers" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +type Wrappers struct { + StringValue *google_protobuf5.StringValue `protobuf:"bytes,1,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + Int32Value *google_protobuf5.Int32Value `protobuf:"bytes,2,opt,name=int32_value,json=int32Value" json:"int32_value,omitempty"` + Int64Value *google_protobuf5.Int64Value `protobuf:"bytes,3,opt,name=int64_value,json=int64Value" json:"int64_value,omitempty"` + FloatValue *google_protobuf5.FloatValue `protobuf:"bytes,4,opt,name=float_value,json=floatValue" json:"float_value,omitempty"` + DoubleValue *google_protobuf5.DoubleValue `protobuf:"bytes,5,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + BoolValue *google_protobuf5.BoolValue `protobuf:"bytes,6,opt,name=bool_value,json=boolValue" json:"bool_value,omitempty"` +} + +func (m *Wrappers) Reset() { *m = Wrappers{} } +func (m *Wrappers) String() string { return proto.CompactTextString(m) } +func (*Wrappers) ProtoMessage() {} +func (*Wrappers) Descriptor() ([]byte, []int) { return fileDescriptor4, []int{0} } + +func (m *Wrappers) GetStringValue() *google_protobuf5.StringValue { + if m != nil { + return m.StringValue + } + return nil +} + +func (m *Wrappers) GetInt32Value() *google_protobuf5.Int32Value { + if m != nil { + return m.Int32Value + } + return nil +} + +func (m *Wrappers) GetInt64Value() *google_protobuf5.Int64Value { + if m != nil { + return m.Int64Value + } + return nil +} + +func (m *Wrappers) GetFloatValue() *google_protobuf5.FloatValue { + if m != nil { + return m.FloatValue + } + return nil +} + +func (m *Wrappers) GetDoubleValue() *google_protobuf5.DoubleValue { + if m != nil { + return m.DoubleValue + } + return nil +} + +func (m *Wrappers) GetBoolValue() *google_protobuf5.BoolValue { + if m != nil { + return m.BoolValue + } + return nil +} + +func init() { + proto.RegisterType((*Wrappers)(nil), "grpc.gateway.examples.examplepb.Wrappers") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for WrappersService service + +type WrappersServiceClient interface { + Create(ctx context.Context, in *Wrappers, opts ...grpc.CallOption) (*Wrappers, error) +} + +type wrappersServiceClient struct { + cc *grpc.ClientConn +} + +func NewWrappersServiceClient(cc *grpc.ClientConn) WrappersServiceClient { + return &wrappersServiceClient{cc} +} + +func (c *wrappersServiceClient) Create(ctx context.Context, in *Wrappers, opts ...grpc.CallOption) (*Wrappers, error) { + out := new(Wrappers) + err := grpc.Invoke(ctx, "/grpc.gateway.examples.examplepb.WrappersService/Create", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// Server API for WrappersService service + +type WrappersServiceServer interface { + Create(context.Context, *Wrappers) (*Wrappers, error) +} + +func RegisterWrappersServiceServer(s *grpc.Server, srv WrappersServiceServer) { + s.RegisterService(&_WrappersService_serviceDesc, srv) +} + +func _WrappersService_Create_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Wrappers) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WrappersServiceServer).Create(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.gateway.examples.examplepb.WrappersService/Create", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WrappersServiceServer).Create(ctx, req.(*Wrappers)) + } + return interceptor(ctx, in, info, handler) +} + +var _WrappersService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.gateway.examples.examplepb.WrappersService", + HandlerType: (*WrappersServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Create", + Handler: _WrappersService_Create_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "examples/proto/examplepb/wrappers.proto", +} + +func init() { proto.RegisterFile("examples/proto/examplepb/wrappers.proto", fileDescriptor4) } + +var fileDescriptor4 = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd2, 0xcf, 0x4a, 0x3b, 0x31, + 0x10, 0x07, 0x70, 0xb6, 0xfd, 0xfd, 0x8a, 0xcd, 0x0a, 0xc2, 0xe2, 0x41, 0xd7, 0x62, 0xa5, 0x17, + 0xff, 0x1c, 0xb2, 0xd8, 0x96, 0x82, 0x22, 0x08, 0x55, 0x04, 0xaf, 0x16, 0x14, 0xbc, 0x48, 0xd2, + 0xa6, 0x4b, 0x20, 0xee, 0x84, 0x6c, 0xda, 0xea, 0x49, 0xf4, 0x11, 0xf4, 0xd1, 0x7c, 0x05, 0x1f, + 0x44, 0x36, 0x9d, 0x6c, 0xc1, 0x52, 0xf4, 0x36, 0xb3, 0xf9, 0x7e, 0x06, 0x86, 0x1d, 0xb2, 0x2f, + 0x9e, 0xd8, 0xa3, 0x56, 0x22, 0x4f, 0xb4, 0x01, 0x0b, 0x09, 0xb6, 0x9a, 0x27, 0x33, 0xc3, 0xb4, + 0x16, 0x26, 0xa7, 0xee, 0x21, 0x6a, 0xa6, 0x46, 0x0f, 0x69, 0xca, 0xac, 0x98, 0xb1, 0x67, 0xea, + 0x15, 0x2d, 0xf3, 0x71, 0x23, 0x05, 0x48, 0x95, 0x48, 0x98, 0x96, 0x09, 0xcb, 0x32, 0xb0, 0xcc, + 0x4a, 0xc8, 0x90, 0xc7, 0xbb, 0xf8, 0xea, 0x3a, 0x3e, 0x19, 0xff, 0x18, 0xdf, 0x7a, 0xad, 0x92, + 0xb5, 0x3b, 0xfc, 0x14, 0x9d, 0x93, 0xf5, 0xdc, 0x1a, 0x99, 0xa5, 0x0f, 0x53, 0xa6, 0x26, 0x62, + 0x2b, 0xd8, 0x0b, 0x0e, 0xc2, 0x76, 0x83, 0xce, 0x67, 0x50, 0x3f, 0x83, 0x0e, 0x5c, 0xe8, 0xb6, + 0xc8, 0xdc, 0x84, 0xf9, 0xa2, 0x89, 0xce, 0x48, 0x28, 0x33, 0xdb, 0x69, 0xa3, 0xaf, 0x38, 0xbf, + 0xb3, 0xe4, 0xaf, 0x8b, 0xcc, 0x9c, 0x13, 0x59, 0xd6, 0xa8, 0x7b, 0x5d, 0xd4, 0xd5, 0xd5, 0xba, + 0xd7, 0x5d, 0x68, 0xac, 0x0b, 0x3d, 0x56, 0xc0, 0x2c, 0xea, 0x7f, 0x2b, 0xf4, 0x55, 0x91, 0x41, + 0x3d, 0x2e, 0xeb, 0x62, 0xf5, 0x11, 0x4c, 0xb8, 0x12, 0xc8, 0xff, 0xaf, 0x58, 0xfd, 0xd2, 0x85, + 0x70, 0xf5, 0xd1, 0xa2, 0x89, 0x4e, 0x08, 0xe1, 0x00, 0x0a, 0x79, 0xcd, 0xf1, 0x78, 0x89, 0xf7, + 0x01, 0xd4, 0x1c, 0xd7, 0xb9, 0x2f, 0xdb, 0xef, 0x01, 0xd9, 0xf0, 0xff, 0x60, 0x20, 0xcc, 0x54, + 0x0e, 0x45, 0xf4, 0x42, 0x6a, 0x17, 0x46, 0x30, 0x2b, 0xa2, 0x43, 0xfa, 0xcb, 0x05, 0x50, 0x6f, + 0xe3, 0xbf, 0x47, 0x5b, 0xcd, 0xb7, 0xcf, 0xaf, 0x8f, 0xca, 0x76, 0x6b, 0x33, 0x99, 0x1e, 0xfb, + 0xe3, 0x2b, 0x6f, 0xe3, 0x34, 0x38, 0xea, 0x87, 0xf7, 0xf5, 0x92, 0xf1, 0x9a, 0x5b, 0xa0, 0xf3, + 0x1d, 0x00, 0x00, 0xff, 0xff, 0x3f, 
0xa4, 0x0c, 0xe1, 0xb6, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.gw.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.gw.go new file mode 100644 index 00000000..b42c7715 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.pb.gw.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: examples/proto/examplepb/wrappers.proto + +/* +Package examplepb is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. +*/ +package examplepb + +import ( + "io" + "net/http" + + "github.com/golang/protobuf/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/utilities" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray + +func request_WrappersService_Create_0(ctx context.Context, marshaler runtime.Marshaler, client WrappersServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq Wrappers + var metadata runtime.ServerMetadata + + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.Create(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +// RegisterWrappersServiceHandlerFromEndpoint is same as RegisterWrappersServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterWrappersServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Printf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterWrappersServiceHandler(ctx, mux, conn) +} + +// RegisterWrappersServiceHandler registers the http handlers for service WrappersService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterWrappersServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterWrappersServiceHandlerClient(ctx, mux, NewWrappersServiceClient(conn)) +} + +// RegisterWrappersServiceHandler registers the http handlers for service WrappersService to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "WrappersServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "WrappersServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "WrappersServiceClient" to call the correct interceptors. 
+func RegisterWrappersServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client WrappersServiceClient) error { + + mux.Handle("POST", pattern_WrappersService_Create_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + if cn, ok := w.(http.CloseNotifier); ok { + go func(done <-chan struct{}, closed <-chan bool) { + select { + case <-done: + case <-closed: + cancel() + } + }(ctx.Done(), cn.CloseNotify()) + } + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_WrappersService_Create_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_WrappersService_Create_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_WrappersService_Create_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "example", "wrappers"}, "")) +) + +var ( + forward_WrappersService_Create_0 = runtime.ForwardResponseMessage +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.proto new file mode 100644 index 00000000..094b0a4c --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; +option go_package = "examplepb"; +package grpc.gateway.examples.examplepb; + +import "google/api/annotations.proto"; +import "google/protobuf/wrappers.proto"; + +message Wrappers { + google.protobuf.StringValue string_value = 1; + google.protobuf.Int32Value int32_value = 2; + google.protobuf.Int64Value int64_value = 3; + google.protobuf.FloatValue float_value = 4; + google.protobuf.DoubleValue double_value = 5; + google.protobuf.BoolValue bool_value = 6; +} + +service WrappersService { + rpc Create(Wrappers) returns (Wrappers) { + option (google.api.http) = { + post: "/v1/example/wrappers" + body: "*" + }; + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.swagger.json b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.swagger.json new file mode 100644 index 00000000..37c6144a --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb/wrappers.swagger.json @@ -0,0 +1,75 @@ +{ + "swagger": "2.0", + "info": { + "title": "examples/proto/examplepb/wrappers.proto", + "version": "version not set" + }, + "schemes": [ + "http", + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/v1/example/wrappers": { + "post": { + "operationId": "Create", + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/examplepbWrappers" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/examplepbWrappers" + } + } + ], + "tags": [ + "WrappersService" + ] + } + } + }, + "definitions": { + "examplepbWrappers": { + "type": "object", + "properties": { + "string_value": { + "type": "string" + }, + "int32_value": { + "type": 
"integer", + "format": "int32" + }, + "int64_value": { + "type": "integer", + "format": "int64" + }, + "float_value": { + "type": "number", + "format": "float" + }, + "double_value": { + "type": "number", + "format": "double" + }, + "bool_value": { + "type": "boolean", + "format": "boolean" + } + } + } + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/BUILD.bazel new file mode 100644 index 00000000..bc5874bb --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/BUILD.bazel @@ -0,0 +1,21 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +package(default_visibility = ["//visibility:public"]) + +proto_library( + name = "sub_proto", + srcs = ["message.proto"], +) + +go_proto_library( + name = "sub_go_proto", + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub", + proto = ":sub_proto", +) + +go_library( + name = "go_default_library", + embed = [":sub_go_proto"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub", +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub/message.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/message.pb.go similarity index 64% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub/message.pb.go rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/message.pb.go index 9faad923..7d4ec6ac 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub/message.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/message.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: examples/sub/message.proto +// source: examples/proto/sub/message.proto /* Package sub is a generated protocol buffer package. 
It is generated from these files: - examples/sub/message.proto + examples/proto/sub/message.proto It has these top-level messages: StringMessage @@ -48,15 +48,16 @@ func init() { proto.RegisterType((*StringMessage)(nil), "grpc.gateway.examples.sub.StringMessage") } -func init() { proto.RegisterFile("examples/sub/message.proto", fileDescriptor0) } +func init() { proto.RegisterFile("examples/proto/sub/message.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 111 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0xad, 0x48, 0xcc, - 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x2f, 0x2e, 0x4d, 0xd2, 0xcf, 0x4d, 0x2d, 0x2e, 0x4e, 0x4c, 0x4f, - 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x4c, 0x2f, 0x2a, 0x48, 0xd6, 0x4b, 0x4f, 0x2c, - 0x49, 0x2d, 0x4f, 0xac, 0xd4, 0x83, 0x29, 0xd4, 0x2b, 0x2e, 0x4d, 0x52, 0x52, 0xe5, 0xe2, 0x0d, - 0x2e, 0x29, 0xca, 0xcc, 0x4b, 0xf7, 0x85, 0xe8, 0x10, 0x12, 0xe1, 0x62, 0x2d, 0x4b, 0xcc, 0x29, - 0x4d, 0x95, 0x60, 0x54, 0x60, 0xd2, 0xe0, 0x0c, 0x82, 0x70, 0x9c, 0x58, 0xa3, 0x98, 0x8b, 0x4b, - 0x93, 0x00, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x10, 0x60, 0xa9, 0x65, 0x00, 0x00, 0x00, + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xad, 0x48, 0xcc, + 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0x2f, 0x2e, 0x4d, 0xd2, 0xcf, + 0x4d, 0x2d, 0x2e, 0x4e, 0x4c, 0x4f, 0xd5, 0x03, 0x8b, 0x08, 0x49, 0xa6, 0x17, 0x15, 0x24, 0xeb, + 0xa5, 0x27, 0x96, 0xa4, 0x96, 0x27, 0x56, 0xea, 0xc1, 0x94, 0xeb, 0x15, 0x97, 0x26, 0x29, 0xa9, + 0x72, 0xf1, 0x06, 0x97, 0x14, 0x65, 0xe6, 0xa5, 0xfb, 0x42, 0x74, 0x08, 0x89, 0x70, 0xb1, 0x96, + 0x25, 0xe6, 0x94, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x69, 0x70, 0x06, 0x41, 0x38, 0x4e, 0xac, 0x51, + 0xcc, 0xc5, 0xa5, 0x49, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6c, 0x00, 0x25, 0x33, 0x6b, 0x00, + 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub/message.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/message.proto similarity index 100% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub/message.proto rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub/message.proto diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/BUILD.bazel new file mode 100644 index 00000000..539f5b6e --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/BUILD.bazel @@ -0,0 +1,21 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +package(default_visibility = ["//visibility:public"]) + +proto_library( + name = "sub2_proto", + srcs = ["message.proto"], +) + +go_proto_library( + name = "sub2_go_proto", + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2", + proto = ":sub2_proto", +) + +go_library( + name = "go_default_library", + embed = [":sub2_go_proto"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2", +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.pb.go similarity index 59% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.pb.go rename to 
vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.pb.go index 710d9525..156774b7 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: examples/sub2/message.proto +// source: examples/proto/sub2/message.proto /* Package sub2 is a generated protocol buffer package. It is generated from these files: - examples/sub2/message.proto + examples/proto/sub2/message.proto It has these top-level messages: IdMessage @@ -47,16 +47,17 @@ func init() { proto.RegisterType((*IdMessage)(nil), "sub2.IdMessage") } -func init() { proto.RegisterFile("examples/sub2/message.proto", fileDescriptor0) } +func init() { proto.RegisterFile("examples/proto/sub2/message.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 128 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xad, 0x48, 0xcc, - 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x2f, 0x2e, 0x4d, 0x32, 0xd2, 0xcf, 0x4d, 0x2d, 0x2e, 0x4e, 0x4c, - 0x4f, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x01, 0x89, 0x29, 0xc9, 0x73, 0x71, 0x7a, - 0xa6, 0xf8, 0x42, 0x24, 0x84, 0x84, 0xb8, 0x58, 0x4a, 0x4b, 0x33, 0x53, 0x24, 0x18, 0x15, 0x18, - 0x35, 0x38, 0x83, 0xc0, 0x6c, 0x27, 0xb3, 0x28, 0x93, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, - 0xe4, 0xfc, 0x5c, 0xfd, 0xf4, 0xa2, 0x82, 0x64, 0xdd, 0xd4, 0xe4, 0xfc, 0xe2, 0xca, 0xe2, 0x92, - 0x54, 0x28, 0x37, 0x3d, 0xb1, 0x24, 0xb5, 0x3c, 0xb1, 0x52, 0x1f, 0xc5, 0xb2, 0x24, 0x36, 0xb0, - 0x2d, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x53, 0x75, 0xef, 0xe0, 0x84, 0x00, 0x00, 0x00, + // 130 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4c, 0xad, 0x48, 0xcc, + 0x2d, 0xc8, 0x49, 0x2d, 0xd6, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0x2f, 0x2e, 0x4d, 0x32, 0xd2, + 0xcf, 0x4d, 0x2d, 0x2e, 0x4e, 0x4c, 0x4f, 0xd5, 0x03, 0x0b, 0x09, 0xb1, 0x80, 0xc4, 0x94, 0xe4, + 0xb9, 0x38, 0x3d, 0x53, 0x7c, 0x21, 0x12, 0x42, 0x42, 0x5c, 0x2c, 0xa5, 0xa5, 0x99, 0x29, 0x12, + 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x60, 0xb6, 0x93, 0x4d, 0x94, 0x55, 0x7a, 0x66, 0x49, 0x46, + 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x51, 0x41, 0xb2, 0x6e, 0x6a, 0x72, 0x7e, 0x71, + 0x65, 0x71, 0x49, 0x2a, 0x94, 0x9b, 0x9e, 0x58, 0x92, 0x5a, 0x9e, 0x58, 0xa9, 0x8f, 0xc5, 0xca, + 0x24, 0x36, 0x30, 0xdb, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xfd, 0x49, 0xe7, 0x2f, 0x90, 0x00, + 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.proto similarity index 88% rename from vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.proto rename to vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.proto index 9c266430..59c9bd9f 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/sub2/message.proto +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2/message.proto @@ -1,5 +1,5 @@ syntax = "proto3"; -option go_package = "github.com/grpc-ecosystem/grpc-gateway/examples/sub2"; +option go_package = "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2"; package sub2; message IdMessage { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/BUILD.bazel 
b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/BUILD.bazel new file mode 100644 index 00000000..6d924fe4 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/BUILD.bazel @@ -0,0 +1,29 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +package(default_visibility = ["//visibility:public"]) + +go_library( + name = "go_default_library", + srcs = [ + "a_bit_of_everything.go", + "echo.go", + "flow_combination.go", + "main.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/examples/server", + deps = [ + "//examples/proto/examplepb:go_default_library", + "//examples/proto/sub:go_default_library", + "//examples/proto/sub2:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//ptypes/duration:go_default_library", + "@com_github_golang_protobuf//ptypes/empty:go_default_library", + "@com_github_rogpeppe_fastuuid//:go_default_library", + "@org_golang_google_genproto//googleapis/rpc/errdetails:go_default_library", + "@org_golang_google_grpc//:go_default_library", + "@org_golang_google_grpc//codes:go_default_library", + "@org_golang_google_grpc//metadata:go_default_library", + "@org_golang_google_grpc//status:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/a_bit_of_everything.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/a_bit_of_everything.go index 190d0f85..2d8dec55 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/a_bit_of_everything.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/a_bit_of_everything.go @@ -1,18 +1,20 @@ package server import ( + "context" "fmt" "io" "sync" "github.com/golang/glog" + "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes/duration" "github.com/golang/protobuf/ptypes/empty" - examples "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" - sub "github.com/grpc-ecosystem/grpc-gateway/examples/sub" - sub2 "github.com/grpc-ecosystem/grpc-gateway/examples/sub2" + examples "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" + sub "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub" + sub2 "github.com/grpc-ecosystem/grpc-gateway/examples/proto/sub2" "github.com/rogpeppe/fastuuid" - "golang.org/x/net/context" + "google.golang.org/genproto/googleapis/rpc/errdetails" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" @@ -134,7 +136,7 @@ func (s *_ABitOfEverythingServer) List(_ *empty.Empty, stream examples.StreamSer } // return error when metadata includes error header - if header, ok := metadata.FromContext(stream.Context()); ok { + if header, ok := metadata.FromIncomingContext(stream.Context()); ok { if v, ok := header["error"]; ok { stream.SetTrailer(metadata.New(map[string]string{ "foo": "foo2", @@ -246,3 +248,25 @@ func (s *_ABitOfEverythingServer) Timeout(ctx context.Context, msg *empty.Empty) return nil, ctx.Err() } } + +func (s *_ABitOfEverythingServer) ErrorWithDetails(ctx context.Context, msg *empty.Empty) (*empty.Empty, error) { + stat, err := status.New(codes.Unknown, "with details"). 
+ WithDetails(proto.Message( + &errdetails.DebugInfo{ + StackEntries: []string{"foo:1"}, + Detail: "error debug details", + }, + )) + if err != nil { + return nil, status.Errorf(codes.Internal, "unexpected error adding details: %s", err) + } + return nil, stat.Err() +} + +func (s *_ABitOfEverythingServer) GetMessageWithBody(ctx context.Context, msg *examples.MessageWithBody) (*empty.Empty, error) { + return &empty.Empty{}, nil +} + +func (s *_ABitOfEverythingServer) PostWithEmptyBody(ctx context.Context, msg *examples.Body) (*empty.Empty, error) { + return &empty.Empty{}, nil +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/cmd/example-server/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/cmd/example-server/main.go deleted file mode 100644 index 34b319ab..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/cmd/example-server/main.go +++ /dev/null @@ -1,17 +0,0 @@ -package main - -import ( - "flag" - - "github.com/golang/glog" - "github.com/grpc-ecosystem/grpc-gateway/examples/server" -) - -func main() { - flag.Parse() - defer glog.Flush() - - if err := server.Run(); err != nil { - glog.Fatal(err) - } -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/echo.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/echo.go index e87db2d5..249aeb0b 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/echo.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/echo.go @@ -1,9 +1,10 @@ package server import ( + "context" + "github.com/golang/glog" - examples "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" - "golang.org/x/net/context" + examples "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" "google.golang.org/grpc" "google.golang.org/grpc/metadata" ) @@ -33,3 +34,8 @@ func (s *echoServer) EchoBody(ctx context.Context, msg *examples.SimpleMessage) })) return msg, nil } + +func (s *echoServer) EchoDelete(ctx context.Context, msg *examples.SimpleMessage) (*examples.SimpleMessage, error) { + glog.Info(msg) + return msg, nil +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/flow_combination.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/flow_combination.go index f1a90fa2..516510f4 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/flow_combination.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/flow_combination.go @@ -1,10 +1,10 @@ package server import ( + "context" "io" - examples "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" - "golang.org/x/net/context" + examples "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" ) type flowCombinationServer struct{} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/main.go index c5e6cb6f..4cd385f8 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/main.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/examples/server/main.go @@ -1,17 +1,27 @@ package server import ( + "context" "net" - examples "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" + "github.com/golang/glog" + examples "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" "google.golang.org/grpc" ) -func Run() error { - l, err := net.Listen("tcp", ":9090") +// Run starts the example gRPC service. +// "network" and "address" are passed to net.Listen. 
+func Run(ctx context.Context, network, address string) error { + l, err := net.Listen(network, address) if err != nil { return err } + defer func() { + if err := l.Close(); err != nil { + glog.Errorf("Failed to close %s %s: %v", network, address, err) + } + }() + s := grpc.NewServer() examples.RegisterEchoServiceServer(s, newEchoServer()) examples.RegisterFlowCombinationServer(s, newFlowCombinationServer()) @@ -20,6 +30,9 @@ func Run() error { examples.RegisterABitOfEverythingServiceServer(s, abe) examples.RegisterStreamServiceServer(s, abe) - s.Serve(l) - return nil + go func() { + defer s.GracefulStop() + <-ctx.Done() + }() + return s.Serve(l) } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/options/options.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/options/options.proto deleted file mode 100644 index be81c963..00000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/options/options.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto2"; -option go_package = "options"; - -package gengo.grpc.gateway; -import "google/protobuf/descriptor.proto"; - -message ApiMethodOptions { - // Use HttpRule instead. - option deprecated = true; - - extend google.protobuf.MethodOptions { - // Describes how the gRPC method should be exported as a RESTful API. - // - // The id is a globally unique id for this option, assigned by - // protobuf-global-extension-registry@google.com. - optional ApiMethodOptions api_options = 1022; - } - - // Path of the RESTful API method. - // Path components which start with colon is mapped to the corresponding fields in the request message. - required string path = 1; - // HTTP method of the RESTful API method - required string method = 2; - // Human-readable description of the method. - optional string description = 3; -} - diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel new file mode 100644 index 00000000..fc4850fb --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/BUILD.bazel @@ -0,0 +1,41 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") +load("@io_bazel_rules_go//proto:compiler.bzl", "go_proto_compiler") + +package(default_visibility = ["//visibility:private"]) + +go_library( + name = "go_default_library", + srcs = ["main.go"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway", + deps = [ + "//codegenerator:go_default_library", + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-grpc-gateway/gengateway:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) + +go_binary( + name = "protoc-gen-grpc-gateway", + embed = [":go_default_library"], +) + +go_proto_compiler( + name = "go_gen_grpc_gateway", + options = ["logtostderr=true"], + plugin = ":protoc-gen-grpc-gateway", + suffix = ".pb.gw.go", + visibility = ["//visibility:public"], + deps = [ + "//runtime:go_default_library", + "//utilities:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@org_golang_google_grpc//:go_default_library", + "@org_golang_google_grpc//codes:go_default_library", + "@org_golang_google_grpc//grpclog:go_default_library", + "@org_golang_google_grpc//status:go_default_library", + "@org_golang_x_net//context:go_default_library", + ], +) diff --git 
a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel new file mode 100644 index 00000000..7c68fa88 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/BUILD.bazel @@ -0,0 +1,39 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//:generators"]) + +go_library( + name = "go_default_library", + srcs = [ + "registry.go", + "services.go", + "types.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor", + deps = [ + "//protoc-gen-grpc-gateway/httprule:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/generator:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + "@org_golang_google_genproto//googleapis/api/annotations:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = [ + "registry_test.go", + "services_test.go", + "types_test.go", + ], + embed = [":go_default_library"], + deps = [ + "//protoc-gen-grpc-gateway/httprule:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go index c4d70038..16df86b4 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go @@ -25,6 +25,9 @@ type Registry struct { // prefix is a prefix to be inserted to golang package paths generated from proto package names. prefix string + // importPath is used as the package if no input files declare go_package. If it contains slashes, everything up to the rightmost slash is ignored. + importPath string + // pkgMap is a user-specified mapping from file path to proto package. pkgMap map[string]string @@ -58,7 +61,7 @@ func (r *Registry) Load(req *plugin.CodeGeneratorRequest) error { if target == nil { return fmt.Errorf("no such file: %s", name) } - name := packageIdentityName(target.FileDescriptorProto) + name := r.packageIdentityName(target.FileDescriptorProto) if targetPkg == "" { targetPkg = name } else { @@ -80,7 +83,7 @@ func (r *Registry) Load(req *plugin.CodeGeneratorRequest) error { func (r *Registry) loadFile(file *descriptor.FileDescriptorProto) { pkg := GoPackage{ Path: r.goPackagePath(file), - Name: defaultGoPackageName(file), + Name: r.defaultGoPackageName(file), } if err := r.ReserveGoPackageAlias(pkg.Name, pkg.Path); err != nil { for i := 0; ; i++ { @@ -207,11 +210,18 @@ func (r *Registry) AddPkgMap(file, protoPkg string) { r.pkgMap[file] = protoPkg } -// SetPrefix registeres the perfix to be added to go package paths generated from proto package names. +// SetPrefix registers the prefix to be added to go package paths generated from proto package names. 
func (r *Registry) SetPrefix(prefix string) { r.prefix = prefix } +// SetImportPath registers the importPath which is used as the package if no +// input files declare go_package. If it contains slashes, everything up to the +// rightmost slash is ignored. +func (r *Registry) SetImportPath(importPath string) { + r.importPath = importPath +} + // ReserveGoPackageAlias reserves the unique alias of go package. // If succeeded, the alias will be never used for other packages in generated go files. // If failed, the alias is already taken by another package, so you need to use another @@ -282,15 +292,15 @@ func sanitizePackageName(pkgName string) string { // defaultGoPackageName returns the default go package name to be used for go files generated from "f". // You might need to use an unique alias for the package when you import it. Use ReserveGoPackageAlias to get a unique alias. -func defaultGoPackageName(f *descriptor.FileDescriptorProto) string { - name := packageIdentityName(f) +func (r *Registry) defaultGoPackageName(f *descriptor.FileDescriptorProto) string { + name := r.packageIdentityName(f) return sanitizePackageName(name) } // packageIdentityName returns the identity of packages. // protoc-gen-grpc-gateway rejects CodeGenerationRequests which contains more than one packages // as protoc-gen-go does. -func packageIdentityName(f *descriptor.FileDescriptorProto) string { +func (r *Registry) packageIdentityName(f *descriptor.FileDescriptorProto) string { if f.Options != nil && f.Options.GoPackage != nil { gopkg := f.Options.GetGoPackage() idx := strings.LastIndex(gopkg, "/") @@ -308,6 +318,12 @@ func packageIdentityName(f *descriptor.FileDescriptorProto) string { } return sanitizePackageName(gopkg[sc+1:]) } + if p := r.importPath; len(p) != 0 { + if i := strings.LastIndex(p, "/"); i >= 0 { + p = p[i+1:] + } + return p + } if f.Package == nil { base := filepath.Base(f.GetName()) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry_test.go index b73c967b..16e76655 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry_test.go @@ -549,3 +549,40 @@ func TestLoadOverridedPackageName(t *testing.T) { t.Errorf("file.GoPkg = %#v; want %#v", got, want) } } + +func TestLoadSetInputPath(t *testing.T) { + reg := NewRegistry() + reg.SetImportPath("foo/examplepb") + loadFile(t, reg, ` + name: 'example.proto' + package: 'example' + `) + file := reg.files["example.proto"] + if file == nil { + t.Errorf("reg.files[%q] = nil; want non-nil", "example.proto") + return + } + wantPkg := GoPackage{Path: ".", Name: "examplepb"} + if got, want := file.GoPkg, wantPkg; got != want { + t.Errorf("file.GoPkg = %#v; want %#v", got, want) + } +} + +func TestLoadGoPackageInputPath(t *testing.T) { + reg := NewRegistry() + reg.SetImportPath("examplepb") + loadFile(t, reg, ` + name: 'example.proto' + package: 'example' + options < go_package: 'example.com/xyz;pb' > + `) + file := reg.files["example.proto"] + if file == nil { + t.Errorf("reg.files[%q] = nil; want non-nil", "example.proto") + return + } + wantPkg := GoPackage{Path: "example.com/xyz", Name: "pb"} + if got, want := file.GoPkg, wantPkg; got != want { + t.Errorf("file.GoPkg = %#v; want %#v", got, want) + } +} diff --git 
a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go index c9dfec8e..d3020ff2 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go @@ -27,7 +27,7 @@ func (r *Registry) loadServices(file *File) error { glog.V(2).Infof("Processing %s.%s", sd.GetName(), md.GetName()) opts, err := extractAPIOptions(md) if err != nil { - glog.Errorf("Failed to extract ApiMethodOptions from %s.%s: %v", svc.GetName(), md.GetName(), err) + glog.Errorf("Failed to extract HttpRule from %s.%s: %v", svc.GetName(), md.GetName(), err) return err } if opts == nil { @@ -75,7 +75,7 @@ func (r *Registry) newMethod(svc *Service, md *descriptor.MethodDescriptorProto, httpMethod = "GET" pathTemplate = opts.GetGet() if opts.Body != "" { - return nil, fmt.Errorf("needs request body even though http method is GET: %s", md.GetName()) + return nil, fmt.Errorf("must not set request body when http method is GET: %s", md.GetName()) } case opts.GetPut() != "": @@ -90,7 +90,7 @@ func (r *Registry) newMethod(svc *Service, md *descriptor.MethodDescriptorProto, httpMethod = "DELETE" pathTemplate = opts.GetDelete() if opts.Body != "" && !r.allowDeleteBody { - return nil, fmt.Errorf("needs request body even though http method is DELETE: %s", md.GetName()) + return nil, fmt.Errorf("must not set request body when http method is DELETE except allow_delete_body option is true: %s", md.GetName()) } case opts.GetPatch() != "": @@ -183,7 +183,7 @@ func extractAPIOptions(meth *descriptor.MethodDescriptorProto) (*options.HttpRul func (r *Registry) newParam(meth *Method, path string) (Parameter, error) { msg := meth.RequestType - fields, err := r.resolveFiledPath(msg, path) + fields, err := r.resolveFieldPath(msg, path) if err != nil { return Parameter{}, err } @@ -194,7 +194,12 @@ func (r *Registry) newParam(meth *Method, path string) (Parameter, error) { target := fields[l-1].Target switch target.GetType() { case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_GROUP: - return Parameter{}, fmt.Errorf("aggregate type %s in parameter of %s.%s: %s", target.Type, meth.Service.GetName(), meth.GetName(), path) + glog.V(2).Infoln("found aggregate type:", target, target.TypeName) + if IsWellKnownType(*target.TypeName) { + glog.V(2).Infoln("found well known aggregate type:", target) + } else { + return Parameter{}, fmt.Errorf("aggregate type %s in parameter of %s.%s: %s", target.Type, meth.Service.GetName(), meth.GetName(), path) + } } return Parameter{ FieldPath: FieldPath(fields), @@ -211,7 +216,7 @@ func (r *Registry) newBody(meth *Method, path string) (*Body, error) { case "*": return &Body{FieldPath: nil}, nil } - fields, err := r.resolveFiledPath(msg, path) + fields, err := r.resolveFieldPath(msg, path) if err != nil { return nil, err } @@ -230,7 +235,7 @@ func lookupField(msg *Message, name string) *Field { } // resolveFieldPath resolves "path" into a list of fieldDescriptor, starting from "msg". 
-func (r *Registry) resolveFiledPath(msg *Message, path string) ([]FieldPathComponent, error) { +func (r *Registry) resolveFieldPath(msg *Message, path string) ([]FieldPathComponent, error) { if path == "" { return nil, nil } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services_test.go index eda34d41..93a8f67c 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services_test.go @@ -1097,7 +1097,7 @@ func TestResolveFieldPath(t *testing.T) { if err != nil { t.Fatalf("reg.LookupFile(%q) failed with %v; want success; on file=%s", file.GetName(), err, spec.src) } - _, err = reg.resolveFiledPath(f.Messages[0], spec.path) + _, err = reg.resolveFieldPath(f.Messages[0], spec.path) if got, want := err != nil, spec.wantErr; got != want { if want { t.Errorf("reg.resolveFiledPath(%q, %q) succeeded; want an error", f.Messages[0].GetName(), spec.path) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go index 248538e7..bfdb2dc8 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go @@ -9,6 +9,12 @@ import ( "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule" ) +// IsWellKnownType returns true if the provided fully qualified type name is considered 'well-known'. +func IsWellKnownType(typeName string) bool { + _, ok := wellKnownTypeConv[typeName] + return ok +} + // GoPackage represents a golang package type GoPackage struct { // Path is the package path to the package. @@ -194,6 +200,9 @@ func (p Parameter) ConvertFuncExpr() (string, error) { } typ := p.Target.GetType() conv, ok := tbl[typ] + if !ok { + conv, ok = wellKnownTypeConv[p.Target.GetTypeName()] + } if !ok { return "", fmt.Errorf("unsupported field type %s of parameter %s in %s.%s", typ, p.FieldPath, p.Method.Service.GetName(), p.Method.GetName()) } @@ -207,10 +216,10 @@ type Body struct { FieldPath FieldPath } -// RHS returns a right-hand-side expression in go to be used to initialize method request object. +// AssignableExpr returns an assignable expression in Go to be used to initialize method request object. // It starts with "msgExpr", which is the go expression of the method request object. -func (b Body) RHS(msgExpr string) string { - return b.FieldPath.RHS(msgExpr) +func (b Body) AssignableExpr(msgExpr string) string { + return b.FieldPath.AssignableExpr(msgExpr) } // FieldPath is a path to a field from a request message. @@ -233,9 +242,9 @@ func (p FieldPath) IsNestedProto3() bool { return false } -// RHS is a right-hand-side expression in go to be used to assign a value to the target field. +// AssignableExpr is an assignable expression in Go to be used to assign a value to the target field. // It starts with "msgExpr", which is the go expression of the method request object. 
-func (p FieldPath) RHS(msgExpr string) string { +func (p FieldPath) AssignableExpr(msgExpr string) string { l := len(p) if l == 0 { return msgExpr @@ -243,10 +252,10 @@ func (p FieldPath) RHS(msgExpr string) string { components := []string{msgExpr} for i, c := range p { if i == l-1 { - components = append(components, c.RHS()) + components = append(components, c.AssignableExpr()) continue } - components = append(components, c.LHS()) + components = append(components, c.ValueExpr()) } return strings.Join(components, ".") } @@ -260,13 +269,13 @@ type FieldPathComponent struct { Target *Field } -// RHS returns a right-hand-side expression in go for this field. -func (c FieldPathComponent) RHS() string { +// AssignableExpr returns an assignable expression in go for this field. +func (c FieldPathComponent) AssignableExpr() string { return gogen.CamelCase(c.Name) } -// LHS returns a left-hand-side expression in go for this field. -func (c FieldPathComponent) LHS() string { +// ValueExpr returns an expression in go for this field. +func (c FieldPathComponent) ValueExpr() string { if c.Target.Message.File.proto2() { return fmt.Sprintf("Get%s()", gogen.CamelCase(c.Name)) } @@ -286,8 +295,7 @@ var ( descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.String", // FieldDescriptorProto_TYPE_GROUP // FieldDescriptorProto_TYPE_MESSAGE - // FieldDescriptorProto_TYPE_BYTES - // TODO(yugui) Handle bytes + descriptor.FieldDescriptorProto_TYPE_BYTES: "runtime.Bytes", descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32", // FieldDescriptorProto_TYPE_ENUM // TODO(yugui) Handle Enum @@ -319,4 +327,9 @@ var ( descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32P", descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64P", } + + wellKnownTypeConv = map[string]string{ + ".google.protobuf.Timestamp": "runtime.Timestamp", + ".google.protobuf.Duration": "runtime.Duration", + } ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types_test.go index ef2162a6..1dcdb341 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types_test.go @@ -161,22 +161,22 @@ func TestFieldPath(t *testing.T) { Name: "nest_field", Target: nest2.Fields[0], } - if got, want := c1.LHS(), "GetNestField()"; got != want { - t.Errorf("c1.LHS() = %q; want %q", got, want) + if got, want := c1.ValueExpr(), "GetNestField()"; got != want { + t.Errorf("c1.ValueExpr() = %q; want %q", got, want) } - if got, want := c1.RHS(), "NestField"; got != want { - t.Errorf("c1.RHS() = %q; want %q", got, want) + if got, want := c1.AssignableExpr(), "NestField"; got != want { + t.Errorf("c1.AssignableExpr() = %q; want %q", got, want) } c2 := FieldPathComponent{ Name: "nest2_field", Target: nest.Fields[0], } - if got, want := c2.LHS(), "Nest2Field"; got != want { - t.Errorf("c2.LHS() = %q; want %q", got, want) + if got, want := c2.ValueExpr(), "Nest2Field"; got != want { + t.Errorf("c2.ValueExpr() = %q; want %q", got, want) } - if got, want := c2.LHS(), "Nest2Field"; got != want { - t.Errorf("c2.LHS() = %q; want %q", got, want) + if got, want := c2.ValueExpr(), "Nest2Field"; got != want { + t.Errorf("c2.ValueExpr() = %q; want %q", got, want) } fp := FieldPath{ @@ -185,8 +185,8 @@ func TestFieldPath(t *testing.T) { Target: nest.Fields[1], }, } - if got, want := 
fp.RHS("resp"), "resp.GetNestField().Nest2Field.GetNestField().TerminalField"; got != want { - t.Errorf("fp.RHS(%q) = %q; want %q", "resp", got, want) + if got, want := fp.AssignableExpr("resp"), "resp.GetNestField().Nest2Field.GetNestField().TerminalField"; got != want { + t.Errorf("fp.AssignableExpr(%q) = %q; want %q", "resp", got, want) } fp2 := FieldPath{ @@ -195,12 +195,12 @@ func TestFieldPath(t *testing.T) { Target: nest2.Fields[1], }, } - if got, want := fp2.RHS("resp"), "resp.Nest2Field.GetNestField().Nest2Field.TerminalField"; got != want { - t.Errorf("fp2.RHS(%q) = %q; want %q", "resp", got, want) + if got, want := fp2.AssignableExpr("resp"), "resp.Nest2Field.GetNestField().Nest2Field.TerminalField"; got != want { + t.Errorf("fp2.AssignableExpr(%q) = %q; want %q", "resp", got, want) } var fpEmpty FieldPath - if got, want := fpEmpty.RHS("resp"), "resp"; got != want { - t.Errorf("fpEmpty.RHS(%q) = %q; want %q", "resp", got, want) + if got, want := fpEmpty.AssignableExpr("resp"), "resp"; got != want { + t.Errorf("fpEmpty.AssignableExpr(%q) = %q; want %q", "resp", got, want) } } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel new file mode 100644 index 00000000..5995e5df --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator/BUILD.bazel @@ -0,0 +1,13 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +package(default_visibility = ["//:generators"]) + +go_library( + name = "go_default_library", + srcs = ["generator.go"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator", + deps = [ + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel new file mode 100644 index 00000000..517909b5 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/BUILD.bazel @@ -0,0 +1,38 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//protoc-gen-grpc-gateway:__subpackages__"]) + +go_library( + name = "go_default_library", + srcs = [ + "doc.go", + "generator.go", + "template.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway", + deps = [ + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-grpc-gateway/generator:go_default_library", + "//utilities:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + "@org_golang_google_genproto//googleapis/api/annotations:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = [ + "generator_test.go", + "template_test.go", + ], + embed = [":go_default_library"], + deps = [ + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-grpc-gateway/httprule:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + ], +) diff --git 
a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go index d08609bc..cb2f5e14 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator.go @@ -78,6 +78,9 @@ func (g *generator) Generate(targets []*descriptor.File) ([]*plugin.CodeGenerato return nil, err } name := file.GetName() + if file.GoPkg.Path != "" { + name = fmt.Sprintf("%s/%s", file.GoPkg.Path, filepath.Base(name)) + } ext := filepath.Ext(name) base := strings.TrimSuffix(name, ext) output := fmt.Sprintf("%s.pb.gw.go", base) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator_test.go index 755a0923..986ff415 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/generator_test.go @@ -1,6 +1,7 @@ package gengateway import ( + "path/filepath" "strings" "testing" @@ -9,7 +10,16 @@ import ( "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" ) -func TestGenerateServiceWithoutBindings(t *testing.T) { +func newExampleFileDescriptor() *descriptor.File { + return newExampleFileDescriptorWithGoPkg( + &descriptor.GoPackage{ + Path: "example.com/path/to/example/example.pb", + Name: "example_pb", + }, + ) +} + +func newExampleFileDescriptorWithGoPkg(gp *descriptor.GoPackage) *descriptor.File { msgdesc := &protodescriptor.DescriptorProto{ Name: proto.String("ExampleMessage"), } @@ -39,7 +49,7 @@ func TestGenerateServiceWithoutBindings(t *testing.T) { Name: proto.String("ExampleService"), Method: []*protodescriptor.MethodDescriptorProto{meth, meth1}, } - file := descriptor.File{ + return &descriptor.File{ FileDescriptorProto: &protodescriptor.FileDescriptorProto{ Name: proto.String("example.proto"), Package: proto.String("example"), @@ -47,10 +57,7 @@ func TestGenerateServiceWithoutBindings(t *testing.T) { MessageType: []*protodescriptor.DescriptorProto{msgdesc}, Service: []*protodescriptor.ServiceDescriptorProto{svc}, }, - GoPkg: descriptor.GoPackage{ - Path: "example.com/path/to/example/example.pb", - Name: "example_pb", - }, + GoPkg: *gp, Messages: []*descriptor.Message{msg}, Services: []*descriptor.Service{ { @@ -76,8 +83,12 @@ func TestGenerateServiceWithoutBindings(t *testing.T) { }, }, } +} + +func TestGenerateServiceWithoutBindings(t *testing.T) { + file := newExampleFileDescriptor() g := &generator{} - got, err := g.generate(crossLinkFixture(&file)) + got, err := g.generate(crossLinkFixture(file)) if err != nil { t.Errorf("generate(%#v) failed with %v; want success", file, err) return @@ -86,3 +97,57 @@ func TestGenerateServiceWithoutBindings(t *testing.T) { t.Errorf("generate(%#v) = %s; does not want to contain %s", file, got, notwanted) } } + +func TestGenerateOutputPath(t *testing.T) { + cases := []struct { + file *descriptor.File + expected string + }{ + { + file: newExampleFileDescriptorWithGoPkg( + &descriptor.GoPackage{ + Path: "example.com/path/to/example", + Name: "example_pb", + }, + ), + expected: "example.com/path/to/example", + }, + { + file: newExampleFileDescriptorWithGoPkg( + &descriptor.GoPackage{ + Path: "example", + Name: 
"example_pb", + }, + ), + expected: "example", + }, + } + + g := &generator{} + for _, c := range cases { + file := c.file + gots, err := g.Generate([]*descriptor.File{crossLinkFixture(file)}) + if err != nil { + t.Errorf("Generate(%#v) failed with %v; wants success", file, err) + return + } + + if len(gots) != 1 { + t.Errorf("Generate(%#v) failed; expects on result got %d", file, len(gots)) + return + } + + got := gots[0] + if got.Name == nil { + t.Errorf("Generate(%#v) failed; expects non-nil Name(%v)", file, got.Name) + return + } + + gotPath := filepath.Dir(*got.Name) + expectedPath := c.expected + if gotPath != expectedPath { + t.Errorf("Generate(%#v) failed; got path: %s expected path: %s", file, gotPath, expectedPath) + return + } + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go index c6fcc3bf..435a1405 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway/template.go @@ -80,6 +80,8 @@ func applyTemplate(p param) (string, error) { var targetServices []*descriptor.Service for _, svc := range p.Services { var methodWithBindingsSeen bool + svcName := strings.Title(*svc.Name) + svc.Name = &svcName for _, meth := range svc.Methods { glog.V(2).Infof("Processing %s.%s", svc.GetName(), meth.GetName()) methName := strings.Title(*meth.Name) @@ -111,9 +113,8 @@ func applyTemplate(p param) (string, error) { var ( headerTemplate = template.Must(template.New("header").Parse(` -// Code generated by protoc-gen-grpc-gateway +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. // source: {{.GetName}} -// DO NOT EDIT! /* Package {{.GoPkg.Name}} is a reverse proxy. @@ -206,7 +207,7 @@ var ( var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}} var metadata runtime.ServerMetadata {{if .Body}} - if err := marshaler.NewDecoder(req.Body).Decode(&{{.Body.RHS "protoReq"}}); err != nil { + if err := marshaler.NewDecoder(req.Body).Decode(&{{.Body.AssignableExpr "protoReq"}}); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } {{end}} @@ -225,10 +226,10 @@ var ( {{if $param.IsNestedProto3 }} err = runtime.PopulateFieldFromPath(&protoReq, {{$param | printf "%q"}}, val) {{else}} - {{$param.RHS "protoReq"}}, err = {{$param.ConvertFuncExpr}}(val) + {{$param.AssignableExpr "protoReq"}}, err = {{$param.ConvertFuncExpr}}(val) {{end}} if err != nil { - return nil, metadata, err + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", {{$param | printf "%q"}}, err) } {{end}} {{end}} @@ -265,7 +266,7 @@ var ( dec := marshaler.NewDecoder(req.Body) handleSend := func() error { var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}} - err = dec.Decode(&protoReq) + err := dec.Decode(&protoReq) if err == io.EOF { return err } @@ -273,7 +274,7 @@ var ( grpclog.Printf("Failed to decode request: %v", err) return err } - if err = stream.Send(&protoReq); err != nil { + if err := stream.Send(&protoReq); err != nil { grpclog.Printf("Failed to send request: %v", err) return err } @@ -339,7 +340,15 @@ func Register{{$svc.GetName}}HandlerFromEndpoint(ctx context.Context, mux *runti // Register{{$svc.GetName}}Handler registers the http handlers for service {{$svc.GetName}} to "mux". 
// The handlers forward requests to the grpc endpoint over "conn". func Register{{$svc.GetName}}Handler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - client := New{{$svc.GetName}}Client(conn) + return Register{{$svc.GetName}}HandlerClient(ctx, mux, New{{$svc.GetName}}Client(conn)) +} + +// Register{{$svc.GetName}}Handler registers the http handlers for service {{$svc.GetName}} to "mux". +// The handlers forward requests to the grpc endpoint over the given implementation of "{{$svc.GetName}}Client". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "{{$svc.GetName}}Client" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "{{$svc.GetName}}Client" to call the correct interceptors. +func Register{{$svc.GetName}}HandlerClient(ctx context.Context, mux *runtime.ServeMux, client {{$svc.GetName}}Client) error { {{range $m := $svc.Methods}} {{range $b := $m.Bindings}} mux.Handle({{$b.HTTPMethod | printf "%q"}}, pattern_{{$svc.GetName}}_{{$m.GetName}}_{{$b.Index}}, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel new file mode 100644 index 00000000..89f94a14 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/BUILD.bazel @@ -0,0 +1,32 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//:generators"]) + +go_library( + name = "go_default_library", + srcs = [ + "compile.go", + "parse.go", + "types.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule", + deps = [ + "//utilities:go_default_library", + "@com_github_golang_glog//:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = [ + "compile_test.go", + "parse_test.go", + "types_test.go", + ], + embed = [":go_default_library"], + deps = [ + "//utilities:go_default_library", + "@com_github_golang_glog//:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go index cf365f24..d4569d20 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/main.go @@ -10,51 +10,36 @@ package main import ( "flag" - "io" - "io/ioutil" "os" "strings" "github.com/golang/glog" "github.com/golang/protobuf/proto" plugin "github.com/golang/protobuf/protoc-gen-go/plugin" + "github.com/grpc-ecosystem/grpc-gateway/codegenerator" "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/gengateway" ) var ( importPrefix = flag.String("import_prefix", "", "prefix to be added to go package paths for imported proto files") - useRequestContext = flag.Bool("request_context", false, "determine whether to use http.Request's context or not") + importPath = flag.String("import_path", "", "used as the package if no input files declare go_package. 
If it contains slashes, everything up to the rightmost slash is ignored.") + useRequestContext = flag.Bool("request_context", true, "determine whether to use http.Request's context or not") allowDeleteBody = flag.Bool("allow_delete_body", false, "unless set, HTTP DELETE methods may not have a body") ) -func parseReq(r io.Reader) (*plugin.CodeGeneratorRequest, error) { - glog.V(1).Info("Parsing code generator request") - input, err := ioutil.ReadAll(r) - if err != nil { - glog.Errorf("Failed to read code generator request: %v", err) - return nil, err - } - req := new(plugin.CodeGeneratorRequest) - if err = proto.Unmarshal(input, req); err != nil { - glog.Errorf("Failed to unmarshal code generator request: %v", err) - return nil, err - } - glog.V(1).Info("Parsed code generator request") - return req, nil -} - func main() { flag.Parse() defer glog.Flush() reg := descriptor.NewRegistry() - glog.V(1).Info("Processing code generator request") - req, err := parseReq(os.Stdin) + glog.V(1).Info("Parsing code generator request") + req, err := codegenerator.ParseRequest(os.Stdin) if err != nil { glog.Fatal(err) } + glog.V(1).Info("Parsed code generator request") if req.Parameter != nil { for _, p := range strings.Split(req.GetParameter(), ",") { spec := strings.SplitN(p, "=", 2) @@ -78,6 +63,7 @@ func main() { g := gengateway.New(reg, *useRequestContext) reg.SetPrefix(*importPrefix) + reg.SetImportPath(*importPath) reg.SetAllowDeleteBody(*allowDeleteBody) if err := reg.Load(req); err != nil { emitError(err) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/BUILD.bazel new file mode 100644 index 00000000..62384ef3 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/BUILD.bazel @@ -0,0 +1,30 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test") + +package(default_visibility = ["//visibility:private"]) + +go_library( + name = "go_default_library", + srcs = ["main.go"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger", + deps = [ + "//codegenerator:go_default_library", + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-swagger/genswagger:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) + +go_binary( + name = "protoc-gen-swagger", + embed = [":go_default_library"], + visibility = ["//visibility:public"], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = ["main_test.go"], + embed = [":go_default_library"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/defs.bzl b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/defs.bzl new file mode 100644 index 00000000..a7a8c858 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/defs.bzl @@ -0,0 +1,75 @@ +def _collect_includes(srcs): + includes = ["."] + for src in srcs: + include = src.dirname + if include and not include in includes: + includes += [include] + + return includes + +def _run_proto_gen_swagger(direct_proto_srcs, transitive_proto_srcs, actions, protoc, protoc_gen_swagger): + swagger_files = [] + for proto in direct_proto_srcs: + swagger_file = actions.declare_file( + "%s.swagger.json" % proto.basename[:-len(".proto")], + sibling = proto, + ) + + args = actions.args() + 
args.add("--plugin=%s" % protoc_gen_swagger.path) + args.add("--swagger_out=logtostderr=true:%s" % swagger_file.dirname) + args.add("-Iexternal/com_google_protobuf/src") + args.add("-Iexternal/com_github_googleapis_googleapis") + args.add(["-I%s" % include for include in _collect_includes(direct_proto_srcs + transitive_proto_srcs)]) + args.add(proto.basename) + + actions.run( + executable = protoc, + inputs = direct_proto_srcs + transitive_proto_srcs + [protoc_gen_swagger], + outputs = [swagger_file], + arguments = [args], + ) + + swagger_files.append(swagger_file) + + return swagger_files + +def _proto_gen_swagger_impl(ctx): + proto = ctx.attr.proto.proto + + return struct( + files=depset( + _run_proto_gen_swagger( + direct_proto_srcs = proto.direct_sources, + transitive_proto_srcs = ctx.files._well_known_protos + proto.transitive_sources.to_list(), + actions = ctx.actions, + protoc = ctx.executable._protoc, + protoc_gen_swagger = ctx.executable._protoc_gen_swagger, + ) + ) + ) + +protoc_gen_swagger = rule( + attrs = { + "proto": attr.label( + allow_rules = ["proto_library"], + mandatory = True, + providers = ['proto'], + ), + "_protoc": attr.label( + default = "@com_google_protobuf//:protoc", + executable = True, + cfg = "host", + ), + "_well_known_protos": attr.label( + default = "@com_google_protobuf//:well_known_protos", + allow_files = True, + ), + "_protoc_gen_swagger": attr.label( + default = Label("@grpc_ecosystem_grpc_gateway//protoc-gen-swagger:protoc-gen-swagger"), + executable = True, + cfg = "host", + ), + }, + implementation = _proto_gen_swagger_impl, +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/BUILD.bazel new file mode 100644 index 00000000..7f34844a --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/BUILD.bazel @@ -0,0 +1,37 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//protoc-gen-swagger:__subpackages__"]) + +go_library( + name = "go_default_library", + srcs = [ + "doc.go", + "generator.go", + "template.go", + "types.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger", + deps = [ + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-grpc-gateway/generator:go_default_library", + "//protoc-gen-swagger/options:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = ["template_test.go"], + embed = [":go_default_library"], + deps = [ + "//protoc-gen-grpc-gateway/descriptor:go_default_library", + "//protoc-gen-grpc-gateway/httprule:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + "@com_github_golang_protobuf//protoc-gen-go/plugin:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template.go index 170e0e4b..d2b73e1a 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template.go 
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template.go @@ -4,15 +4,52 @@ import ( "bytes" "encoding/json" "fmt" + "os" "reflect" "regexp" "strconv" "strings" + "sync" + "github.com/golang/protobuf/proto" pbdescriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" + swagger_options "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" ) +var wktSchemas = map[string]schemaCore{ + ".google.protobuf.Timestamp": schemaCore{ + Type: "string", + Format: "date-time", + }, + ".google.protobuf.Duration": schemaCore{ + Type: "string", + }, + ".google.protobuf.StringValue": schemaCore{ + Type: "string", + }, + ".google.protobuf.Int32Value": schemaCore{ + Type: "integer", + Format: "int32", + }, + ".google.protobuf.Int64Value": schemaCore{ + Type: "integer", + Format: "int64", + }, + ".google.protobuf.FloatValue": schemaCore{ + Type: "number", + Format: "float", + }, + ".google.protobuf.DoubleValue": schemaCore{ + Type: "number", + Format: "double", + }, + ".google.protobuf.BoolValue": schemaCore{ + Type: "boolean", + Format: "boolean", + }, +} + func listEnumNames(enum *descriptor.Enum) (names []string) { for _, value := range enum.GetValue() { names = append(names, value.GetName()) @@ -124,8 +161,9 @@ func findServicesMessagesAndEnumerations(s []*descriptor.Service, reg *descripto // Request may be fully included in query if _, ok := refs[fmt.Sprintf("#/definitions/%s", fullyQualifiedNameToSwaggerName(meth.RequestType.FQMN(), reg))]; ok { m[fullyQualifiedNameToSwaggerName(meth.RequestType.FQMN(), reg)] = meth.RequestType - findNestedMessagesAndEnumerations(meth.RequestType, reg, m, e) } + findNestedMessagesAndEnumerations(meth.RequestType, reg, m, e) + m[fullyQualifiedNameToSwaggerName(meth.ResponseType.FQMN(), reg)] = meth.ResponseType findNestedMessagesAndEnumerations(meth.ResponseType, reg, m, e) } @@ -161,6 +199,20 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject, switch name { case ".google.protobuf.Timestamp": continue + case ".google.protobuf.Duration": + continue + case ".google.protobuf.StringValue": + continue + case ".google.protobuf.Int32Value": + continue + case ".google.protobuf.Int64Value": + continue + case ".google.protobuf.FloatValue": + continue + case ".google.protobuf.DoubleValue": + continue + case ".google.protobuf.BoolValue": + continue } if opt := msg.GetOptions(); opt != nil && opt.MapEntry != nil && *opt.MapEntry { continue @@ -174,6 +226,25 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject, if err := updateSwaggerDataFromComments(&schema, msgComments); err != nil { panic(err) } + opts, err := extractSchemaOptionFromMessageDescriptor(msg.DescriptorProto) + if err != nil { + panic(err) + } + if opts != nil { + if opts.ExternalDocs != nil { + if schema.ExternalDocs == nil { + schema.ExternalDocs = &swaggerExternalDocumentationObject{} + } + if opts.ExternalDocs.Description != "" { + schema.ExternalDocs.Description = opts.ExternalDocs.Description + } + if opts.ExternalDocs.Url != "" { + schema.ExternalDocs.URL = opts.ExternalDocs.Url + } + } + + // TODO(ivucica): add remaining fields of schema object + } for _, f := range msg.Fields { fieldValue := schemaOfField(f, reg) @@ -213,11 +284,8 @@ func schemaOfField(f *descriptor.Field, reg *descriptor.Registry) swaggerSchemaO switch ft := fd.GetType(); ft { case pbdescriptor.FieldDescriptorProto_TYPE_ENUM, 
pbdescriptor.FieldDescriptorProto_TYPE_MESSAGE, pbdescriptor.FieldDescriptorProto_TYPE_GROUP: - if fd.GetTypeName() == ".google.protobuf.Timestamp" && pbdescriptor.FieldDescriptorProto_TYPE_MESSAGE == ft { - core = schemaCore{ - Type: "string", - Format: "date-time", - } + if wktSchema, ok := wktSchemas[fd.GetTypeName()]; ok { + core = wktSchema } else { core = schemaCore{ Ref: "#/definitions/" + fullyQualifiedNameToSwaggerName(fd.GetTypeName(), reg), @@ -329,9 +397,21 @@ func renderEnumerationsAsDefinition(enums enumMap, d swaggerDefinitionsObject, r // Take in a FQMN or FQEN and return a swagger safe version of the FQMN func fullyQualifiedNameToSwaggerName(fqn string, reg *descriptor.Registry) string { - return resolveFullyQualifiedNameToSwaggerName(fqn, append(reg.GetAllFQMNs(), reg.GetAllFQENs()...)) + registriesSeenMutex.Lock() + defer registriesSeenMutex.Unlock() + if mapping, present := registriesSeen[reg]; present { + return mapping[fqn] + } + mapping := resolveFullyQualifiedNameToSwaggerNames(append(reg.GetAllFQMNs(), reg.GetAllFQENs()...)) + registriesSeen[reg] = mapping + return mapping[fqn] } +// registriesSeen is used to memoise calls to resolveFullyQualifiedNameToSwaggerNames so +// we don't repeat it unnecessarily, since it can take some time. +var registriesSeen = map[*descriptor.Registry]map[string]string{} +var registriesSeenMutex sync.Mutex + // Take the names of every proto and "uniq-ify" them. The idea is to produce a // set of names that meet a couple of conditions. They must be stable, they // must be unique, and they must be shorter than the FQN. @@ -339,7 +419,7 @@ func fullyQualifiedNameToSwaggerName(fqn string, reg *descriptor.Registry) strin // This likely could be made better. This will always generate the same names // but may not always produce optimal names. This is a reasonably close // approximation of what they should look like in most cases. -func resolveFullyQualifiedNameToSwaggerName(fqn string, messages []string) string { +func resolveFullyQualifiedNameToSwaggerNames(messages []string) map[string]string { packagesByDepth := make(map[int][][]string) uniqueNames := make(map[string]string) @@ -379,7 +459,7 @@ func resolveFullyQualifiedNameToSwaggerName(fqn string, messages []string) strin } } } - return uniqueNames[fqn] + return uniqueNames } // Swagger expects paths of the form /path/{string_value} but grpc-gateway paths are expected to be of the form /path/{string_value=strprefix/*}. This should reformat it correctly. @@ -438,7 +518,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re // Correctness of svcIdx and methIdx depends on 'services' containing the services in the same order as the 'file.Service' array. 
for svcIdx, svc := range services { for methIdx, meth := range svc.Methods { - for _, b := range meth.Bindings { + for bIdx, b := range meth.Bindings { // Iterate over all the swagger parameters parameters := swaggerParametersObject{} for _, parameter := range b.PathParams { @@ -446,7 +526,13 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re var paramType, paramFormat string switch pt := parameter.Target.GetType(); pt { case pbdescriptor.FieldDescriptorProto_TYPE_GROUP, pbdescriptor.FieldDescriptorProto_TYPE_MESSAGE: - return fmt.Errorf("only primitive types are allowed in path parameters") + if descriptor.IsWellKnownType(parameter.Target.GetTypeName()) { + schema := schemaOfField(parameter.Target, reg) + paramType = schema.Type + paramFormat = schema.Format + } else { + return fmt.Errorf("only primitive and well-known types are allowed in path parameters") + } case pbdescriptor.FieldDescriptorProto_TYPE_ENUM: paramType = fullyQualifiedNameToSwaggerName(parameter.Target.GetTypeName(), reg) paramFormat = "" @@ -493,7 +579,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re Required: true, Schema: &schema, }) - } else if b.HTTPMethod == "GET" { + } else if b.HTTPMethod == "GET" || b.HTTPMethod == "DELETE" { // add the parameters to the query string queryParams, err := messageToQueryParameters(meth.RequestType, reg, b.PathParams) if err != nil { @@ -513,9 +599,8 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re desc += "(streaming responses)" } operationObject := &swaggerOperationObject{ - Tags: []string{svc.GetName()}, - OperationID: fmt.Sprintf("%s", meth.GetName()), - Parameters: parameters, + Tags: []string{svc.GetName()}, + Parameters: parameters, Responses: swaggerResponsesObject{ "200": swaggerResponseObject{ Description: desc, @@ -527,6 +612,12 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re }, }, } + if bIdx == 0 { + operationObject.OperationID = fmt.Sprintf("%s", meth.GetName()) + } else { + // OperationID must be unique in an OpenAPI v2 definition. 
+ operationObject.OperationID = fmt.Sprintf("%s%d", meth.GetName(), bIdx+1) + } // Fill reference map with referenced request messages for _, param := range operationObject.Parameters { @@ -540,6 +631,57 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re panic(err) } + opts, err := extractOperationOptionFromMethodDescriptor(meth.MethodDescriptorProto) + if opts != nil { + if err != nil { + panic(err) + } + if opts.ExternalDocs != nil { + if operationObject.ExternalDocs == nil { + operationObject.ExternalDocs = &swaggerExternalDocumentationObject{} + } + if opts.ExternalDocs.Description != "" { + operationObject.ExternalDocs.Description = opts.ExternalDocs.Description + } + if opts.ExternalDocs.Url != "" { + operationObject.ExternalDocs.URL = opts.ExternalDocs.Url + } + } + // TODO(ivucica): this would be better supported by looking whether the method is deprecated in the proto file + operationObject.Deprecated = opts.Deprecated + + if opts.Summary != "" { + operationObject.Summary = opts.Summary + } + if opts.Description != "" { + operationObject.Description = opts.Description + } + if len(opts.Tags) > 0 { + operationObject.Tags = make([]string, len(opts.Tags)) + copy(operationObject.Tags, opts.Tags) + } + if opts.Security != nil { + newSecurity := []swaggerSecurityRequirementObject{} + if operationObject.Security == nil { + newSecurity = []swaggerSecurityRequirementObject{} + } else { + newSecurity = operationObject.Security + } + for _, secReq := range opts.Security { + newSecReq := swaggerSecurityRequirementObject{} + for secReqKey, secReqValue := range secReq.SecurityRequirement { + newSecReqValue := make([]string, len(secReqValue.Scope)) + copy(newSecReqValue, secReqValue.Scope) + newSecReq[secReqKey] = newSecReqValue + } + newSecurity = append(newSecurity, newSecReq) + } + operationObject.Security = newSecurity + } + + // TODO(ivucica): add remaining fields of operation object + } + switch b.HTTPMethod { case "DELETE": pathItemObject.Delete = operationObject @@ -606,6 +748,161 @@ func applyTemplate(p param) (string, error) { panic(err) } + // There may be additional options in the swagger option in the proto. 
+ spb, err := extractSwaggerOptionFromFileDescriptor(p.FileDescriptorProto) + if err != nil { + panic(err) + } + if spb != nil { + if spb.Swagger != "" { + s.Swagger = spb.Swagger + } + if spb.Info != nil { + if spb.Info.Title != "" { + s.Info.Title = spb.Info.Title + } + if spb.Info.Description != "" { + s.Info.Description = spb.Info.Description + } + if spb.Info.TermsOfService != "" { + s.Info.TermsOfService = spb.Info.TermsOfService + } + if spb.Info.Version != "" { + s.Info.Version = spb.Info.Version + } + if spb.Info.Contact != nil { + if s.Info.Contact == nil { + s.Info.Contact = &swaggerContactObject{} + } + if spb.Info.Contact.Name != "" { + s.Info.Contact.Name = spb.Info.Contact.Name + } + if spb.Info.Contact.Url != "" { + s.Info.Contact.URL = spb.Info.Contact.Url + } + if spb.Info.Contact.Email != "" { + s.Info.Contact.Email = spb.Info.Contact.Email + } + } + } + if spb.Host != "" { + s.Host = spb.Host + } + if spb.BasePath != "" { + s.BasePath = spb.BasePath + } + if len(spb.Schemes) > 0 { + s.Schemes = make([]string, len(spb.Schemes)) + for i, scheme := range spb.Schemes { + s.Schemes[i] = strings.ToLower(scheme.String()) + } + } + if len(spb.Consumes) > 0 { + s.Consumes = make([]string, len(spb.Consumes)) + copy(s.Consumes, spb.Consumes) + } + if len(spb.Produces) > 0 { + s.Produces = make([]string, len(spb.Produces)) + copy(s.Produces, spb.Produces) + } + if spb.SecurityDefinitions != nil && spb.SecurityDefinitions.Security != nil { + if s.SecurityDefinitions == nil { + s.SecurityDefinitions = swaggerSecurityDefinitionsObject{} + } + for secDefKey, secDefValue := range spb.SecurityDefinitions.Security { + var newSecDefValue swaggerSecuritySchemeObject + if oldSecDefValue, ok := s.SecurityDefinitions[secDefKey]; !ok { + newSecDefValue = swaggerSecuritySchemeObject{} + } else { + newSecDefValue = oldSecDefValue + } + if secDefValue.Type != swagger_options.SecurityScheme_TYPE_INVALID { + switch secDefValue.Type { + case swagger_options.SecurityScheme_TYPE_BASIC: + newSecDefValue.Type = "basic" + case swagger_options.SecurityScheme_TYPE_API_KEY: + newSecDefValue.Type = "apiKey" + case swagger_options.SecurityScheme_TYPE_OAUTH2: + newSecDefValue.Type = "oauth2" + } + } + if secDefValue.Description != "" { + newSecDefValue.Description = secDefValue.Description + } + if secDefValue.Name != "" { + newSecDefValue.Name = secDefValue.Name + } + if secDefValue.In != swagger_options.SecurityScheme_IN_INVALID { + switch secDefValue.In { + case swagger_options.SecurityScheme_IN_QUERY: + newSecDefValue.In = "query" + case swagger_options.SecurityScheme_IN_HEADER: + newSecDefValue.In = "header" + } + } + if secDefValue.Flow != swagger_options.SecurityScheme_FLOW_INVALID { + switch secDefValue.Flow { + case swagger_options.SecurityScheme_FLOW_IMPLICIT: + newSecDefValue.Flow = "implicit" + case swagger_options.SecurityScheme_FLOW_PASSWORD: + newSecDefValue.Flow = "password" + case swagger_options.SecurityScheme_FLOW_APPLICATION: + newSecDefValue.Flow = "application" + case swagger_options.SecurityScheme_FLOW_ACCESS_CODE: + newSecDefValue.Flow = "accessCode" + } + } + if secDefValue.AuthorizationUrl != "" { + newSecDefValue.AuthorizationURL = secDefValue.AuthorizationUrl + } + if secDefValue.TokenUrl != "" { + newSecDefValue.TokenURL = secDefValue.TokenUrl + } + if secDefValue.Scopes != nil { + if newSecDefValue.Scopes == nil { + newSecDefValue.Scopes = swaggerScopesObject{} + } + for scopeKey, scopeDesc := range secDefValue.Scopes.Scope { + newSecDefValue.Scopes[scopeKey] = scopeDesc + } + } + 
s.SecurityDefinitions[secDefKey] = newSecDefValue + } + } + if spb.Security != nil { + newSecurity := []swaggerSecurityRequirementObject{} + if s.Security == nil { + newSecurity = []swaggerSecurityRequirementObject{} + } else { + newSecurity = s.Security + } + for _, secReq := range spb.Security { + newSecReq := swaggerSecurityRequirementObject{} + for secReqKey, secReqValue := range secReq.SecurityRequirement { + newSecReqValue := make([]string, len(secReqValue.Scope)) + copy(newSecReqValue, secReqValue.Scope) + newSecReq[secReqKey] = newSecReqValue + } + newSecurity = append(newSecurity, newSecReq) + } + s.Security = newSecurity + } + if spb.ExternalDocs != nil { + if s.ExternalDocs == nil { + s.ExternalDocs = &swaggerExternalDocumentationObject{} + } + if spb.ExternalDocs.Description != "" { + s.ExternalDocs.Description = spb.ExternalDocs.Description + } + if spb.ExternalDocs.Url != "" { + s.ExternalDocs.URL = spb.ExternalDocs.Url + } + } + + // Additional fields on the OpenAPI v2 spec's "Swagger" object + // should be added here, once supported in the proto. + } + // We now have rendered the entire swagger object. Write the bytes out to a // string so it can be written to disk. var w bytes.Buffer @@ -618,9 +915,9 @@ func applyTemplate(p param) (string, error) { // updateSwaggerDataFromComments updates a Swagger object based on a comment // from the proto file. // -// First paragraph of a comment is used for summary. Remaining paragraphs of a -// comment are used for description. If 'Summary' field is not present on the -// passed swaggerObject, the summary and description are joined by \n\n. +// First paragraph of a comment is used for summary. Remaining paragraphs of +// a comment are used for description. If 'Summary' field is not present on +// the passed swaggerObject, the summary and description are joined by \n\n. // // If there is a field named 'Info', its 'Summary' and 'Description' fields // will be updated instead. @@ -650,17 +947,15 @@ func updateSwaggerDataFromComments(swaggerObject interface{}, comment string) er usingTitle = true } + paragraphs := strings.Split(comment, "\n\n") + // If there is a summary (or summary-equivalent), use the first // paragraph as summary, and the rest as description. if summaryValue.CanSet() { - paragraphs := strings.Split(comment, "\n\n") - summary := strings.TrimSpace(paragraphs[0]) description := strings.TrimSpace(strings.Join(paragraphs[1:], "\n\n")) - if !usingTitle || summary == "" || summary[len(summary)-1] != '.' { - if len(summary) > 0 { - summaryValue.Set(reflect.ValueOf(summary)) - } + if !usingTitle || (len(summary) > 0 && summary[len(summary)-1] != '.') { + summaryValue.Set(reflect.ValueOf(summary)) if len(description) > 0 { if !descriptionValue.CanSet() { return fmt.Errorf("Encountered object type with a summary, but no description") @@ -674,7 +969,7 @@ func updateSwaggerDataFromComments(swaggerObject interface{}, comment string) er // There was no summary field on the swaggerObject. Try to apply the // whole comment into description. if descriptionValue.CanSet() { - descriptionValue.Set(reflect.ValueOf(comment)) + descriptionValue.Set(reflect.ValueOf(strings.Join(paragraphs, "\n\n"))) return nil } @@ -709,13 +1004,8 @@ func enumValueProtoComments(reg *descriptor.Registry, enum *descriptor.Enum) str func protoComments(reg *descriptor.Registry, file *descriptor.File, outers []string, typeName string, typeIndex int32, fieldPaths ...int32) string { if file.SourceCodeInfo == nil { - // Curious! A file without any source code info. 
- // This could be a test that's providing incomplete - // descriptor.File information. - // - // We could simply return no comments, but panic - // could make debugging easier. - panic("descriptor.File should not contain nil SourceCodeInfo") + fmt.Fprintln(os.Stderr, "descriptor.File should not contain nil SourceCodeInfo") + return "" } outerPaths := make([]int32, len(outers)) @@ -845,3 +1135,63 @@ func protoPathIndex(descriptorType reflect.Type, what string) int32 { return int32(path) } + +// extractOperationOptionFromMethodDescriptor extracts the message of type +// swagger_options.Operation from a given proto method's descriptor. +func extractOperationOptionFromMethodDescriptor(meth *pbdescriptor.MethodDescriptorProto) (*swagger_options.Operation, error) { + if meth.Options == nil { + return nil, nil + } + if !proto.HasExtension(meth.Options, swagger_options.E_Openapiv2Operation) { + return nil, nil + } + ext, err := proto.GetExtension(meth.Options, swagger_options.E_Openapiv2Operation) + if err != nil { + return nil, err + } + opts, ok := ext.(*swagger_options.Operation) + if !ok { + return nil, fmt.Errorf("extension is %T; want an Operation", ext) + } + return opts, nil +} + +// extractSchemaOptionFromMessageDescriptor extracts the message of type +// swagger_options.Schema from a given proto message's descriptor. +func extractSchemaOptionFromMessageDescriptor(msg *pbdescriptor.DescriptorProto) (*swagger_options.Schema, error) { + if msg.Options == nil { + return nil, nil + } + if !proto.HasExtension(msg.Options, swagger_options.E_Openapiv2Schema) { + return nil, nil + } + ext, err := proto.GetExtension(msg.Options, swagger_options.E_Openapiv2Schema) + if err != nil { + return nil, err + } + opts, ok := ext.(*swagger_options.Schema) + if !ok { + return nil, fmt.Errorf("extension is %T; want a Schema", ext) + } + return opts, nil +} + +// extractSwaggerOptionFromFileDescriptor extracts the message of type +// swagger_options.Swagger from a given proto method's descriptor. 
+func extractSwaggerOptionFromFileDescriptor(file *pbdescriptor.FileDescriptorProto) (*swagger_options.Swagger, error) { + if file.Options == nil { + return nil, nil + } + if !proto.HasExtension(file.Options, swagger_options.E_Openapiv2Swagger) { + return nil, nil + } + ext, err := proto.GetExtension(file.Options, swagger_options.E_Openapiv2Swagger) + if err != nil { + return nil, err + } + opts, ok := ext.(*swagger_options.Swagger) + if !ok { + return nil, fmt.Errorf("extension is %T; want a Swagger object", ext) + } + return opts, nil +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template_test.go index 1d438936..d24f978f 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/template_test.go @@ -758,7 +758,8 @@ func TestResolveFullyQualifiedNameToSwaggerName(t *testing.T) { } for _, data := range tests { - output := resolveFullyQualifiedNameToSwaggerName(data.input, data.listOfFQMNs) + names := resolveFullyQualifiedNameToSwaggerNames(data.listOfFQMNs) + output := names[data.input] if output != data.output { t.Errorf("Expected fullyQualifiedNameToSwaggerName(%v) to be %s but got %s", data.input, data.output, output) @@ -786,3 +787,121 @@ func TestFQMNtoSwaggerName(t *testing.T) { } } } + +func TestSchemaOfField(t *testing.T) { + type test struct { + field *descriptor.Field + expected schemaCore + } + + tests := []test{ + { + field: &descriptor.Field{ + FieldDescriptorProto: &protodescriptor.FieldDescriptorProto{ + Name: proto.String("primitive_field"), + Type: protodescriptor.FieldDescriptorProto_TYPE_STRING.Enum(), + }, + }, + expected: schemaCore{ + Type: "string", + }, + }, + { + field: &descriptor.Field{ + FieldDescriptorProto: &protodescriptor.FieldDescriptorProto{ + Name: proto.String("repeated_primitive_field"), + Type: protodescriptor.FieldDescriptorProto_TYPE_STRING.Enum(), + Label: protodescriptor.FieldDescriptorProto_LABEL_REPEATED.Enum(), + }, + }, + expected: schemaCore{ + Type: "array", + Items: &swaggerItemsObject{ + Type: "string", + }, + }, + }, + { + field: &descriptor.Field{ + FieldDescriptorProto: &protodescriptor.FieldDescriptorProto{ + Name: proto.String("wrapped_field"), + TypeName: proto.String(".google.protobuf.StringValue"), + Type: protodescriptor.FieldDescriptorProto_TYPE_MESSAGE.Enum(), + }, + }, + expected: schemaCore{ + Type: "string", + }, + }, + { + field: &descriptor.Field{ + FieldDescriptorProto: &protodescriptor.FieldDescriptorProto{ + Name: proto.String("repeated_wrapped_field"), + TypeName: proto.String(".google.protobuf.StringValue"), + Type: protodescriptor.FieldDescriptorProto_TYPE_MESSAGE.Enum(), + Label: protodescriptor.FieldDescriptorProto_LABEL_REPEATED.Enum(), + }, + }, + expected: schemaCore{ + Type: "array", + Items: &swaggerItemsObject{ + Type: "string", + }, + }, + }, + { + field: &descriptor.Field{ + FieldDescriptorProto: &protodescriptor.FieldDescriptorProto{ + Name: proto.String("message_field"), + TypeName: proto.String(".example.Message"), + Type: protodescriptor.FieldDescriptorProto_TYPE_MESSAGE.Enum(), + }, + }, + expected: schemaCore{ + Ref: "#/definitions/exampleMessage", + }, + }, + } + + reg := descriptor.NewRegistry() + reg.Load(&plugin.CodeGeneratorRequest{ + ProtoFile: []*protodescriptor.FileDescriptorProto{ + { + SourceCodeInfo: 
&protodescriptor.SourceCodeInfo{}, + Name: proto.String("example.proto"), + Package: proto.String("example"), + Dependency: []string{}, + MessageType: []*protodescriptor.DescriptorProto{ + { + Name: proto.String("Message"), + Field: []*protodescriptor.FieldDescriptorProto{ + { + Name: proto.String("value"), + Type: protodescriptor.FieldDescriptorProto_TYPE_STRING.Enum(), + }, + }, + }, + }, + EnumType: []*protodescriptor.EnumDescriptorProto{ + { + Name: proto.String("Message"), + }, + }, + Service: []*protodescriptor.ServiceDescriptorProto{}, + }, + }, + }) + + for _, test := range tests { + actual := schemaOfField(test.field, reg) + if e, a := test.expected.Type, actual.Type; e != a { + t.Errorf("Expected schemaOfField(%v).Type = %s, actual: %s", test.field, e, a) + } + if e, a := test.expected.Ref, actual.Ref; e != a { + t.Errorf("Expected schemaOfField(%v).Ref = %s, actual: %s", test.field, e, a) + } + if e, a := test.expected.Items.getType(), actual.Items.getType(); e != a { + t.Errorf("Expected schemaOfField(%v).Items.Type = %v, actual.Type: %v", test.field, e, a) + } + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/types.go index c328d1c6..0c85d941 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/types.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger/types.go @@ -23,9 +23,8 @@ type swaggerInfoObject struct { TermsOfService string `json:"termsOfService,omitempty"` Version string `json:"version"` - Contact *swaggerContactObject `json:"contact,omitempty"` - License *swaggerLicenseObject `json:"license,omitempty"` - ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"` + Contact *swaggerContactObject `json:"contact,omitempty"` + License *swaggerLicenseObject `json:"license,omitempty"` } // http://swagger.io/specification/#contactObject @@ -49,17 +48,41 @@ type swaggerExternalDocumentationObject struct { // http://swagger.io/specification/#swaggerObject type swaggerObject struct { - Swagger string `json:"swagger"` - Info swaggerInfoObject `json:"info"` - Host string `json:"host,omitempty"` - BasePath string `json:"basePath,omitempty"` - Schemes []string `json:"schemes"` - Consumes []string `json:"consumes"` - Produces []string `json:"produces"` - Paths swaggerPathsObject `json:"paths"` - Definitions swaggerDefinitionsObject `json:"definitions"` + Swagger string `json:"swagger"` + Info swaggerInfoObject `json:"info"` + Host string `json:"host,omitempty"` + BasePath string `json:"basePath,omitempty"` + Schemes []string `json:"schemes"` + Consumes []string `json:"consumes"` + Produces []string `json:"produces"` + Paths swaggerPathsObject `json:"paths"` + Definitions swaggerDefinitionsObject `json:"definitions"` + SecurityDefinitions swaggerSecurityDefinitionsObject `json:"securityDefinitions,omitempty"` + Security []swaggerSecurityRequirementObject `json:"security,omitempty"` + ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"` } +// http://swagger.io/specification/#securityDefinitionsObject +type swaggerSecurityDefinitionsObject map[string]swaggerSecuritySchemeObject + +// http://swagger.io/specification/#securitySchemeObject +type swaggerSecuritySchemeObject struct { + Type string `json:"type"` + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` + In string `json:"in,omitempty"` + Flow 
string `json:"flow,omitempty"` + AuthorizationURL string `json:"authorizationUrl,omitempty"` + TokenURL string `json:"tokenUrl,omitempty"` + Scopes swaggerScopesObject `json:"scopes,omitempty"` +} + +// http://swagger.io/specification/#scopesObject +type swaggerScopesObject map[string]string + +// http://swagger.io/specification/#securityRequirementObject +type swaggerSecurityRequirementObject map[string][]string + // http://swagger.io/specification/#pathsObject type swaggerPathsObject map[string]swaggerPathItemObject @@ -80,7 +103,9 @@ type swaggerOperationObject struct { Responses swaggerResponsesObject `json:"responses"` Parameters swaggerParametersObject `json:"parameters,omitempty"` Tags []string `json:"tags,omitempty"` + Deprecated bool `json:"deprecated,omitempty"` + Security []swaggerSecurityRequirementObject `json:"security,omitempty"` ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"` } @@ -121,6 +146,13 @@ type schemaCore struct { type swaggerItemsObject schemaCore +func (o *swaggerItemsObject) getType() string { + if o == nil { + return "" + } + return o.Type +} + // http://swagger.io/specification/#responsesObject type swaggerResponsesObject map[string]swaggerResponseObject @@ -170,6 +202,8 @@ type swaggerSchemaObject struct { Description string `json:"description,omitempty"` Title string `json:"title,omitempty"` + + ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"` } // http://swagger.io/specification/#referenceObject diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main.go index db747704..3d7f1ab7 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main.go @@ -3,40 +3,23 @@ package main import ( "flag" "fmt" - "io" - "io/ioutil" "os" "strings" "github.com/golang/glog" "github.com/golang/protobuf/proto" plugin "github.com/golang/protobuf/protoc-gen-go/plugin" + "github.com/grpc-ecosystem/grpc-gateway/codegenerator" "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger" ) var ( importPrefix = flag.String("import_prefix", "", "prefix to be added to go package paths for imported proto files") - file = flag.String("file", "stdin", "where to load data from") + file = flag.String("file", "-", "where to load data from") allowDeleteBody = flag.Bool("allow_delete_body", false, "unless set, HTTP DELETE methods may not have a body") ) -func parseReq(r io.Reader) (*plugin.CodeGeneratorRequest, error) { - glog.V(1).Info("Parsing code generator request") - input, err := ioutil.ReadAll(r) - if err != nil { - glog.Errorf("Failed to read code generator request: %v", err) - return nil, err - } - req := new(plugin.CodeGeneratorRequest) - if err = proto.Unmarshal(input, req); err != nil { - glog.Errorf("Failed to unmarshal code generator request: %v", err) - return nil, err - } - glog.V(1).Info("Parsed code generator request") - return req, nil -} - func main() { flag.Parse() defer glog.Flush() @@ -45,13 +28,19 @@ func main() { glog.V(1).Info("Processing code generator request") f := os.Stdin - if *file != "stdin" { - f, _ = os.Open("input.txt") + if *file != "-" { + var err error + f, err = os.Open(*file) + if err != nil { + glog.Fatal(err) + } } - req, err := parseReq(f) + glog.V(1).Info("Parsing code generator request") + req, err := 
codegenerator.ParseRequest(f) if err != nil { glog.Fatal(err) } + glog.V(1).Info("Parsed code generator request") pkgMap := make(map[string]string) if req.Parameter != nil { err := parseReqParam(req.GetParameter(), flag.CommandLine, pkgMap) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main_test.go index c4d12dd2..d4b9d91d 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/main_test.go @@ -22,7 +22,7 @@ func TestParseReqParam(t *testing.T) { if !reflect.DeepEqual(pkgMap, expected) { t.Errorf("Test 0: pkgMap parse error, expected '%v', got '%v'", expected, pkgMap) } - checkFlags(false, "stdin", "", t, 0) + checkFlags(false, "-", "", t, 0) clearFlags() pkgMap = make(map[string]string) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/BUILD.bazel new file mode 100644 index 00000000..476cb59d --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/BUILD.bazel @@ -0,0 +1,33 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +package(default_visibility = ["//visibility:public"]) + +proto_library( + name = "options_proto", + srcs = [ + "annotations.proto", + "openapiv2.proto", + ], + deps = [ + "@com_google_protobuf//:any_proto", + "@com_google_protobuf//:descriptor_proto", + ], +) + +go_proto_library( + name = "options_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options", + proto = ":options_proto", + deps = [ + "@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library", + "@com_github_golang_protobuf//ptypes/any:go_default_library", + ], +) + +go_library( + name = "go_default_library", + embed = [":options_go_proto"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options", +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.pb.go new file mode 100644 index 00000000..7bc93b91 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.pb.go @@ -0,0 +1,83 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: protoc-gen-swagger/options/annotations.proto + +package options + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf1 "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +var E_Openapiv2Swagger = &proto.ExtensionDesc{ + ExtendedType: (*google_protobuf1.FileOptions)(nil), + ExtensionType: (*Swagger)(nil), + Field: 1042, + Name: "grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger", + Tag: "bytes,1042,opt,name=openapiv2_swagger,json=openapiv2Swagger", + Filename: "protoc-gen-swagger/options/annotations.proto", +} + +var E_Openapiv2Operation = &proto.ExtensionDesc{ + ExtendedType: (*google_protobuf1.MethodOptions)(nil), + ExtensionType: (*Operation)(nil), + Field: 1042, + Name: "grpc.gateway.protoc_gen_swagger.options.openapiv2_operation", + Tag: "bytes,1042,opt,name=openapiv2_operation,json=openapiv2Operation", + Filename: "protoc-gen-swagger/options/annotations.proto", +} + +var E_Openapiv2Schema = &proto.ExtensionDesc{ + ExtendedType: (*google_protobuf1.MessageOptions)(nil), + ExtensionType: (*Schema)(nil), + Field: 1042, + Name: "grpc.gateway.protoc_gen_swagger.options.openapiv2_schema", + Tag: "bytes,1042,opt,name=openapiv2_schema,json=openapiv2Schema", + Filename: "protoc-gen-swagger/options/annotations.proto", +} + +var E_Openapiv2Tag = &proto.ExtensionDesc{ + ExtendedType: (*google_protobuf1.ServiceOptions)(nil), + ExtensionType: (*Tag)(nil), + Field: 1042, + Name: "grpc.gateway.protoc_gen_swagger.options.openapiv2_tag", + Tag: "bytes,1042,opt,name=openapiv2_tag,json=openapiv2Tag", + Filename: "protoc-gen-swagger/options/annotations.proto", +} + +func init() { + proto.RegisterExtension(E_Openapiv2Swagger) + proto.RegisterExtension(E_Openapiv2Operation) + proto.RegisterExtension(E_Openapiv2Schema) + proto.RegisterExtension(E_Openapiv2Tag) +} + +func init() { proto.RegisterFile("protoc-gen-swagger/options/annotations.proto", fileDescriptor1) } + +var fileDescriptor1 = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x41, 0x4b, 0x03, 0x31, + 0x10, 0x85, 0xe9, 0x45, 0x64, 0x55, 0xac, 0xeb, 0x45, 0x8a, 0x68, 0x6f, 0x8a, 0xb4, 0x89, 0xd4, + 0xdb, 0xde, 0x54, 0xf0, 0x26, 0x85, 0x6d, 0x4f, 0x5e, 0x4a, 0x9a, 0x8e, 0xd3, 0x40, 0x9b, 0x09, + 0x49, 0xda, 0x52, 0xe8, 0xd1, 0x5f, 0xe0, 0x2f, 0x16, 0x93, 0xed, 0x56, 0xd6, 0x2a, 0x7b, 0xdb, + 0x99, 0x9d, 0xf7, 0xbe, 0xc7, 0x23, 0x49, 0xc7, 0x58, 0xf2, 0x24, 0xbb, 0x08, 0xba, 0xeb, 0x56, + 0x02, 0x11, 0x2c, 0x27, 0xe3, 0x15, 0x69, 0xc7, 0x85, 0xd6, 0xe4, 0x45, 0xf8, 0x66, 0xe1, 0x2c, + 0xbd, 0x41, 0x6b, 0x24, 0x43, 0xe1, 0x61, 0x25, 0xd6, 0x71, 0x27, 0x47, 0x08, 0x7a, 0x54, 0x48, + 0x59, 0x21, 0x6d, 0xdd, 0xfd, 0x63, 0x4b, 0x06, 0xb4, 0x30, 0x6a, 0xd9, 0x8b, 0x06, 0xad, 0x36, + 0x12, 0xe1, 0x0c, 0x78, 0x98, 0xc6, 0x8b, 0x77, 0x3e, 0x01, 0x27, 0xad, 0x32, 0x9e, 0x6c, 0xbc, + 0xc8, 0x36, 0xc9, 0x59, 0x29, 0xda, 0xa2, 0xd2, 0x4b, 0x16, 0x75, 0x6c, 0xab, 0x63, 0x2f, 0x6a, + 0x06, 0xfd, 0x08, 0xb9, 0xf8, 0x3c, 0x6c, 0x37, 0x6e, 0x8f, 0x7a, 0xf7, 0xac, 0x66, 0x62, 0x36, + 0x88, 0x73, 0xde, 0x2c, 0x49, 0xc5, 0x26, 0xfb, 0x68, 0x24, 0xe7, 0x3b, 0x3c, 0x19, 0xb0, 0xa1, + 0x93, 0xf4, 0xea, 0x57, 0x80, 0x57, 0xf0, 0x53, 0x9a, 0x54, 0x22, 0xf4, 0x6a, 0x47, 0xe8, 0x6f, + 0xad, 0xf3, 0xb4, 0xe4, 0x95, 0xbb, 0x6c, 0x93, 0x34, 0x7f, 0x94, 0x20, 0xa7, 0x30, 0x17, 0xe9, + 0xf5, 0x9e, 0x08, 0xce, 0x09, 0xac, 0xd6, 0xc0, 0xeb, 0xd7, 0x10, 0x8c, 0xf3, 0xd3, 0x5d, 0x0b, + 0x61, 0x91, 0xb9, 0xe4, 0x64, 0x47, 0xf7, 0x02, 0xf7, 0xa0, 0x07, 0x60, 0x97, 0x4a, 0x56, 0xd1, + 0x9d, 0xda, 0xe8, 0xa1, 0xc0, 0xfc, 0xb8, 0x84, 0x0c, 0x05, 0x3e, 0x3d, 0xbf, 0x3d, 0xa2, 0xf2, + 0xd3, 0xc5, 0x98, 0x49, 
0x9a, 0xf3, 0x6f, 0x9f, 0x2e, 0x48, 0x72, 0x6b, 0xe7, 0xa1, 0x18, 0x0b, + 0x5b, 0xfe, 0xf7, 0x73, 0x1b, 0x1f, 0x84, 0x7f, 0x0f, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x80, + 0x7f, 0xc1, 0x6a, 0xea, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.proto new file mode 100644 index 00000000..8746192b --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/annotations.proto @@ -0,0 +1,37 @@ +syntax = "proto3"; + +package grpc.gateway.protoc_gen_swagger.options; + +option go_package = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options"; + +import "protoc-gen-swagger/options/openapiv2.proto"; +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.FileOptions { + // ID assigned by protobuf-global-extension-registry@google.com for grpc-gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Swagger openapiv2_swagger = 1042; +} +extend google.protobuf.MethodOptions { + // ID assigned by protobuf-global-extension-registry@google.com for grpc-gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Operation openapiv2_operation = 1042; +} +extend google.protobuf.MessageOptions { + // ID assigned by protobuf-global-extension-registry@google.com for grpc-gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Schema openapiv2_schema = 1042; +} +extend google.protobuf.ServiceOptions { + // ID assigned by protobuf-global-extension-registry@google.com for grpc-gateway project. + // + // All IDs are the same, as assigned. It is okay that they are the same, as they extend + // different descriptor messages. + Tag openapiv2_tag = 1042; +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.pb.go new file mode 100644 index 00000000..1b639f78 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.pb.go @@ -0,0 +1,1100 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: protoc-gen-swagger/options/openapiv2.proto + +/* +Package options is a generated protocol buffer package. + +It is generated from these files: + protoc-gen-swagger/options/openapiv2.proto + protoc-gen-swagger/options/annotations.proto + +It has these top-level messages: + Swagger + Operation + Info + Contact + ExternalDocumentation + Schema + JSONSchema + Tag + SecurityDefinitions + SecurityScheme + SecurityRequirement + Scopes +*/ +package options + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Swagger_SwaggerScheme int32 + +const ( + Swagger_UNKNOWN Swagger_SwaggerScheme = 0 + Swagger_HTTP Swagger_SwaggerScheme = 1 + Swagger_HTTPS Swagger_SwaggerScheme = 2 + Swagger_WS Swagger_SwaggerScheme = 3 + Swagger_WSS Swagger_SwaggerScheme = 4 +) + +var Swagger_SwaggerScheme_name = map[int32]string{ + 0: "UNKNOWN", + 1: "HTTP", + 2: "HTTPS", + 3: "WS", + 4: "WSS", +} +var Swagger_SwaggerScheme_value = map[string]int32{ + "UNKNOWN": 0, + "HTTP": 1, + "HTTPS": 2, + "WS": 3, + "WSS": 4, +} + +func (x Swagger_SwaggerScheme) String() string { + return proto.EnumName(Swagger_SwaggerScheme_name, int32(x)) +} +func (Swagger_SwaggerScheme) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +type JSONSchema_JSONSchemaSimpleTypes int32 + +const ( + JSONSchema_UNKNOWN JSONSchema_JSONSchemaSimpleTypes = 0 + JSONSchema_ARRAY JSONSchema_JSONSchemaSimpleTypes = 1 + JSONSchema_BOOLEAN JSONSchema_JSONSchemaSimpleTypes = 2 + JSONSchema_INTEGER JSONSchema_JSONSchemaSimpleTypes = 3 + JSONSchema_NULL JSONSchema_JSONSchemaSimpleTypes = 4 + JSONSchema_NUMBER JSONSchema_JSONSchemaSimpleTypes = 5 + JSONSchema_OBJECT JSONSchema_JSONSchemaSimpleTypes = 6 + JSONSchema_STRING JSONSchema_JSONSchemaSimpleTypes = 7 +) + +var JSONSchema_JSONSchemaSimpleTypes_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ARRAY", + 2: "BOOLEAN", + 3: "INTEGER", + 4: "NULL", + 5: "NUMBER", + 6: "OBJECT", + 7: "STRING", +} +var JSONSchema_JSONSchemaSimpleTypes_value = map[string]int32{ + "UNKNOWN": 0, + "ARRAY": 1, + "BOOLEAN": 2, + "INTEGER": 3, + "NULL": 4, + "NUMBER": 5, + "OBJECT": 6, + "STRING": 7, +} + +func (x JSONSchema_JSONSchemaSimpleTypes) String() string { + return proto.EnumName(JSONSchema_JSONSchemaSimpleTypes_name, int32(x)) +} +func (JSONSchema_JSONSchemaSimpleTypes) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{6, 0} +} + +// Required. The type of the security scheme. Valid values are "basic", +// "apiKey" or "oauth2". +type SecurityScheme_Type int32 + +const ( + SecurityScheme_TYPE_INVALID SecurityScheme_Type = 0 + SecurityScheme_TYPE_BASIC SecurityScheme_Type = 1 + SecurityScheme_TYPE_API_KEY SecurityScheme_Type = 2 + SecurityScheme_TYPE_OAUTH2 SecurityScheme_Type = 3 +) + +var SecurityScheme_Type_name = map[int32]string{ + 0: "TYPE_INVALID", + 1: "TYPE_BASIC", + 2: "TYPE_API_KEY", + 3: "TYPE_OAUTH2", +} +var SecurityScheme_Type_value = map[string]int32{ + "TYPE_INVALID": 0, + "TYPE_BASIC": 1, + "TYPE_API_KEY": 2, + "TYPE_OAUTH2": 3, +} + +func (x SecurityScheme_Type) String() string { + return proto.EnumName(SecurityScheme_Type_name, int32(x)) +} +func (SecurityScheme_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } + +// Required. The location of the API key. Valid values are "query" or "header". +type SecurityScheme_In int32 + +const ( + SecurityScheme_IN_INVALID SecurityScheme_In = 0 + SecurityScheme_IN_QUERY SecurityScheme_In = 1 + SecurityScheme_IN_HEADER SecurityScheme_In = 2 +) + +var SecurityScheme_In_name = map[int32]string{ + 0: "IN_INVALID", + 1: "IN_QUERY", + 2: "IN_HEADER", +} +var SecurityScheme_In_value = map[string]int32{ + "IN_INVALID": 0, + "IN_QUERY": 1, + "IN_HEADER": 2, +} + +func (x SecurityScheme_In) String() string { + return proto.EnumName(SecurityScheme_In_name, int32(x)) +} +func (SecurityScheme_In) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 1} } + +// Required. The flow used by the OAuth2 security scheme. 
Valid values are +// "implicit", "password", "application" or "accessCode". +type SecurityScheme_Flow int32 + +const ( + SecurityScheme_FLOW_INVALID SecurityScheme_Flow = 0 + SecurityScheme_FLOW_IMPLICIT SecurityScheme_Flow = 1 + SecurityScheme_FLOW_PASSWORD SecurityScheme_Flow = 2 + SecurityScheme_FLOW_APPLICATION SecurityScheme_Flow = 3 + SecurityScheme_FLOW_ACCESS_CODE SecurityScheme_Flow = 4 +) + +var SecurityScheme_Flow_name = map[int32]string{ + 0: "FLOW_INVALID", + 1: "FLOW_IMPLICIT", + 2: "FLOW_PASSWORD", + 3: "FLOW_APPLICATION", + 4: "FLOW_ACCESS_CODE", +} +var SecurityScheme_Flow_value = map[string]int32{ + "FLOW_INVALID": 0, + "FLOW_IMPLICIT": 1, + "FLOW_PASSWORD": 2, + "FLOW_APPLICATION": 3, + "FLOW_ACCESS_CODE": 4, +} + +func (x SecurityScheme_Flow) String() string { + return proto.EnumName(SecurityScheme_Flow_name, int32(x)) +} +func (SecurityScheme_Flow) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 2} } + +// `Swagger` is a representation of OpenAPI v2 specification's Swagger object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#swaggerObject +// +// TODO(ivucica): document fields +type Swagger struct { + Swagger string `protobuf:"bytes,1,opt,name=swagger" json:"swagger,omitempty"` + Info *Info `protobuf:"bytes,2,opt,name=info" json:"info,omitempty"` + Host string `protobuf:"bytes,3,opt,name=host" json:"host,omitempty"` + BasePath string `protobuf:"bytes,4,opt,name=base_path,json=basePath" json:"base_path,omitempty"` + Schemes []Swagger_SwaggerScheme `protobuf:"varint,5,rep,packed,name=schemes,enum=grpc.gateway.protoc_gen_swagger.options.Swagger_SwaggerScheme" json:"schemes,omitempty"` + Consumes []string `protobuf:"bytes,6,rep,name=consumes" json:"consumes,omitempty"` + Produces []string `protobuf:"bytes,7,rep,name=produces" json:"produces,omitempty"` + SecurityDefinitions *SecurityDefinitions `protobuf:"bytes,11,opt,name=security_definitions,json=securityDefinitions" json:"security_definitions,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security" json:"security,omitempty"` + ExternalDocs *ExternalDocumentation `protobuf:"bytes,14,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` +} + +func (m *Swagger) Reset() { *m = Swagger{} } +func (m *Swagger) String() string { return proto.CompactTextString(m) } +func (*Swagger) ProtoMessage() {} +func (*Swagger) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Swagger) GetSwagger() string { + if m != nil { + return m.Swagger + } + return "" +} + +func (m *Swagger) GetInfo() *Info { + if m != nil { + return m.Info + } + return nil +} + +func (m *Swagger) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *Swagger) GetBasePath() string { + if m != nil { + return m.BasePath + } + return "" +} + +func (m *Swagger) GetSchemes() []Swagger_SwaggerScheme { + if m != nil { + return m.Schemes + } + return nil +} + +func (m *Swagger) GetConsumes() []string { + if m != nil { + return m.Consumes + } + return nil +} + +func (m *Swagger) GetProduces() []string { + if m != nil { + return m.Produces + } + return nil +} + +func (m *Swagger) GetSecurityDefinitions() *SecurityDefinitions { + if m != nil { + return m.SecurityDefinitions + } + return nil +} + +func (m *Swagger) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +func (m *Swagger) GetExternalDocs() *ExternalDocumentation { + if m != nil { + return m.ExternalDocs + } + 
return nil +} + +// `Operation` is a representation of OpenAPI v2 specification's Operation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#operationObject +// +// TODO(ivucica): document fields +type Operation struct { + Tags []string `protobuf:"bytes,1,rep,name=tags" json:"tags,omitempty"` + Summary string `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocumentation `protobuf:"bytes,4,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + OperationId string `protobuf:"bytes,5,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` + Consumes []string `protobuf:"bytes,6,rep,name=consumes" json:"consumes,omitempty"` + Produces []string `protobuf:"bytes,7,rep,name=produces" json:"produces,omitempty"` + Schemes []string `protobuf:"bytes,10,rep,name=schemes" json:"schemes,omitempty"` + Deprecated bool `protobuf:"varint,11,opt,name=deprecated" json:"deprecated,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security" json:"security,omitempty"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Operation) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Operation) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *Operation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Operation) GetExternalDocs() *ExternalDocumentation { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Operation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *Operation) GetConsumes() []string { + if m != nil { + return m.Consumes + } + return nil +} + +func (m *Operation) GetProduces() []string { + if m != nil { + return m.Produces + } + return nil +} + +func (m *Operation) GetSchemes() []string { + if m != nil { + return m.Schemes + } + return nil +} + +func (m *Operation) GetDeprecated() bool { + if m != nil { + return m.Deprecated + } + return false +} + +func (m *Operation) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +// `Info` is a representation of OpenAPI v2 specification's Info object. 
+// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#infoObject +// +// TODO(ivucica): document fields +type Info struct { + Title string `protobuf:"bytes,1,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + TermsOfService string `protobuf:"bytes,3,opt,name=terms_of_service,json=termsOfService" json:"terms_of_service,omitempty"` + Contact *Contact `protobuf:"bytes,4,opt,name=contact" json:"contact,omitempty"` + Version string `protobuf:"bytes,6,opt,name=version" json:"version,omitempty"` +} + +func (m *Info) Reset() { *m = Info{} } +func (m *Info) String() string { return proto.CompactTextString(m) } +func (*Info) ProtoMessage() {} +func (*Info) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *Info) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Info) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Info) GetTermsOfService() string { + if m != nil { + return m.TermsOfService + } + return "" +} + +func (m *Info) GetContact() *Contact { + if m != nil { + return m.Contact + } + return nil +} + +func (m *Info) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +// `Contact` is a representation of OpenAPI v2 specification's Contact object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#contactObject +// +// TODO(ivucica): document fields +type Contact struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + Email string `protobuf:"bytes,3,opt,name=email" json:"email,omitempty"` +} + +func (m *Contact) Reset() { *m = Contact{} } +func (m *Contact) String() string { return proto.CompactTextString(m) } +func (*Contact) ProtoMessage() {} +func (*Contact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *Contact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Contact) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Contact) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +// `ExternalDocumentation` is a representation of OpenAPI v2 specification's +// ExternalDocumentation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#externalDocumentationObject +// +// TODO(ivucica): document fields +type ExternalDocumentation struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` +} + +func (m *ExternalDocumentation) Reset() { *m = ExternalDocumentation{} } +func (m *ExternalDocumentation) String() string { return proto.CompactTextString(m) } +func (*ExternalDocumentation) ProtoMessage() {} +func (*ExternalDocumentation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *ExternalDocumentation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ExternalDocumentation) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// `Schema` is a representation of OpenAPI v2 specification's Schema object. 
+// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +// TODO(ivucica): document fields +type Schema struct { + JsonSchema *JSONSchema `protobuf:"bytes,1,opt,name=json_schema,json=jsonSchema" json:"json_schema,omitempty"` + Discriminator string `protobuf:"bytes,2,opt,name=discriminator" json:"discriminator,omitempty"` + ReadOnly bool `protobuf:"varint,3,opt,name=read_only,json=readOnly" json:"read_only,omitempty"` + ExternalDocs *ExternalDocumentation `protobuf:"bytes,5,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + Example *google_protobuf.Any `protobuf:"bytes,6,opt,name=example" json:"example,omitempty"` +} + +func (m *Schema) Reset() { *m = Schema{} } +func (m *Schema) String() string { return proto.CompactTextString(m) } +func (*Schema) ProtoMessage() {} +func (*Schema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *Schema) GetJsonSchema() *JSONSchema { + if m != nil { + return m.JsonSchema + } + return nil +} + +func (m *Schema) GetDiscriminator() string { + if m != nil { + return m.Discriminator + } + return "" +} + +func (m *Schema) GetReadOnly() bool { + if m != nil { + return m.ReadOnly + } + return false +} + +func (m *Schema) GetExternalDocs() *ExternalDocumentation { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Schema) GetExample() *google_protobuf.Any { + if m != nil { + return m.Example + } + return nil +} + +// `JSONSchema` represents properties from JSON Schema taken, and as used, in +// the OpenAPI v2 spec. +// +// This includes changes made by OpenAPI v2. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +// See also: https://cswr.github.io/JsonSchema/spec/basic_types/, +// https://github.com/json-schema-org/json-schema-spec/blob/master/schema.json +// +// TODO(ivucica): document fields +type JSONSchema struct { + Title string `protobuf:"bytes,5,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,6,opt,name=description" json:"description,omitempty"` + Default string `protobuf:"bytes,7,opt,name=default" json:"default,omitempty"` + MultipleOf float64 `protobuf:"fixed64,10,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength uint64 `protobuf:"varint,15,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength uint64 `protobuf:"varint,16,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern" json:"pattern,omitempty"` + MaxItems uint64 `protobuf:"varint,20,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems uint64 `protobuf:"varint,21,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,22,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + MaxProperties uint64 `protobuf:"varint,24,opt,name=max_properties,json=maxProperties" json:"max_properties,omitempty"` + MinProperties uint64 `protobuf:"varint,25,opt,name=min_properties,json=minProperties" 
json:"min_properties,omitempty"` + Required []string `protobuf:"bytes,26,rep,name=required" json:"required,omitempty"` + // Items in 'array' must be unique. + Array []string `protobuf:"bytes,34,rep,name=array" json:"array,omitempty"` + Type []JSONSchema_JSONSchemaSimpleTypes `protobuf:"varint,35,rep,packed,name=type,enum=grpc.gateway.protoc_gen_swagger.options.JSONSchema_JSONSchemaSimpleTypes" json:"type,omitempty"` +} + +func (m *JSONSchema) Reset() { *m = JSONSchema{} } +func (m *JSONSchema) String() string { return proto.CompactTextString(m) } +func (*JSONSchema) ProtoMessage() {} +func (*JSONSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *JSONSchema) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *JSONSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *JSONSchema) GetDefault() string { + if m != nil { + return m.Default + } + return "" +} + +func (m *JSONSchema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *JSONSchema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *JSONSchema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *JSONSchema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *JSONSchema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *JSONSchema) GetMaxLength() uint64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *JSONSchema) GetMinLength() uint64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *JSONSchema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *JSONSchema) GetMaxItems() uint64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *JSONSchema) GetMinItems() uint64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *JSONSchema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *JSONSchema) GetMaxProperties() uint64 { + if m != nil { + return m.MaxProperties + } + return 0 +} + +func (m *JSONSchema) GetMinProperties() uint64 { + if m != nil { + return m.MinProperties + } + return 0 +} + +func (m *JSONSchema) GetRequired() []string { + if m != nil { + return m.Required + } + return nil +} + +func (m *JSONSchema) GetArray() []string { + if m != nil { + return m.Array + } + return nil +} + +func (m *JSONSchema) GetType() []JSONSchema_JSONSchemaSimpleTypes { + if m != nil { + return m.Type + } + return nil +} + +// `Tag` is a representation of OpenAPI v2 specification's Tag object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#tagObject +// +// TODO(ivucica): document fields +type Tag struct { + // TODO(ivucica): Description should be extracted from comments on the proto + // service object. 
+ Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocumentation `protobuf:"bytes,3,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` +} + +func (m *Tag) Reset() { *m = Tag{} } +func (m *Tag) String() string { return proto.CompactTextString(m) } +func (*Tag) ProtoMessage() {} +func (*Tag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *Tag) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Tag) GetExternalDocs() *ExternalDocumentation { + if m != nil { + return m.ExternalDocs + } + return nil +} + +// `SecurityDefinitions` is a representation of OpenAPI v2 specification's +// Security Definitions object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject +// +// A declaration of the security schemes available to be used in the +// specification. This does not enforce the security schemes on the operations +// and only serves to provide the relevant details for each scheme. +type SecurityDefinitions struct { + // A single security scheme definition, mapping a "name" to the scheme it defines. + Security map[string]*SecurityScheme `protobuf:"bytes,1,rep,name=security" json:"security,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *SecurityDefinitions) Reset() { *m = SecurityDefinitions{} } +func (m *SecurityDefinitions) String() string { return proto.CompactTextString(m) } +func (*SecurityDefinitions) ProtoMessage() {} +func (*SecurityDefinitions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *SecurityDefinitions) GetSecurity() map[string]*SecurityScheme { + if m != nil { + return m.Security + } + return nil +} + +// `SecurityScheme` is a representation of OpenAPI v2 specification's +// Security Scheme object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securitySchemeObject +// +// Allows the definition of a security scheme that can be used by the +// operations. Supported schemes are basic authentication, an API key (either as +// a header or as a query parameter) and OAuth2's common flows (implicit, +// password, application and access code). +type SecurityScheme struct { + // Required. The type of the security scheme. Valid values are "basic", + // "apiKey" or "oauth2". + Type SecurityScheme_Type `protobuf:"varint,1,opt,name=type,enum=grpc.gateway.protoc_gen_swagger.options.SecurityScheme_Type" json:"type,omitempty"` + // A short description for security scheme. + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + // Required. The name of the header or query parameter to be used. + // + // Valid for apiKey. + Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` + // Required. The location of the API key. Valid values are "query" or "header". + // + // Valid for apiKey. + In SecurityScheme_In `protobuf:"varint,4,opt,name=in,enum=grpc.gateway.protoc_gen_swagger.options.SecurityScheme_In" json:"in,omitempty"` + // Required. The flow used by the OAuth2 security scheme. Valid values are + // "implicit", "password", "application" or "accessCode". + // + // Valid for oauth2. + Flow SecurityScheme_Flow `protobuf:"varint,5,opt,name=flow,enum=grpc.gateway.protoc_gen_swagger.options.SecurityScheme_Flow" json:"flow,omitempty"` + // Required. The authorization URL to be used for this flow. 
This SHOULD be in + // the form of a URL. + // + // Valid for oauth2/implicit and oauth2/accessCode. + AuthorizationUrl string `protobuf:"bytes,6,opt,name=authorization_url,json=authorizationUrl" json:"authorization_url,omitempty"` + // Required. The token URL to be used for this flow. This SHOULD be in the + // form of a URL. + // + // Valid for oauth2/password, oauth2/application and oauth2/accessCode. + TokenUrl string `protobuf:"bytes,7,opt,name=token_url,json=tokenUrl" json:"token_url,omitempty"` + // Required. The available scopes for the OAuth2 security scheme. + // + // Valid for oauth2. + Scopes *Scopes `protobuf:"bytes,8,opt,name=scopes" json:"scopes,omitempty"` +} + +func (m *SecurityScheme) Reset() { *m = SecurityScheme{} } +func (m *SecurityScheme) String() string { return proto.CompactTextString(m) } +func (*SecurityScheme) ProtoMessage() {} +func (*SecurityScheme) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *SecurityScheme) GetType() SecurityScheme_Type { + if m != nil { + return m.Type + } + return SecurityScheme_TYPE_INVALID +} + +func (m *SecurityScheme) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *SecurityScheme) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SecurityScheme) GetIn() SecurityScheme_In { + if m != nil { + return m.In + } + return SecurityScheme_IN_INVALID +} + +func (m *SecurityScheme) GetFlow() SecurityScheme_Flow { + if m != nil { + return m.Flow + } + return SecurityScheme_FLOW_INVALID +} + +func (m *SecurityScheme) GetAuthorizationUrl() string { + if m != nil { + return m.AuthorizationUrl + } + return "" +} + +func (m *SecurityScheme) GetTokenUrl() string { + if m != nil { + return m.TokenUrl + } + return "" +} + +func (m *SecurityScheme) GetScopes() *Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +// `SecurityRequirement` is a representation of OpenAPI v2 specification's +// Security Requirement object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityRequirementObject +// +// Lists the required security schemes to execute this operation. The object can +// have multiple security schemes declared in it which are all required (that +// is, there is a logical AND between the schemes). +// +// The name used for each property MUST correspond to a security scheme +// declared in the Security Definitions. +type SecurityRequirement struct { + // Each name must correspond to a security scheme which is declared in + // the Security Definitions. If the security scheme is of type "oauth2", + // then the value is a list of scope names required for the execution. + // For other security scheme types, the array MUST be empty. 
+ SecurityRequirement map[string]*SecurityRequirement_SecurityRequirementValue `protobuf:"bytes,1,rep,name=security_requirement,json=securityRequirement" json:"security_requirement,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *SecurityRequirement) Reset() { *m = SecurityRequirement{} } +func (m *SecurityRequirement) String() string { return proto.CompactTextString(m) } +func (*SecurityRequirement) ProtoMessage() {} +func (*SecurityRequirement) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *SecurityRequirement) GetSecurityRequirement() map[string]*SecurityRequirement_SecurityRequirementValue { + if m != nil { + return m.SecurityRequirement + } + return nil +} + +// If the security scheme is of type "oauth2", then the value is a list of +// scope names required for the execution. For other security scheme types, +// the array MUST be empty. +type SecurityRequirement_SecurityRequirementValue struct { + Scope []string `protobuf:"bytes,1,rep,name=scope" json:"scope,omitempty"` +} + +func (m *SecurityRequirement_SecurityRequirementValue) Reset() { + *m = SecurityRequirement_SecurityRequirementValue{} +} +func (m *SecurityRequirement_SecurityRequirementValue) String() string { + return proto.CompactTextString(m) +} +func (*SecurityRequirement_SecurityRequirementValue) ProtoMessage() {} +func (*SecurityRequirement_SecurityRequirementValue) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{10, 0} +} + +func (m *SecurityRequirement_SecurityRequirementValue) GetScope() []string { + if m != nil { + return m.Scope + } + return nil +} + +// `Scopes` is a representation of OpenAPI v2 specification's Scopes object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#scopesObject +// +// Lists the available scopes for an OAuth2 security scheme. +type Scopes struct { + // Maps between a name of a scope to a short description of it (as the value + // of the property). 
+ Scope map[string]string `protobuf:"bytes,1,rep,name=scope" json:"scope,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *Scopes) Reset() { *m = Scopes{} } +func (m *Scopes) String() string { return proto.CompactTextString(m) } +func (*Scopes) ProtoMessage() {} +func (*Scopes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *Scopes) GetScope() map[string]string { + if m != nil { + return m.Scope + } + return nil +} + +func init() { + proto.RegisterType((*Swagger)(nil), "grpc.gateway.protoc_gen_swagger.options.Swagger") + proto.RegisterType((*Operation)(nil), "grpc.gateway.protoc_gen_swagger.options.Operation") + proto.RegisterType((*Info)(nil), "grpc.gateway.protoc_gen_swagger.options.Info") + proto.RegisterType((*Contact)(nil), "grpc.gateway.protoc_gen_swagger.options.Contact") + proto.RegisterType((*ExternalDocumentation)(nil), "grpc.gateway.protoc_gen_swagger.options.ExternalDocumentation") + proto.RegisterType((*Schema)(nil), "grpc.gateway.protoc_gen_swagger.options.Schema") + proto.RegisterType((*JSONSchema)(nil), "grpc.gateway.protoc_gen_swagger.options.JSONSchema") + proto.RegisterType((*Tag)(nil), "grpc.gateway.protoc_gen_swagger.options.Tag") + proto.RegisterType((*SecurityDefinitions)(nil), "grpc.gateway.protoc_gen_swagger.options.SecurityDefinitions") + proto.RegisterType((*SecurityScheme)(nil), "grpc.gateway.protoc_gen_swagger.options.SecurityScheme") + proto.RegisterType((*SecurityRequirement)(nil), "grpc.gateway.protoc_gen_swagger.options.SecurityRequirement") + proto.RegisterType((*SecurityRequirement_SecurityRequirementValue)(nil), "grpc.gateway.protoc_gen_swagger.options.SecurityRequirement.SecurityRequirementValue") + proto.RegisterType((*Scopes)(nil), "grpc.gateway.protoc_gen_swagger.options.Scopes") + proto.RegisterEnum("grpc.gateway.protoc_gen_swagger.options.Swagger_SwaggerScheme", Swagger_SwaggerScheme_name, Swagger_SwaggerScheme_value) + proto.RegisterEnum("grpc.gateway.protoc_gen_swagger.options.JSONSchema_JSONSchemaSimpleTypes", JSONSchema_JSONSchemaSimpleTypes_name, JSONSchema_JSONSchemaSimpleTypes_value) + proto.RegisterEnum("grpc.gateway.protoc_gen_swagger.options.SecurityScheme_Type", SecurityScheme_Type_name, SecurityScheme_Type_value) + proto.RegisterEnum("grpc.gateway.protoc_gen_swagger.options.SecurityScheme_In", SecurityScheme_In_name, SecurityScheme_In_value) + proto.RegisterEnum("grpc.gateway.protoc_gen_swagger.options.SecurityScheme_Flow", SecurityScheme_Flow_name, SecurityScheme_Flow_value) +} + +func init() { proto.RegisterFile("protoc-gen-swagger/options/openapiv2.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 1664 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x5b, 0x73, 0xdb, 0xc6, + 0x15, 0x0e, 0xc8, 0x25, 0x09, 0x1e, 0x4a, 0xcc, 0x7a, 0x2d, 0xb7, 0x88, 0x12, 0xbb, 0x0a, 0x9a, + 0x4e, 0x35, 0xf6, 0x98, 0x4a, 0x95, 0x87, 0x66, 0x32, 0x9d, 0xce, 0x50, 0x12, 0x63, 0x03, 0x96, + 0x49, 0x16, 0xa4, 0xa2, 0xb8, 0x33, 0x1d, 0xcc, 0x1a, 0x5c, 0x52, 0x88, 0x71, 0x61, 0x70, 0x91, + 0xc4, 0xfe, 0x82, 0x3e, 0x77, 0xfa, 0xda, 0x9f, 0xd1, 0xa7, 0xf6, 0x67, 0xf4, 0x97, 0xb4, 0x6f, + 0x7d, 0xea, 0xec, 0x05, 0x24, 0x74, 0x49, 0x86, 0x91, 0xed, 0x27, 0xee, 0xf9, 0xce, 0x65, 0xf7, + 0x5c, 0xf6, 0x9c, 0x05, 0xe1, 0xf1, 0x3c, 0x89, 0xb3, 0xd8, 0x7b, 0x3a, 0x63, 0xd1, 0xd3, 0xf4, + 0x82, 0xce, 0x66, 0x2c, 0xd9, 0x8b, 0xe7, 0x99, 0x1f, 0x47, 0xe9, 0x5e, 0x3c, 0x67, 0x11, 0x9d, + 0xfb, 0xe7, 0xfb, 0x1d, 
0x21, 0x44, 0x7e, 0x3d, 0x4b, 0xe6, 0x5e, 0x67, 0x46, 0x33, 0x76, 0x41, + 0x17, 0x12, 0xf3, 0xdc, 0x19, 0x8b, 0x5c, 0xa5, 0xd8, 0x51, 0x8a, 0xdb, 0x1f, 0xcd, 0xe2, 0x78, + 0x16, 0xb0, 0x3d, 0x21, 0xf2, 0x3a, 0x9f, 0xee, 0xd1, 0x48, 0xc9, 0x9b, 0xff, 0xa8, 0x41, 0x63, + 0x24, 0xc5, 0x89, 0x01, 0x0d, 0xa5, 0x69, 0x68, 0x3b, 0xda, 0x6e, 0xd3, 0x29, 0x48, 0xd2, 0x05, + 0xe4, 0x47, 0xd3, 0xd8, 0xa8, 0xec, 0x68, 0xbb, 0xad, 0xfd, 0xa7, 0x9d, 0x35, 0x37, 0xee, 0x58, + 0xd1, 0x34, 0x76, 0x84, 0x2a, 0x21, 0x80, 0xce, 0xe2, 0x34, 0x33, 0xaa, 0xc2, 0xb2, 0x58, 0x93, + 0x8f, 0xa1, 0xf9, 0x9a, 0xa6, 0xcc, 0x9d, 0xd3, 0xec, 0xcc, 0x40, 0x82, 0xa1, 0x73, 0x60, 0x48, + 0xb3, 0x33, 0xf2, 0x2d, 0x34, 0x52, 0xef, 0x8c, 0x85, 0x2c, 0x35, 0x6a, 0x3b, 0xd5, 0xdd, 0xf6, + 0xfe, 0xef, 0xd7, 0xde, 0x56, 0x39, 0x54, 0xfc, 0x8e, 0x84, 0x19, 0xa7, 0x30, 0x47, 0xb6, 0x41, + 0xf7, 0xe2, 0x28, 0xcd, 0xb9, 0xe9, 0xfa, 0x4e, 0x95, 0xef, 0x5a, 0xd0, 0x9c, 0x37, 0x4f, 0xe2, + 0x49, 0xee, 0xb1, 0xd4, 0x68, 0x48, 0x5e, 0x41, 0x93, 0x18, 0xb6, 0x52, 0xe6, 0xe5, 0x89, 0x9f, + 0x2d, 0xdc, 0x09, 0x9b, 0xfa, 0x91, 0x2f, 0xb6, 0x33, 0x5a, 0x22, 0x2a, 0xbf, 0x5b, 0xff, 0x78, + 0xca, 0xc8, 0xd1, 0xca, 0x86, 0x73, 0x3f, 0xbd, 0x09, 0x92, 0x6f, 0x41, 0x2f, 0x60, 0x63, 0x63, + 0xa7, 0x7a, 0xa7, 0x4d, 0x1c, 0xf6, 0x7d, 0xee, 0x27, 0x2c, 0x64, 0x51, 0xe6, 0x2c, 0xad, 0x11, + 0x0f, 0x36, 0xd9, 0x65, 0xc6, 0x92, 0x88, 0x06, 0xee, 0x24, 0xf6, 0x52, 0xa3, 0x2d, 0x7c, 0x58, + 0x3f, 0xc4, 0x3d, 0xa5, 0x7d, 0x14, 0x7b, 0x39, 0xb7, 0x4d, 0x39, 0xec, 0x6c, 0xb0, 0x15, 0x9c, + 0x9a, 0x07, 0xb0, 0x79, 0x25, 0x03, 0xa4, 0x05, 0x8d, 0x93, 0xfe, 0x8b, 0xfe, 0xe0, 0xb4, 0x8f, + 0x3f, 0x20, 0x3a, 0xa0, 0xe7, 0xe3, 0xf1, 0x10, 0x6b, 0xa4, 0x09, 0x35, 0xbe, 0x1a, 0xe1, 0x0a, + 0xa9, 0x43, 0xe5, 0x74, 0x84, 0xab, 0xa4, 0x01, 0xd5, 0xd3, 0xd1, 0x08, 0x23, 0x1b, 0xe9, 0x3a, + 0x6e, 0xda, 0x48, 0x6f, 0x62, 0xb0, 0x91, 0x0e, 0xb8, 0x65, 0x23, 0x7d, 0x13, 0xb7, 0xcd, 0x7f, + 0x56, 0xa1, 0x39, 0x98, 0xb3, 0x44, 0xec, 0xcb, 0x8b, 0x2b, 0xa3, 0xb3, 0xd4, 0xd0, 0x44, 0xc6, + 0xc4, 0x5a, 0x54, 0x73, 0x1e, 0x86, 0x34, 0x59, 0x88, 0xb2, 0xe5, 0xd5, 0x2c, 0x49, 0xb2, 0x03, + 0xad, 0x09, 0x4b, 0xbd, 0xc4, 0x17, 0xbe, 0xa8, 0x8a, 0x2c, 0x43, 0x37, 0xc3, 0x83, 0xde, 0x7d, + 0x78, 0xc8, 0xa7, 0xb0, 0x11, 0x17, 0x1e, 0xb8, 0xfe, 0xc4, 0xa8, 0xc9, 0x73, 0x2c, 0x31, 0x6b, + 0x72, 0xe7, 0x4a, 0x35, 0x56, 0x77, 0x07, 0x04, 0x6b, 0x59, 0xfb, 0x8f, 0x00, 0x26, 0x6c, 0x9e, + 0x30, 0x8f, 0x66, 0x6c, 0x22, 0x2a, 0x57, 0x77, 0x4a, 0xc8, 0xfb, 0x2b, 0xb9, 0x72, 0x26, 0xcd, + 0x7f, 0x6b, 0x80, 0x78, 0x6f, 0x20, 0x5b, 0x50, 0xcb, 0xfc, 0x2c, 0x60, 0xaa, 0xe1, 0x48, 0xe2, + 0x7a, 0x82, 0x2a, 0x37, 0x13, 0xb4, 0x0b, 0x38, 0x63, 0x49, 0x98, 0xba, 0xf1, 0xd4, 0x4d, 0x59, + 0x72, 0xee, 0x7b, 0x4c, 0xe5, 0xb1, 0x2d, 0xf0, 0xc1, 0x74, 0x24, 0x51, 0x62, 0x43, 0xc3, 0x8b, + 0xa3, 0x8c, 0x7a, 0x99, 0x4a, 0xe2, 0xe7, 0x6b, 0xfb, 0x73, 0x28, 0xf5, 0x9c, 0xc2, 0x00, 0x0f, + 0xeb, 0x39, 0x4b, 0x52, 0x7e, 0xa6, 0xba, 0x2c, 0x29, 0x45, 0xda, 0x48, 0xaf, 0xe1, 0xba, 0xd9, + 0x83, 0x86, 0xd2, 0xe1, 0x15, 0x19, 0xd1, 0xb0, 0xf0, 0x4b, 0xac, 0x09, 0x86, 0x6a, 0x9e, 0x04, + 0xca, 0x1d, 0xbe, 0xe4, 0xee, 0xb3, 0x90, 0xfa, 0x81, 0x3a, 0xbb, 0x24, 0xcc, 0x17, 0xf0, 0xe0, + 0xd6, 0xfa, 0xb9, 0x1e, 0x17, 0xed, 0x66, 0x5c, 0x6e, 0x6c, 0x61, 0xfe, 0xab, 0x02, 0x75, 0x71, + 0xfd, 0x28, 0x19, 0x43, 0xeb, 0xbb, 0x34, 0x8e, 0x5c, 0x51, 0x0b, 0x54, 0xa8, 0xb7, 0xf6, 0xbf, + 0x58, 0x3b, 0x1c, 0xf6, 0x68, 0xd0, 0x97, 0x96, 0x1c, 0xe0, 0x76, 0x94, 0xd5, 0xcf, 0x60, 0x73, + 0xe2, 0xf3, 0x13, 0x84, 0x7e, 0x44, 0xb3, 0x38, 
0x51, 0x9b, 0x5f, 0x05, 0x79, 0xab, 0x4f, 0x18, + 0x9d, 0xb8, 0x71, 0x14, 0x2c, 0x84, 0xb7, 0xba, 0xa3, 0x73, 0x60, 0x10, 0x05, 0xb7, 0x74, 0xa3, + 0xda, 0x7b, 0xb8, 0x6e, 0x1d, 0x68, 0xb0, 0x4b, 0x1a, 0xce, 0x03, 0x26, 0x92, 0xd7, 0xda, 0xdf, + 0xea, 0xc8, 0xb1, 0xd8, 0x29, 0xc6, 0x62, 0xa7, 0x1b, 0x2d, 0x9c, 0x42, 0xc8, 0x46, 0x3a, 0xc2, + 0x35, 0xf3, 0x7f, 0x75, 0x80, 0x95, 0xe3, 0xab, 0x7a, 0xad, 0xfd, 0x48, 0xbd, 0xd6, 0x6f, 0xe6, + 0xc5, 0x80, 0xc6, 0x84, 0x4d, 0x69, 0x1e, 0x64, 0x46, 0x43, 0x56, 0x8e, 0x22, 0xc9, 0x2f, 0xa0, + 0x15, 0xe6, 0x41, 0xe6, 0xcf, 0x03, 0xe6, 0xc6, 0x53, 0x03, 0x76, 0xb4, 0x5d, 0xcd, 0x81, 0x02, + 0x1a, 0x4c, 0xb9, 0x6a, 0x48, 0x2f, 0xfd, 0x30, 0x0f, 0xc5, 0x75, 0xd5, 0x9c, 0x82, 0x24, 0x4f, + 0xe0, 0x1e, 0xbb, 0xf4, 0x82, 0x3c, 0xf5, 0xcf, 0x99, 0x5b, 0xc8, 0x6c, 0x88, 0xd8, 0xe2, 0x25, + 0xe3, 0xa5, 0x12, 0xe6, 0x66, 0xfc, 0x48, 0x88, 0x6c, 0x2a, 0x33, 0x92, 0xbc, 0x66, 0x46, 0xc9, + 0xb4, 0xaf, 0x9b, 0x51, 0xc2, 0x0f, 0x01, 0x42, 0x7a, 0xe9, 0x06, 0x2c, 0x9a, 0x65, 0x67, 0xc6, + 0x87, 0x3b, 0xda, 0x2e, 0x72, 0x9a, 0x21, 0xbd, 0x3c, 0x16, 0x80, 0x60, 0xfb, 0x51, 0xc1, 0xc6, + 0x8a, 0xed, 0x47, 0x8a, 0x6d, 0x40, 0x63, 0x4e, 0x33, 0x9e, 0x14, 0xe3, 0x9e, 0x0c, 0x83, 0x22, + 0x79, 0x7d, 0x70, 0xbb, 0x7e, 0xc6, 0xc2, 0xd4, 0xd8, 0x12, 0x7a, 0x7a, 0x48, 0x2f, 0x2d, 0x4e, + 0x0b, 0xa6, 0x1f, 0x29, 0xe6, 0x03, 0xc5, 0xf4, 0x23, 0xc9, 0xfc, 0x14, 0x36, 0xf2, 0xc8, 0xff, + 0x3e, 0x67, 0x8a, 0xff, 0x33, 0x71, 0xf2, 0x96, 0xc4, 0xa4, 0xc8, 0xaf, 0xa0, 0xcd, 0x8d, 0xcf, + 0x13, 0xde, 0x5b, 0x33, 0x9f, 0xa5, 0x86, 0x21, 0x8c, 0x6c, 0x86, 0xf4, 0x72, 0xb8, 0x04, 0x85, + 0x98, 0x1f, 0x95, 0xc5, 0x3e, 0x52, 0x62, 0x7e, 0x54, 0x12, 0xdb, 0x06, 0x3d, 0x91, 0x1d, 0x6e, + 0x62, 0x6c, 0xcb, 0xc6, 0x5b, 0xd0, 0xbc, 0x3e, 0x68, 0x92, 0xd0, 0x85, 0x61, 0x0a, 0x86, 0x24, + 0xc8, 0x9f, 0x00, 0x65, 0x8b, 0x39, 0x33, 0x7e, 0x29, 0xde, 0x31, 0xd6, 0x1d, 0x6e, 0x5c, 0x69, + 0x39, 0xf2, 0x79, 0x79, 0x8e, 0x17, 0x73, 0x96, 0x3a, 0xc2, 0xac, 0x79, 0x01, 0x0f, 0x6e, 0x65, + 0x5f, 0x9d, 0xb7, 0x4d, 0xa8, 0x75, 0x1d, 0xa7, 0xfb, 0x0a, 0x6b, 0x1c, 0x3f, 0x18, 0x0c, 0x8e, + 0x7b, 0xdd, 0x3e, 0xae, 0x70, 0xc2, 0xea, 0x8f, 0x7b, 0xcf, 0x7a, 0x0e, 0xae, 0xf2, 0xa1, 0xdc, + 0x3f, 0x39, 0x3e, 0xc6, 0x88, 0x00, 0xd4, 0xfb, 0x27, 0x2f, 0x0f, 0x7a, 0x0e, 0xae, 0xf1, 0xf5, + 0xe0, 0xc0, 0xee, 0x1d, 0x8e, 0x71, 0x9d, 0xaf, 0x47, 0x63, 0xc7, 0xea, 0x3f, 0xc3, 0x0d, 0x1b, + 0xe9, 0x1a, 0xae, 0xd8, 0x48, 0xaf, 0xe0, 0xaa, 0x8d, 0xf4, 0xaa, 0x18, 0xd7, 0x08, 0xd7, 0xae, + 0x0d, 0x6d, 0x82, 0xef, 0xdb, 0x48, 0xbf, 0x8f, 0xb7, 0x6c, 0xa4, 0xff, 0x1c, 0x1b, 0x36, 0xd2, + 0x3f, 0xc6, 0x9f, 0xd8, 0x48, 0xff, 0x04, 0x3f, 0xb4, 0x91, 0xfe, 0x10, 0x3f, 0xb2, 0x91, 0xfe, + 0x08, 0x9b, 0x36, 0xd2, 0x3f, 0xc3, 0x8f, 0x6d, 0xa4, 0x3f, 0xc6, 0x4f, 0x6c, 0xa4, 0x3f, 0xc1, + 0x1d, 0xf3, 0xaf, 0x1a, 0x54, 0xc7, 0x74, 0xb6, 0xc6, 0x3c, 0xb8, 0xd1, 0x41, 0xaa, 0xef, 0xbe, + 0x83, 0x48, 0x77, 0xcd, 0xff, 0x6a, 0x70, 0xff, 0x96, 0x17, 0x1c, 0x99, 0x96, 0x26, 0xa7, 0x26, + 0x26, 0xa7, 0xfd, 0x36, 0x2f, 0xc2, 0x25, 0xd6, 0x8b, 0xb2, 0x64, 0xb1, 0x9a, 0xa3, 0xdb, 0x19, + 0x6c, 0x5e, 0x61, 0xf1, 0x9e, 0xff, 0x86, 0x2d, 0xd4, 0x34, 0xe0, 0x4b, 0xf2, 0x12, 0x6a, 0xe7, + 0x34, 0xc8, 0x99, 0x7a, 0xaf, 0xff, 0xf6, 0x27, 0x9f, 0x43, 0xbd, 0x98, 0xa5, 0x95, 0xaf, 0x2a, + 0x5f, 0x6a, 0xfc, 0x3b, 0xa1, 0x7d, 0x95, 0x4b, 0x86, 0xaa, 0xaa, 0xf9, 0xc6, 0xed, 0x3b, 0x3c, + 0x13, 0xa4, 0x99, 0x0e, 0x2f, 0x55, 0x59, 0xc8, 0x6b, 0xe4, 0xb9, 0x18, 0xab, 0xd5, 0xd2, 0x58, + 0xb5, 0xa1, 0xe2, 0x47, 0x62, 0xb8, 0xb7, 0xf7, 0xbf, 0xba, 0xeb, 0x29, 
0xac, 0xc8, 0xa9, 0xf8, + 0x11, 0xf7, 0x69, 0x1a, 0xc4, 0x17, 0xa2, 0xbd, 0xbf, 0x85, 0x4f, 0x5f, 0x07, 0xf1, 0x85, 0x23, + 0x2c, 0xf1, 0xee, 0x4a, 0xf3, 0xec, 0x2c, 0x4e, 0xfc, 0x3f, 0xcb, 0x97, 0x1e, 0x9f, 0xcf, 0x72, + 0x42, 0xe0, 0x2b, 0x8c, 0x93, 0x24, 0xe0, 0x8d, 0x2e, 0x8b, 0xdf, 0x30, 0x29, 0x24, 0x07, 0x85, + 0x2e, 0x00, 0xce, 0x7c, 0x06, 0xf5, 0xd4, 0x8b, 0xe7, 0x2c, 0x35, 0x74, 0x91, 0xd6, 0xbd, 0xf5, + 0x4f, 0x27, 0xd4, 0x1c, 0xa5, 0x6e, 0xbe, 0x00, 0xc4, 0x83, 0x4e, 0x30, 0x6c, 0x8c, 0x5f, 0x0d, + 0x7b, 0xae, 0xd5, 0xff, 0xa6, 0x7b, 0x6c, 0x1d, 0xe1, 0x0f, 0x48, 0x1b, 0x40, 0x20, 0x07, 0xdd, + 0x91, 0x75, 0x88, 0xb5, 0xa5, 0x44, 0x77, 0x68, 0xb9, 0x2f, 0x7a, 0xaf, 0x70, 0x85, 0x7c, 0x08, + 0x2d, 0x81, 0x0c, 0xba, 0x27, 0xe3, 0xe7, 0xfb, 0xb8, 0x6a, 0xfe, 0x06, 0x2a, 0x56, 0xc4, 0x15, + 0xad, 0x7e, 0xc9, 0xd0, 0x06, 0xe8, 0x56, 0xdf, 0xfd, 0xc3, 0x49, 0xcf, 0xe1, 0xfd, 0x66, 0x13, + 0x9a, 0x56, 0xdf, 0x7d, 0xde, 0xeb, 0x1e, 0xf5, 0x1c, 0x5c, 0x31, 0xbf, 0x03, 0xc4, 0x03, 0xc4, + 0xad, 0x7f, 0x7d, 0x3c, 0x38, 0x2d, 0xa9, 0xdd, 0x83, 0x4d, 0x89, 0xbc, 0x1c, 0x1e, 0x5b, 0x87, + 0xd6, 0x18, 0x6b, 0x4b, 0x68, 0xd8, 0x1d, 0x8d, 0x4e, 0x07, 0xce, 0x11, 0xae, 0x90, 0x2d, 0xc0, + 0x02, 0xea, 0x0e, 0xb9, 0x54, 0x77, 0x6c, 0x0d, 0xfa, 0xb8, 0xba, 0x42, 0x0f, 0x0f, 0x7b, 0xa3, + 0x91, 0x7b, 0x38, 0x38, 0xea, 0x61, 0x64, 0xfe, 0xa7, 0xb2, 0xba, 0xad, 0xa5, 0x77, 0x29, 0xf9, + 0x8b, 0x56, 0xfa, 0x98, 0x4b, 0x56, 0x0c, 0x75, 0x75, 0x4f, 0xde, 0xe6, 0xd1, 0x7b, 0x1b, 0x26, + 0x6f, 0xf1, 0xf2, 0x2b, 0xaf, 0xc4, 0xd9, 0xfe, 0x1c, 0x8c, 0x5b, 0x14, 0xbe, 0xe1, 0x57, 0x8f, + 0xcf, 0x13, 0x91, 0x34, 0xf5, 0x65, 0x23, 0x89, 0xed, 0xbf, 0x6b, 0xb7, 0xaa, 0xfc, 0x50, 0x3b, + 0x78, 0x73, 0xb5, 0x1d, 0xbc, 0x73, 0xdf, 0xc4, 0x51, 0xcb, 0xcd, 0xe2, 0x6f, 0x1a, 0x7f, 0x73, + 0xf2, 0x5a, 0x23, 0xc3, 0xb2, 0x03, 0xad, 0x9f, 0x72, 0x3f, 0x85, 0xbe, 0xfc, 0x91, 0xc1, 0x53, + 0xce, 0x7f, 0x09, 0xb0, 0x02, 0x6f, 0xf1, 0x76, 0xab, 0xec, 0x6d, 0xb3, 0x74, 0xac, 0x83, 0xc3, + 0x3f, 0x76, 0x67, 0x7e, 0x76, 0x96, 0xbf, 0xee, 0x78, 0x71, 0xb8, 0xc7, 0x0f, 0xf2, 0x94, 0x79, + 0x71, 0xba, 0x48, 0x33, 0xa6, 0x48, 0x75, 0xae, 0xbd, 0x1f, 0xfe, 0x13, 0xe6, 0x75, 0x5d, 0xf0, + 0xbe, 0xf8, 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0xb7, 0xe4, 0x3d, 0xa9, 0x11, 0x00, 0x00, +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.proto new file mode 100644 index 00000000..b3b9b89d --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/openapiv2.proto @@ -0,0 +1,338 @@ +syntax = "proto3"; + +package grpc.gateway.protoc_gen_swagger.options; + +option go_package = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options"; + +import "google/protobuf/any.proto"; + +// `Swagger` is a representation of OpenAPI v2 specification's Swagger object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#swaggerObject +// +// TODO(ivucica): document fields +message Swagger { + string swagger = 1; + Info info = 2; + string host = 3; + string base_path = 4; + enum SwaggerScheme { + UNKNOWN = 0; + HTTP = 1; + HTTPS = 2; + WS = 3; + WSS = 4; + } + repeated SwaggerScheme schemes = 5; + repeated string consumes = 6; + repeated string produces = 7; + // field 8 is reserved for 'paths'. + reserved 8; + // field 9 is reserved for 'definitions', which at this time are already + // exposed as and customizable as proto messages. 
+ reserved 9; + // field 10 is reserved for 'responses'. + reserved 10; + SecurityDefinitions security_definitions = 11; + repeated SecurityRequirement security = 12; + // field 13 is reserved for 'tags', which are supposed to be exposed as and + // customizable as proto services. TODO(ivucica): add processing of proto + // service objects into OpenAPI v2 Tag objects. + reserved 13; + ExternalDocumentation external_docs = 14; +} + +// `Operation` is a representation of OpenAPI v2 specification's Operation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#operationObject +// +// TODO(ivucica): document fields +message Operation { + repeated string tags = 1; + string summary = 2; + string description = 3; + ExternalDocumentation external_docs = 4; + string operation_id = 5; + repeated string consumes = 6; + repeated string produces = 7; + // field 8 is reserved for 'parameters'. + reserved 8; + // field 9 is reserved for 'responses'. + reserved 9; + repeated string schemes = 10; + bool deprecated = 11; + repeated SecurityRequirement security = 12; +} + +// `Info` is a representation of OpenAPI v2 specification's Info object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#infoObject +// +// TODO(ivucica): document fields +message Info { + string title = 1; + string description = 2; + string terms_of_service = 3; + Contact contact = 4; + // field 5 is reserved for 'license'. + reserved 5; + string version = 6; +} + +// `Contact` is a representation of OpenAPI v2 specification's Contact object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#contactObject +// +// TODO(ivucica): document fields +message Contact { + string name = 1; + string url = 2; + string email = 3; +} + +// `ExternalDocumentation` is a representation of OpenAPI v2 specification's +// ExternalDocumentation object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#externalDocumentationObject +// +// TODO(ivucica): document fields +message ExternalDocumentation { + string description = 1; + string url = 2; +} + +// `Schema` is a representation of OpenAPI v2 specification's Schema object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +// TODO(ivucica): document fields +message Schema { + JSONSchema json_schema = 1; + string discriminator = 2; + bool read_only = 3; + // field 4 is reserved for 'xml'. + reserved 4; + ExternalDocumentation external_docs = 5; + google.protobuf.Any example = 6; +} + +// `JSONSchema` represents properties from JSON Schema taken, and as used, in +// the OpenAPI v2 spec. +// +// This includes changes made by OpenAPI v2. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#schemaObject +// +// See also: https://cswr.github.io/JsonSchema/spec/basic_types/, +// https://github.com/json-schema-org/json-schema-spec/blob/master/schema.json +// +// TODO(ivucica): document fields +message JSONSchema { + // field 1 is reserved for '$id', omitted from OpenAPI v2. + reserved 1; + // field 2 is reserved for '$schema', omitted from OpenAPI v2. + reserved 2; + // field 3 is reserved for '$ref', although it is unclear how it would be used. + reserved 3; + // field 4 is reserved for '$comment', omitted from OpenAPI v2. 
+ reserved 4; + string title = 5; + string description = 6; + string default = 7; + // field 8 is reserved for 'readOnly', which has an OpenAPI v2-specific meaning and is defined there. + reserved 8; + // field 9 is reserved for 'examples', which is omitted from OpenAPI v2 in favor of 'example' field. + reserved 9; + double multiple_of = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + uint64 max_length = 15; + uint64 min_length = 16; + string pattern = 17; + // field 18 is reserved for 'additionalItems', omitted from OpenAPI v2. + reserved 18; + // field 19 is reserved for 'items', but in OpenAPI-specific way. TODO(ivucica): add 'items'? + reserved 19; + uint64 max_items = 20; + uint64 min_items = 21; + bool unique_items = 22; + // field 23 is reserved for 'contains', omitted from OpenAPI v2. + reserved 23; + uint64 max_properties = 24; + uint64 min_properties = 25; + repeated string required = 26; + // field 27 is reserved for 'additionalProperties', but in OpenAPI-specific way. TODO(ivucica): add 'additionalProperties'? + reserved 27; + // field 28 is reserved for 'definitions', omitted from OpenAPI v2. + reserved 28; + // field 29 is reserved for 'properties', but in OpenAPI-specific way. TODO(ivucica): add 'additionalProperties'? + reserved 29; + // following fields are reserved, as the properties have been omitted from OpenAPI v2: + // patternProperties, dependencies, propertyNames, const + reserved 30 to 33; + // Items in 'array' must be unique. + repeated string array = 34; + + enum JSONSchemaSimpleTypes { + UNKNOWN = 0; + ARRAY = 1; + BOOLEAN = 2; + INTEGER = 3; + NULL = 4; + NUMBER = 5; + OBJECT = 6; + STRING = 7; + } + + repeated JSONSchemaSimpleTypes type = 35; + // following fields are reserved, as the properties have been omitted from OpenAPI v2: + // format, contentMediaType, contentEncoding, if, then, else + reserved 36 to 41; + // field 42 is reserved for 'allOf', but in OpenAPI-specific way. TODO(ivucica): add 'allOf'? + reserved 42; + // following fields are reserved, as the properties have been omitted from OpenAPI v2: + // anyOf, oneOf, not + reserved 43 to 45; +} + +// `Tag` is a representation of OpenAPI v2 specification's Tag object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#tagObject +// +// TODO(ivucica): document fields +message Tag { + // field 1 is reserved for 'name'. In our generator, this is (to be) extracted + // from the name of proto service, and thus not exposed to the user, as + // changing tag object's name would break the link to the references to the + // tag in individual operation specifications. + // + // TODO(ivucica): Add 'name' property. Use it to allow override of the name of + // global Tag object, then use that name to reference the tag throughout the + // Swagger file. + reserved 1; + // TODO(ivucica): Description should be extracted from comments on the proto + // service object. + string description = 2; + ExternalDocumentation external_docs = 3; +} + +// `SecurityDefinitions` is a representation of OpenAPI v2 specification's +// Security Definitions object. +// +// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject +// +// A declaration of the security schemes available to be used in the +// specification. This does not enforce the security schemes on the operations +// and only serves to provide the relevant details for each scheme. 
+message SecurityDefinitions {
+ // A single security scheme definition, mapping a "name" to the scheme it defines.
+ map<string, SecurityScheme> security = 1;
+}
+
+// `SecurityScheme` is a representation of OpenAPI v2 specification's
+// Security Scheme object.
+//
+// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securitySchemeObject
+//
+// Allows the definition of a security scheme that can be used by the
+// operations. Supported schemes are basic authentication, an API key (either as
+// a header or as a query parameter) and OAuth2's common flows (implicit,
+// password, application and access code).
+message SecurityScheme {
+ // Required. The type of the security scheme. Valid values are "basic",
+ // "apiKey" or "oauth2".
+ enum Type {
+ TYPE_INVALID = 0;
+ TYPE_BASIC = 1;
+ TYPE_API_KEY = 2;
+ TYPE_OAUTH2 = 3;
+ }
+
+ // Required. The location of the API key. Valid values are "query" or "header".
+ enum In {
+ IN_INVALID = 0;
+ IN_QUERY = 1;
+ IN_HEADER = 2;
+ }
+
+ // Required. The flow used by the OAuth2 security scheme. Valid values are
+ // "implicit", "password", "application" or "accessCode".
+ enum Flow {
+ FLOW_INVALID = 0;
+ FLOW_IMPLICIT = 1;
+ FLOW_PASSWORD = 2;
+ FLOW_APPLICATION = 3;
+ FLOW_ACCESS_CODE = 4;
+ }
+
+ // Required. The type of the security scheme. Valid values are "basic",
+ // "apiKey" or "oauth2".
+ Type type = 1;
+ // A short description for security scheme.
+ string description = 2;
+ // Required. The name of the header or query parameter to be used.
+ //
+ // Valid for apiKey.
+ string name = 3;
+ // Required. The location of the API key. Valid values are "query" or "header".
+ //
+ // Valid for apiKey.
+ In in = 4;
+ // Required. The flow used by the OAuth2 security scheme. Valid values are
+ // "implicit", "password", "application" or "accessCode".
+ //
+ // Valid for oauth2.
+ Flow flow = 5;
+ // Required. The authorization URL to be used for this flow. This SHOULD be in
+ // the form of a URL.
+ //
+ // Valid for oauth2/implicit and oauth2/accessCode.
+ string authorization_url = 6;
+ // Required. The token URL to be used for this flow. This SHOULD be in the
+ // form of a URL.
+ //
+ // Valid for oauth2/password, oauth2/application and oauth2/accessCode.
+ string token_url = 7;
+ // Required. The available scopes for the OAuth2 security scheme.
+ //
+ // Valid for oauth2.
+ Scopes scopes = 8;
+}
+
+// `SecurityRequirement` is a representation of OpenAPI v2 specification's
+// Security Requirement object.
+//
+// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityRequirementObject
+//
+// Lists the required security schemes to execute this operation. The object can
+// have multiple security schemes declared in it which are all required (that
+// is, there is a logical AND between the schemes).
+//
+// The name used for each property MUST correspond to a security scheme
+// declared in the Security Definitions.
+message SecurityRequirement {
+ // If the security scheme is of type "oauth2", then the value is a list of
+ // scope names required for the execution. For other security scheme types,
+ // the array MUST be empty.
+ message SecurityRequirementValue {
+ repeated string scope = 1;
+ }
+ // Each name must correspond to a security scheme which is declared in
+ // the Security Definitions. If the security scheme is of type "oauth2",
+ // then the value is a list of scope names required for the execution.
+ // For other security scheme types, the array MUST be empty.
+ map<string, SecurityRequirementValue> security_requirement = 1;
+}
+
+// `Scopes` is a representation of OpenAPI v2 specification's Scopes object.
+//
+// See: https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#scopesObject
+//
+// Lists the available scopes for an OAuth2 security scheme.
+message Scopes {
+ // Maps between a name of a scope to a short description of it (as the value
+ // of the property).
+ map<string, string> scope = 1;
+}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/repositories.bzl b/vendor/github.com/grpc-ecosystem/grpc-gateway/repositories.bzl
new file mode 100644
index 00000000..959b20cf
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/repositories.bzl
@@ -0,0 +1,49 @@
+GOOGLEAPIS_GOOGLE_API_BUILD_CONTENTS = """
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
+
+package(default_visibility = ["//visibility:public"])
+
+proto_library(
+ name = "api_proto",
+ srcs = [
+ "annotations.proto",
+ "http.proto",
+ ],
+ deps = ["@com_google_protobuf//:descriptor_proto"],
+)
+
+go_proto_library(
+ name = "api_go_proto",
+ importpath = "google.golang.org/genproto/googleapis/api/annotations",
+ proto = ":api_proto",
+ deps = ["@com_github_golang_protobuf//protoc-gen-go/descriptor:go_default_library"],
+)
+
+go_library(
+ name = "go_default_library",
+ embed = [":api_go_proto"],
+ importpath = "google.golang.org/genproto/googleapis/api/annotations",
+)
+"""
+
+def _googleapis_repository_impl(ctx):
+ googleapis_commit = "e1c0c726290a55065c0c46a62dacc9372939973b"
+ ctx.download_and_extract(
+ url = "https://github.com/googleapis/googleapis/archive/{commit}.tar.gz".format(
+ commit = googleapis_commit,
+ ),
+ sha256 = "9508971cb4a7c0fe03bc1bfafbd0abc9654c80b4c70e360a6c534938d06d8fb9",
+ stripPrefix = "googleapis-{}".format(googleapis_commit),
+ )
+
+ ctx.file("google/api/BUILD.bazel", GOOGLEAPIS_GOOGLE_API_BUILD_CONTENTS)
+
+
+_googleapis_repository = repository_rule(
+ implementation = _googleapis_repository_impl,
+)
+
+
+def repositories():
+ _googleapis_repository(name = "com_github_googleapis_googleapis")
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel
new file mode 100644
index 00000000..e04db4f7
--- /dev/null
+++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel
@@ -0,0 +1,82 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+package(default_visibility = ["//visibility:public"])
+
+go_library(
+ name = "go_default_library",
+ srcs = [
+ "context.go",
+ "convert.go",
+ "doc.go",
+ "errors.go",
+ "handler.go",
+ "marshal_json.go",
+ "marshal_jsonpb.go",
+ "marshal_proto.go",
+ "marshaler.go",
+ "marshaler_registry.go",
+ "mux.go",
+ "pattern.go",
+ "proto2_convert.go",
+ "proto_errors.go",
+ "query.go",
+ ],
+ importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime",
+ deps = [
+ "//runtime/internal:go_default_library",
+ "//utilities:go_default_library",
+ "@com_github_golang_protobuf//jsonpb:go_default_library",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ "@com_github_golang_protobuf//ptypes:go_default_library",
+ "@com_github_golang_protobuf//ptypes/any:go_default_library",
+ "@com_github_golang_protobuf//ptypes/duration:go_default_library",
+ "@com_github_golang_protobuf//ptypes/timestamp:go_default_library",
+ "@org_golang_google_grpc//codes:go_default_library",
+ "@org_golang_google_grpc//grpclog:go_default_library",
"@org_golang_google_grpc//metadata:go_default_library", + "@org_golang_google_grpc//status:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = ["pattern_test.go"], + embed = [":go_default_library"], + deps = ["//utilities:go_default_library"], +) + +go_test( + name = "go_default_xtest", + size = "small", + srcs = [ + "context_test.go", + "errors_test.go", + "handler_test.go", + "marshal_json_test.go", + "marshal_jsonpb_test.go", + "marshal_proto_test.go", + "marshaler_registry_test.go", + "mux_test.go", + "query_test.go", + ], + deps = [ + ":go_default_library", + "//examples/proto/examplepb:go_default_library", + "//runtime/internal:go_default_library", + "//utilities:go_default_library", + "@com_github_golang_protobuf//jsonpb:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + "@com_github_golang_protobuf//ptypes:go_default_library", + "@com_github_golang_protobuf//ptypes/duration:go_default_library", + "@com_github_golang_protobuf//ptypes/empty:go_default_library", + "@com_github_golang_protobuf//ptypes/struct:go_default_library", + "@com_github_golang_protobuf//ptypes/timestamp:go_default_library", + "@com_github_golang_protobuf//ptypes/wrappers:go_default_library", + "@org_golang_google_genproto//protobuf/field_mask:go_default_library", + "@org_golang_google_grpc//:go_default_library", + "@org_golang_google_grpc//codes:go_default_library", + "@org_golang_google_grpc//metadata:go_default_library", + "@org_golang_google_grpc//status:go_default_library", + ], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go index 571e353d..a745074c 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "golang.org/x/net/context" + "context" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" @@ -61,10 +61,8 @@ func AnnotateContext(ctx context.Context, mux *ServeMux, req *http.Request) (con if strings.ToLower(key) == "authorization" { pairs = append(pairs, "authorization", val) } - if mux.incomingHeaderMatcher != nil { - if h, ok := mux.incomingHeaderMatcher(key); ok { - pairs = append(pairs, h, val) - } + if h, ok := mux.incomingHeaderMatcher(key); ok { + pairs = append(pairs, h, val) } } } @@ -93,8 +91,8 @@ func AnnotateContext(ctx context.Context, mux *ServeMux, req *http.Request) (con return ctx, nil } md := metadata.Pairs(pairs...) 
- if mux.metadataAnnotator != nil { - md = metadata.Join(md, mux.metadataAnnotator(ctx, req)) + for _, mda := range mux.metadataAnnotators { + md = metadata.Join(md, mda(ctx, req)) } return metadata.NewOutgoingContext(ctx, md), nil } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context_test.go index 955d6f13..e78a037d 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context_test.go @@ -6,8 +6,8 @@ import ( "testing" "time" + "context" "github.com/grpc-ecosystem/grpc-gateway/runtime" - "golang.org/x/net/context" "google.golang.org/grpc/metadata" ) @@ -170,3 +170,23 @@ func TestAnnotateContext_SupportsTimeouts(t *testing.T) { } } } +func TestAnnotateContext_SupportsCustomAnnotators(t *testing.T) { + md1 := func(context.Context, *http.Request) metadata.MD { return metadata.New(map[string]string{"foo": "bar"}) } + md2 := func(context.Context, *http.Request) metadata.MD { return metadata.New(map[string]string{"baz": "qux"}) } + expected := metadata.New(map[string]string{"foo": "bar", "baz": "qux"}) + request, err := http.NewRequest("GET", "http://example.com", nil) + if err != nil { + t.Fatalf(`http.NewRequest("GET", "http://example.com", nil failed with %v; want success`, err) + } + annotated, err := runtime.AnnotateContext(context.Background(), runtime.NewServeMux(runtime.WithMetadata(md1), runtime.WithMetadata(md2)), request) + if err != nil { + t.Errorf("runtime.AnnotateContext(ctx, %#v) failed with %v; want success", request, err) + return + } + actual, _ := metadata.FromOutgoingContext(annotated) + for key, e := range expected { + if a, ok := actual[key]; !ok || !reflect.DeepEqual(e, a) { + t.Errorf("metadata.MD[%s] = %v; want %v", key, a, e) + } + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go index 1af5cc4e..903ae234 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go @@ -1,7 +1,12 @@ package runtime import ( + "encoding/base64" "strconv" + + "github.com/golang/protobuf/jsonpb" + "github.com/golang/protobuf/ptypes/duration" + "github.com/golang/protobuf/ptypes/timestamp" ) // String just returns the given string. @@ -56,3 +61,27 @@ func Uint32(val string) (uint32, error) { } return uint32(i), nil } + +// Bytes converts the given string representation of a byte sequence into a slice of bytes +// A bytes sequence is encoded in URL-safe base64 without padding +func Bytes(val string) ([]byte, error) { + b, err := base64.StdEncoding.DecodeString(val) + if err != nil { + return nil, err + } + return b, nil +} + +// Timestamp converts the given RFC3339 formatted string into a timestamp.Timestamp. +func Timestamp(val string) (*timestamp.Timestamp, error) { + var r *timestamp.Timestamp + err := jsonpb.UnmarshalString(val, r) + return r, err +} + +// Duration converts the given string into a timestamp.Duration. 
+func Duration(val string) (*duration.Duration, error) { + var r *duration.Duration + err := jsonpb.UnmarshalString(val, r) + return r, err +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go index 8eebdcf4..d82dc7f8 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go @@ -4,14 +4,17 @@ import ( "io" "net/http" + "context" "github.com/golang/protobuf/proto" - "golang.org/x/net/context" + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/any" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/status" ) // HTTPStatusFromCode converts a gRPC error code into the corresponding HTTP response status. +// See: https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto func HTTPStatusFromCode(code codes.Code) int { switch code { case codes.OK: @@ -23,7 +26,7 @@ func HTTPStatusFromCode(code codes.Code) int { case codes.InvalidArgument: return http.StatusBadRequest case codes.DeadlineExceeded: - return http.StatusRequestTimeout + return http.StatusGatewayTimeout case codes.NotFound: return http.StatusNotFound case codes.AlreadyExists: @@ -33,9 +36,9 @@ func HTTPStatusFromCode(code codes.Code) int { case codes.Unauthenticated: return http.StatusUnauthorized case codes.ResourceExhausted: - return http.StatusForbidden + return http.StatusTooManyRequests case codes.FailedPrecondition: - return http.StatusPreconditionFailed + return http.StatusBadRequest case codes.Aborted: return http.StatusConflict case codes.OutOfRange: @@ -63,11 +66,12 @@ var ( ) type errorBody struct { - Error string `protobuf:"bytes,1,name=error" json:"error"` - Code int32 `protobuf:"varint,2,name=code" json:"code"` + Error string `protobuf:"bytes,1,name=error" json:"error"` + Code int32 `protobuf:"varint,2,name=code" json:"code"` + Details []*any.Any `protobuf:"bytes,3,rep,name=details" json:"details,omitempty"` } -//Make this also conform to proto.Message for builtin JSONPb Marshaler +// Make this also conform to proto.Message for builtin JSONPb Marshaler func (e *errorBody) Reset() { *e = errorBody{} } func (e *errorBody) String() string { return proto.CompactTextString(e) } func (*errorBody) ProtoMessage() {} @@ -94,6 +98,17 @@ func DefaultHTTPError(ctx context.Context, mux *ServeMux, marshaler Marshaler, w Code: int32(s.Code()), } + for _, detail := range s.Details() { + if det, ok := detail.(proto.Message); ok { + a, err := ptypes.MarshalAny(det) + if err != nil { + grpclog.Printf("Failed to marshal any: %v", err) + } else { + body.Details = append(body.Details, a) + } + } + } + buf, merr := marshaler.Marshal(body) if merr != nil { grpclog.Printf("Failed to marshal error message %q: %v", body, merr) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors_test.go index ee5dfb3c..2c7ecf18 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors_test.go @@ -8,8 +8,8 @@ import ( "strings" "testing" + "context" "github.com/grpc-ecosystem/grpc-gateway/runtime" - "golang.org/x/net/context" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go 
index 22eb1b3d..1b3c6503 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go @@ -6,9 +6,10 @@ import ( "net/http" "net/textproto" + "context" "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes/any" "github.com/grpc-ecosystem/grpc-gateway/runtime/internal" - "golang.org/x/net/context" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/status" @@ -34,49 +35,58 @@ func ForwardResponseStream(ctx context.Context, mux *ServeMux, marshaler Marshal w.Header().Set("Transfer-Encoding", "chunked") w.Header().Set("Content-Type", marshaler.ContentType()) if err := handleForwardResponseOptions(ctx, w, nil, opts); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) + HTTPError(ctx, mux, marshaler, w, req, err) return } - w.WriteHeader(http.StatusOK) - f.Flush() + + var delimiter []byte + if d, ok := marshaler.(Delimited); ok { + delimiter = d.Delimiter() + } else { + delimiter = []byte("\n") + } + + var wroteHeader bool for { resp, err := recv() if err == io.EOF { return } if err != nil { - handleForwardResponseStreamError(marshaler, w, err) + handleForwardResponseStreamError(wroteHeader, marshaler, w, err) return } if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil { - handleForwardResponseStreamError(marshaler, w, err) + handleForwardResponseStreamError(wroteHeader, marshaler, w, err) return } buf, err := marshaler.Marshal(streamChunk(resp, nil)) if err != nil { grpclog.Printf("Failed to marshal response chunk: %v", err) + handleForwardResponseStreamError(wroteHeader, marshaler, w, err) return } - if _, err = fmt.Fprintf(w, "%s\n", buf); err != nil { + if _, err = w.Write(buf); err != nil { grpclog.Printf("Failed to send response chunk: %v", err) return } + wroteHeader = true + if _, err = w.Write(delimiter); err != nil { + grpclog.Printf("Failed to send delimiter chunk: %v", err) + return + } f.Flush() } } func handleForwardResponseServerMetadata(w http.ResponseWriter, mux *ServeMux, md ServerMetadata) { for k, vs := range md.HeaderMD { - hKey := fmt.Sprintf("%s%s", MetadataHeaderPrefix, k) - if mux.outgoingHeaderMatcher != nil { - if h, ok := mux.outgoingHeaderMatcher(k); ok { - hKey = h + if h, ok := mux.outgoingHeaderMatcher(k); ok { + for _, v := range vs { + w.Header().Add(h, v) } } - for i := range vs { - w.Header().Add(hKey, vs[i]) - } } } @@ -90,8 +100,8 @@ func handleForwardResponseTrailerHeader(w http.ResponseWriter, md ServerMetadata func handleForwardResponseTrailer(w http.ResponseWriter, md ServerMetadata) { for k, vs := range md.TrailerMD { tKey := fmt.Sprintf("%s%s", MetadataTrailerPrefix, k) - for i := range vs { - w.Header().Add(tKey, vs[i]) + for _, v := range vs { + w.Header().Add(tKey, v) } } } @@ -138,13 +148,20 @@ func handleForwardResponseOptions(ctx context.Context, w http.ResponseWriter, re return nil } -func handleForwardResponseStreamError(marshaler Marshaler, w http.ResponseWriter, err error) { +func handleForwardResponseStreamError(wroteHeader bool, marshaler Marshaler, w http.ResponseWriter, err error) { buf, merr := marshaler.Marshal(streamChunk(nil, err)) if merr != nil { grpclog.Printf("Failed to marshal an error: %v", merr) return } - if _, werr := fmt.Fprintf(w, "%s\n", buf); werr != nil { + if !wroteHeader { + s, ok := status.FromError(err) + if !ok { + s = status.New(codes.Unknown, err.Error()) + } + w.WriteHeader(HTTPStatusFromCode(s.Code())) + } + if _, werr := 
w.Write(buf); werr != nil { grpclog.Printf("Failed to notify error to client: %v", werr) return } @@ -153,16 +170,21 @@ func handleForwardResponseStreamError(marshaler Marshaler, w http.ResponseWriter func streamChunk(result proto.Message, err error) map[string]proto.Message { if err != nil { grpcCode := codes.Unknown + grpcMessage := err.Error() + var grpcDetails []*any.Any if s, ok := status.FromError(err); ok { grpcCode = s.Code() + grpcMessage = s.Message() + grpcDetails = s.Proto().GetDetails() } httpCode := HTTPStatusFromCode(grpcCode) return map[string]proto.Message{ "error": &internal.StreamError{ GrpcCode: int32(grpcCode), HttpCode: int32(httpCode), - Message: err.Error(), + Message: grpcMessage, HttpStatus: http.StatusText(httpCode), + Details: grpcDetails, }, } } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler_test.go new file mode 100644 index 00000000..6a712a4e --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler_test.go @@ -0,0 +1,228 @@ +package runtime_test + +import ( + "io" + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" + + "context" + "github.com/golang/protobuf/proto" + pb "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/runtime/internal" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func TestForwardResponseStream(t *testing.T) { + type msg struct { + pb proto.Message + err error + } + tests := []struct { + name string + msgs []msg + statusCode int + }{{ + name: "encoding", + msgs: []msg{ + {&pb.SimpleMessage{Id: "One"}, nil}, + {&pb.SimpleMessage{Id: "Two"}, nil}, + }, + statusCode: http.StatusOK, + }, { + name: "empty", + statusCode: http.StatusOK, + }, { + name: "error", + msgs: []msg{{nil, grpc.Errorf(codes.OutOfRange, "400")}}, + statusCode: http.StatusBadRequest, + }, { + name: "stream_error", + msgs: []msg{ + {&pb.SimpleMessage{Id: "One"}, nil}, + {nil, grpc.Errorf(codes.OutOfRange, "400")}, + }, + statusCode: http.StatusOK, + }} + + newTestRecv := func(t *testing.T, msgs []msg) func() (proto.Message, error) { + var count int + return func() (proto.Message, error) { + if count == len(msgs) { + return nil, io.EOF + } else if count > len(msgs) { + t.Errorf("recv() called %d times for %d messages", count, len(msgs)) + } + count++ + msg := msgs[count-1] + return msg.pb, msg.err + } + } + ctx := runtime.NewServerMetadataContext(context.Background(), runtime.ServerMetadata{}) + marshaler := &runtime.JSONPb{} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + recv := newTestRecv(t, tt.msgs) + req := httptest.NewRequest("GET", "http://example.com/foo", nil) + resp := httptest.NewRecorder() + + runtime.ForwardResponseStream(ctx, runtime.NewServeMux(), marshaler, resp, req, recv) + + w := resp.Result() + if w.StatusCode != tt.statusCode { + t.Errorf("StatusCode %d want %d", w.StatusCode, tt.statusCode) + } + if h := w.Header.Get("Transfer-Encoding"); h != "chunked" { + t.Errorf("ForwardResponseStream missing header chunked") + } + body, err := ioutil.ReadAll(w.Body) + if err != nil { + t.Errorf("Failed to read response body with %v", err) + } + w.Body.Close() + + var want []byte + for i, msg := range tt.msgs { + if msg.err != nil { + if i == 0 { + // Skip non-stream errors + t.Skip("checking error encodings") + } + st, _ := status.FromError(msg.err) + 
httpCode := runtime.HTTPStatusFromCode(st.Code()) + b, err := marshaler.Marshal(map[string]proto.Message{ + "error": &internal.StreamError{ + GrpcCode: int32(st.Code()), + HttpCode: int32(httpCode), + Message: st.Message(), + HttpStatus: http.StatusText(httpCode), + Details: st.Proto().GetDetails(), + }, + }) + if err != nil { + t.Errorf("marshaler.Marshal() failed %v", err) + } + errBytes := body[len(want):] + if string(errBytes) != string(b) { + t.Errorf("ForwardResponseStream() = \"%s\" want \"%s\"", errBytes, b) + } + + return + } + b, err := marshaler.Marshal(map[string]proto.Message{"result": msg.pb}) + if err != nil { + t.Errorf("marshaler.Marshal() failed %v", err) + } + want = append(want, b...) + want = append(want, marshaler.Delimiter()...) + } + + if string(body) != string(want) { + t.Errorf("ForwardResponseStream() = \"%s\" want \"%s\"", body, want) + } + }) + } +} + +// A custom marshaler implementation, that doesn't implement the delimited interface +type CustomMarshaler struct { + m *runtime.JSONPb +} + +func (c *CustomMarshaler) Marshal(v interface{}) ([]byte, error) { return c.m.Marshal(v) } +func (c *CustomMarshaler) Unmarshal(data []byte, v interface{}) error { return c.m.Unmarshal(data, v) } +func (c *CustomMarshaler) NewDecoder(r io.Reader) runtime.Decoder { return c.m.NewDecoder(r) } +func (c *CustomMarshaler) NewEncoder(w io.Writer) runtime.Encoder { return c.m.NewEncoder(w) } +func (c *CustomMarshaler) ContentType() string { return c.m.ContentType() } + +func TestForwardResponseStreamCustomMarshaler(t *testing.T) { + type msg struct { + pb proto.Message + err error + } + tests := []struct { + name string + msgs []msg + statusCode int + }{{ + name: "encoding", + msgs: []msg{ + {&pb.SimpleMessage{Id: "One"}, nil}, + {&pb.SimpleMessage{Id: "Two"}, nil}, + }, + statusCode: http.StatusOK, + }, { + name: "empty", + statusCode: http.StatusOK, + }, { + name: "error", + msgs: []msg{{nil, grpc.Errorf(codes.OutOfRange, "400")}}, + statusCode: http.StatusBadRequest, + }, { + name: "stream_error", + msgs: []msg{ + {&pb.SimpleMessage{Id: "One"}, nil}, + {nil, grpc.Errorf(codes.OutOfRange, "400")}, + }, + statusCode: http.StatusOK, + }} + + newTestRecv := func(t *testing.T, msgs []msg) func() (proto.Message, error) { + var count int + return func() (proto.Message, error) { + if count == len(msgs) { + return nil, io.EOF + } else if count > len(msgs) { + t.Errorf("recv() called %d times for %d messages", count, len(msgs)) + } + count++ + msg := msgs[count-1] + return msg.pb, msg.err + } + } + ctx := runtime.NewServerMetadataContext(context.Background(), runtime.ServerMetadata{}) + marshaler := &CustomMarshaler{&runtime.JSONPb{}} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + recv := newTestRecv(t, tt.msgs) + req := httptest.NewRequest("GET", "http://example.com/foo", nil) + resp := httptest.NewRecorder() + + runtime.ForwardResponseStream(ctx, runtime.NewServeMux(), marshaler, resp, req, recv) + + w := resp.Result() + if w.StatusCode != tt.statusCode { + t.Errorf("StatusCode %d want %d", w.StatusCode, tt.statusCode) + } + if h := w.Header.Get("Transfer-Encoding"); h != "chunked" { + t.Errorf("ForwardResponseStream missing header chunked") + } + body, err := ioutil.ReadAll(w.Body) + if err != nil { + t.Errorf("Failed to read response body with %v", err) + } + w.Body.Close() + + var want []byte + for _, msg := range tt.msgs { + if msg.err != nil { + t.Skip("checking erorr encodings") + } + b, err := marshaler.Marshal(map[string]proto.Message{"result": msg.pb}) + 
if err != nil { + t.Errorf("marshaler.Marshal() failed %v", err) + } + want = append(want, b...) + want = append(want, "\n"...) + } + + if string(body) != string(want) { + t.Errorf("ForwardResponseStream() = \"%s\" want \"%s\"", body, want) + } + }) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/BUILD.bazel new file mode 100644 index 00000000..937e6608 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/BUILD.bazel @@ -0,0 +1,23 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +package(default_visibility = ["//runtime:__subpackages__"]) + +proto_library( + name = "internal_proto", + srcs = ["stream_chunk.proto"], + deps = ["@com_google_protobuf//:any_proto"], +) + +go_proto_library( + name = "internal_go_proto", + importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime/internal", + proto = ":internal_proto", + deps = ["@com_github_golang_protobuf//ptypes/any:go_default_library"], +) + +go_library( + name = "go_default_library", + embed = [":internal_go_proto"], + importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime/internal", +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.pb.go index 44550f39..82af3a61 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.pb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.pb.go @@ -15,6 +15,7 @@ package internal import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -30,10 +31,11 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // StreamError is a response type which is returned when // streaming rpc returns an error. 
type StreamError struct { - GrpcCode int32 `protobuf:"varint,1,opt,name=grpc_code,json=grpcCode" json:"grpc_code,omitempty"` - HttpCode int32 `protobuf:"varint,2,opt,name=http_code,json=httpCode" json:"http_code,omitempty"` - Message string `protobuf:"bytes,3,opt,name=message" json:"message,omitempty"` - HttpStatus string `protobuf:"bytes,4,opt,name=http_status,json=httpStatus" json:"http_status,omitempty"` + GrpcCode int32 `protobuf:"varint,1,opt,name=grpc_code,json=grpcCode" json:"grpc_code,omitempty"` + HttpCode int32 `protobuf:"varint,2,opt,name=http_code,json=httpCode" json:"http_code,omitempty"` + Message string `protobuf:"bytes,3,opt,name=message" json:"message,omitempty"` + HttpStatus string `protobuf:"bytes,4,opt,name=http_status,json=httpStatus" json:"http_status,omitempty"` + Details []*google_protobuf.Any `protobuf:"bytes,5,rep,name=details" json:"details,omitempty"` } func (m *StreamError) Reset() { *m = StreamError{} } @@ -69,6 +71,13 @@ func (m *StreamError) GetHttpStatus() string { return "" } +func (m *StreamError) GetDetails() []*google_protobuf.Any { + if m != nil { + return m.Details + } + return nil +} + func init() { proto.RegisterType((*StreamError)(nil), "grpc.gateway.runtime.StreamError") } @@ -76,17 +85,20 @@ func init() { func init() { proto.RegisterFile("runtime/internal/stream_chunk.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 181 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x8e, 0xbf, 0xee, 0x82, 0x30, - 0x14, 0x85, 0xd3, 0xdf, 0x1f, 0x85, 0xcb, 0x46, 0x1c, 0x9a, 0x38, 0x48, 0x74, 0x61, 0x82, 0xc1, - 0x37, 0xd0, 0xf8, 0x02, 0xb0, 0xb9, 0x90, 0x0a, 0x37, 0x40, 0x94, 0x96, 0xdc, 0x5e, 0x62, 0x5c, - 0x7d, 0x72, 0xd3, 0x22, 0xe3, 0xf9, 0xbe, 0x73, 0x92, 0x03, 0x07, 0x9a, 0x34, 0xf7, 0x03, 0xe6, - 0xbd, 0x66, 0x24, 0xad, 0x1e, 0xb9, 0x65, 0x42, 0x35, 0x54, 0x75, 0x37, 0xe9, 0x7b, 0x36, 0x92, - 0x61, 0x13, 0x6f, 0x5a, 0x1a, 0xeb, 0xac, 0x55, 0x8c, 0x4f, 0xf5, 0xca, 0xbe, 0x8b, 0xfd, 0x5b, - 0x40, 0x54, 0xfa, 0xf2, 0x85, 0xc8, 0x50, 0xbc, 0x85, 0xd0, 0xf5, 0xaa, 0xda, 0x34, 0x28, 0x45, - 0x22, 0xd2, 0xff, 0x22, 0x70, 0xe0, 0x6c, 0x1a, 0x74, 0xb2, 0x63, 0x1e, 0x67, 0xf9, 0x33, 0x4b, - 0x07, 0xbc, 0x94, 0xb0, 0x1e, 0xd0, 0x5a, 0xd5, 0xa2, 0xfc, 0x4d, 0x44, 0x1a, 0x16, 0x4b, 0x8c, - 0x77, 0x10, 0xf9, 0x99, 0x65, 0xc5, 0x93, 0x95, 0x7f, 0xde, 0x82, 0x43, 0xa5, 0x27, 0x27, 0xb8, - 0x06, 0xcb, 0xf3, 0xdb, 0xca, 0xbf, 0x3d, 0x7e, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa9, 0x07, 0x92, - 0xb6, 0xd4, 0x00, 0x00, 0x00, + // 226 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x90, 0xc1, 0x4e, 0x84, 0x30, + 0x10, 0x86, 0x83, 0xeb, 0xba, 0xbb, 0xc3, 0x8d, 0xec, 0xa1, 0xea, 0x41, 0xa2, 0x17, 0x4e, 0x25, + 0xd1, 0x27, 0x50, 0xe3, 0x0b, 0xb0, 0x37, 0x2f, 0x9b, 0x59, 0x18, 0x0b, 0x11, 0x5a, 0x32, 0x1d, + 0x62, 0x78, 0x2d, 0x9f, 0xd0, 0xb4, 0xc8, 0xb1, 0xdf, 0xf7, 0xff, 0x93, 0x3f, 0x85, 0x27, 0x9e, + 0xac, 0x74, 0x03, 0x95, 0x9d, 0x15, 0x62, 0x8b, 0x7d, 0xe9, 0x85, 0x09, 0x87, 0x73, 0xdd, 0x4e, + 0xf6, 0x5b, 0x8f, 0xec, 0xc4, 0x65, 0x47, 0xc3, 0x63, 0xad, 0x0d, 0x0a, 0xfd, 0xe0, 0xac, 0xff, + 0x1b, 0x77, 0xb7, 0xc6, 0x39, 0xd3, 0x53, 0x19, 0x33, 0x97, 0xe9, 0xab, 0x44, 0x3b, 0x2f, 0x85, + 0xc7, 0xdf, 0x04, 0xd2, 0x53, 0xbc, 0xf3, 0xc1, 0xec, 0x38, 0xbb, 0x87, 0x43, 0x38, 0x71, 0xae, + 0x5d, 0x43, 0x2a, 0xc9, 0x93, 0x62, 0x5b, 0xed, 0x03, 0x78, 0x77, 0x0d, 0x05, 0xd9, 0x8a, 0x8c, + 0x8b, 0xbc, 0x5a, 0x64, 0x00, 0x51, 0x2a, 0xd8, 0x0d, 0xe4, 0x3d, 0x1a, 
0x52, 0x9b, 0x3c, 0x29, + 0x0e, 0xd5, 0xfa, 0xcc, 0x1e, 0x20, 0x8d, 0x35, 0x2f, 0x28, 0x93, 0x57, 0xd7, 0xd1, 0x42, 0x40, + 0xa7, 0x48, 0x32, 0x0d, 0xbb, 0x86, 0x04, 0xbb, 0xde, 0xab, 0x6d, 0xbe, 0x29, 0xd2, 0xe7, 0xa3, + 0x5e, 0x16, 0xeb, 0x75, 0xb1, 0x7e, 0xb5, 0x73, 0xb5, 0x86, 0xde, 0xe0, 0x73, 0xbf, 0x7e, 0xc2, + 0xe5, 0x26, 0x46, 0x5e, 0xfe, 0x02, 0x00, 0x00, 0xff, 0xff, 0x16, 0x75, 0x92, 0x08, 0x1f, 0x01, + 0x00, 0x00, } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.proto index f7fba56c..55f42ce6 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.proto +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/internal/stream_chunk.proto @@ -2,6 +2,8 @@ syntax = "proto3"; package grpc.gateway.runtime; option go_package = "internal"; +import "google/protobuf/any.proto"; + // StreamError is a response type which is returned when // streaming rpc returns an error. message StreamError { @@ -9,4 +11,5 @@ message StreamError { int32 http_code = 2; string message = 3; string http_status = 4; + repeated google.protobuf.Any details = 5; } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go index 0acd2ca2..b3a21418 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go @@ -35,3 +35,8 @@ func (j *JSONBuiltin) NewDecoder(r io.Reader) Decoder { func (j *JSONBuiltin) NewEncoder(w io.Writer) Encoder { return json.NewEncoder(w) } + +// Delimiter for newline encoded JSON streams. +func (j *JSONBuiltin) Delimiter() []byte { + return []byte("\n") +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json_test.go index e6efa291..6ab9699e 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json_test.go @@ -12,7 +12,7 @@ import ( structpb "github.com/golang/protobuf/ptypes/struct" "github.com/golang/protobuf/ptypes/timestamp" "github.com/golang/protobuf/ptypes/wrappers" - "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" "github.com/grpc-ecosystem/grpc-gateway/runtime" ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go index 49f13f7f..0a0d130b 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go @@ -50,7 +50,7 @@ func (j *JSONPb) marshalTo(w io.Writer, v interface{}) error { } // marshalNonProto marshals a non-message field of a protobuf message. -// This function does not correctly marshals arbitary data structure into JSON, +// This function does not correctly marshals arbitrary data structure into JSON, // but it is only capable of marshaling non-message field values of protobuf, // i.e. primitive types, enums; pointers to primitives or enums; maps from // integer/string types to primitives/enums/pointers to messages. 
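For context on the Delimiter additions: ForwardResponseStream frames each streamed message as {"result": ...} terminated by the marshaler's Delimiter() (a newline for the JSON marshalers), and a mid-stream error becomes a single {"error": ...} frame carrying the new Details field. A minimal, illustrative sketch of one result frame, using wrappers.StringValue as a stand-in payload (not part of the vendored code):

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/ptypes/wrappers"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	m := &runtime.JSONPb{}

	// One streamed record: {"result": {...}} followed by m.Delimiter() ("\n"),
	// mirroring what the handler tests above expect from ForwardResponseStream.
	payload := &wrappers.StringValue{Value: "One"}
	frame, err := m.Marshal(map[string]proto.Message{"result": payload})
	if err != nil {
		panic(err)
	}
	frame = append(frame, m.Delimiter()...)
	fmt.Printf("%s", frame)
}

The JSONPb marshaler gains the same Delimiter method in the hunk that follows.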
@@ -182,3 +182,8 @@ type protoEnum interface { } var typeProtoMessage = reflect.TypeOf((*proto.Message)(nil)).Elem() + +// Delimiter for newline encoded JSON streams. +func (j *JSONPb) Delimiter() []byte { + return []byte("\n") +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb_test.go index 482a45bf..679283b4 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb_test.go @@ -13,7 +13,7 @@ import ( structpb "github.com/golang/protobuf/ptypes/struct" "github.com/golang/protobuf/ptypes/timestamp" "github.com/golang/protobuf/ptypes/wrappers" - "github.com/grpc-ecosystem/grpc-gateway/examples/examplepb" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" "github.com/grpc-ecosystem/grpc-gateway/runtime" ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go new file mode 100644 index 00000000..f65d1a26 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go @@ -0,0 +1,62 @@ +package runtime + +import ( + "io" + + "errors" + "github.com/golang/protobuf/proto" + "io/ioutil" +) + +// ProtoMarshaller is a Marshaller which marshals/unmarshals into/from serialize proto bytes +type ProtoMarshaller struct{} + +// ContentType always returns "application/octet-stream". +func (*ProtoMarshaller) ContentType() string { + return "application/octet-stream" +} + +// Marshal marshals "value" into Proto +func (*ProtoMarshaller) Marshal(value interface{}) ([]byte, error) { + message, ok := value.(proto.Message) + if !ok { + return nil, errors.New("unable to marshal non proto field") + } + return proto.Marshal(message) +} + +// Unmarshal unmarshals proto "data" into "value" +func (*ProtoMarshaller) Unmarshal(data []byte, value interface{}) error { + message, ok := value.(proto.Message) + if !ok { + return errors.New("unable to unmarshal non proto field") + } + return proto.Unmarshal(data, message) +} + +// NewDecoder returns a Decoder which reads proto stream from "reader". +func (marshaller *ProtoMarshaller) NewDecoder(reader io.Reader) Decoder { + return DecoderFunc(func(value interface{}) error { + buffer, err := ioutil.ReadAll(reader) + if err != nil { + return err + } + return marshaller.Unmarshal(buffer, value) + }) +} + +// NewEncoder returns an Encoder which writes proto stream into "writer". 
+func (marshaller *ProtoMarshaller) NewEncoder(writer io.Writer) Encoder { + return EncoderFunc(func(value interface{}) error { + buffer, err := marshaller.Marshal(value) + if err != nil { + return err + } + _, err = writer.Write(buffer) + if err != nil { + return err + } + + return nil + }) +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto_test.go new file mode 100644 index 00000000..535f3991 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto_test.go @@ -0,0 +1,91 @@ +package runtime_test + +import ( + "reflect" + "testing" + + "bytes" + "github.com/golang/protobuf/ptypes/timestamp" + "github.com/grpc-ecosystem/grpc-gateway/examples/proto/examplepb" + "github.com/grpc-ecosystem/grpc-gateway/runtime" +) + +var message = &examplepb.ABitOfEverything{ + SingleNested: &examplepb.ABitOfEverything_Nested{}, + RepeatedStringValue: nil, + MappedStringValue: nil, + MappedNestedValue: nil, + RepeatedEnumValue: nil, + TimestampValue: ×tamp.Timestamp{}, + Uuid: "6EC2446F-7E89-4127-B3E6-5C05E6BECBA7", + Nested: []*examplepb.ABitOfEverything_Nested{ + { + Name: "foo", + Amount: 12345, + }, + }, + Uint64Value: 0xFFFFFFFFFFFFFFFF, + EnumValue: examplepb.NumericEnum_ONE, + OneofValue: &examplepb.ABitOfEverything_OneofString{ + OneofString: "bar", + }, + MapValue: map[string]examplepb.NumericEnum{ + "a": examplepb.NumericEnum_ONE, + "b": examplepb.NumericEnum_ZERO, + }, +} + +func TestProtoMarshalUnmarshal(t *testing.T) { + marshaller := runtime.ProtoMarshaller{} + + // Marshal + buffer, err := marshaller.Marshal(message) + if err != nil { + t.Fatalf("Marshalling returned error: %s", err.Error()) + } + + // Unmarshal + unmarshalled := &examplepb.ABitOfEverything{} + err = marshaller.Unmarshal(buffer, unmarshalled) + if err != nil { + t.Fatalf("Unmarshalling returned error: %s", err.Error()) + } + + if !reflect.DeepEqual(unmarshalled, message) { + t.Errorf( + "Unmarshalled didn't match original message: (original = %v) != (unmarshalled = %v)", + unmarshalled, + message, + ) + } +} + +func TestProtoEncoderDecodert(t *testing.T) { + marshaller := runtime.ProtoMarshaller{} + + var buf bytes.Buffer + + encoder := marshaller.NewEncoder(&buf) + decoder := marshaller.NewDecoder(&buf) + + // Encode + err := encoder.Encode(message) + if err != nil { + t.Fatalf("Encoding returned error: %s", err.Error()) + } + + // Decode + unencoded := &examplepb.ABitOfEverything{} + err = decoder.Decode(unencoded) + if err != nil { + t.Fatalf("Unmarshalling returned error: %s", err.Error()) + } + + if !reflect.DeepEqual(unencoded, message) { + t.Errorf( + "Unencoded didn't match original message: (original = %v) != (unencoded = %v)", + unencoded, + message, + ) + } +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go index 6d434f13..98fe6e88 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go @@ -40,3 +40,9 @@ type EncoderFunc func(v interface{}) error // Encode delegates invocations to the underlying function itself. func (f EncoderFunc) Encode(v interface{}) error { return f(v) } + +// Delimited defines the streaming delimiter. +type Delimited interface { + // Delimiter returns the record seperator for the stream. 
+ Delimiter() []byte +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go index 928f0733..5cc53ae4 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go @@ -68,7 +68,7 @@ func (m marshalerRegistry) add(mime string, marshaler Marshaler) error { // It allows for a mapping of case-sensitive Content-Type MIME type string to runtime.Marshaler interfaces. // // For example, you could allow the client to specify the use of the runtime.JSONPb marshaler -// with a "applicaton/jsonpb" Content-Type and the use of the runtime.JSONBuiltin marshaler +// with a "application/jsonpb" Content-Type and the use of the runtime.JSONBuiltin marshaler // with a "application/json" Content-Type. // "*" can be used to match any Content-Type. // This can be attached to a ServerMux with the marshaler option. diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go index dda3db20..1d4c7576 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go @@ -1,12 +1,13 @@ package runtime import ( + "fmt" "net/http" "net/textproto" "strings" + "context" "github.com/golang/protobuf/proto" - "golang.org/x/net/context" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" @@ -24,7 +25,7 @@ type ServeMux struct { marshalers marshalerRegistry incomingHeaderMatcher HeaderMatcherFunc outgoingHeaderMatcher HeaderMatcherFunc - metadataAnnotator func(context.Context, *http.Request) metadata.MD + metadataAnnotators []func(context.Context, *http.Request) metadata.MD protoErrorHandler ProtoErrorHandlerFunc } @@ -86,7 +87,7 @@ func WithOutgoingHeaderMatcher(fn HeaderMatcherFunc) ServeMuxOption { // is reading token from cookie and adding it in gRPC context. func WithMetadata(annotator func(context.Context, *http.Request) metadata.MD) ServeMuxOption { return func(serveMux *ServeMux) { - serveMux.metadataAnnotator = annotator + serveMux.metadataAnnotators = append(serveMux.metadataAnnotators, annotator) } } @@ -107,7 +108,6 @@ func NewServeMux(opts ...ServeMuxOption) *ServeMux { handlers: make(map[string][]handler), forwardResponseOptions: make([]func(context.Context, http.ResponseWriter, proto.Message) error, 0), marshalers: makeMarshalerMIMERegistry(), - incomingHeaderMatcher: DefaultHeaderMatcher, } for _, opt := range opts { @@ -126,6 +126,16 @@ func NewServeMux(opts ...ServeMuxOption) *ServeMux { } } + if serveMux.incomingHeaderMatcher == nil { + serveMux.incomingHeaderMatcher = DefaultHeaderMatcher + } + + if serveMux.outgoingHeaderMatcher == nil { + serveMux.outgoingHeaderMatcher = func(key string) (string, bool) { + return fmt.Sprintf("%s%s", MetadataHeaderPrefix, key), true + } + } + return serveMux } @@ -136,8 +146,7 @@ func (s *ServeMux) Handle(meth string, pat Pattern, h HandlerFunc) { // ServeHTTP dispatches the request to the first handler whose pattern matches to r.Method and r.Path. 
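One consequence of switching metadataAnnotator to a slice is that WithMetadata becomes composable: each registered annotator runs for every request and contributes to the gRPC metadata. A minimal sketch (the header names here are made up for illustration):

package gatewayexample

import (
	"context"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc/metadata"
)

func newMux() *runtime.ServeMux {
	return runtime.NewServeMux(
		// First annotator: forward the HTTP Host header.
		runtime.WithMetadata(func(ctx context.Context, r *http.Request) metadata.MD {
			return metadata.Pairs("x-forwarded-host", r.Host)
		}),
		// Second annotator: record the request path; both annotators run.
		runtime.WithMetadata(func(ctx context.Context, r *http.Request) metadata.MD {
			return metadata.Pairs("x-request-path", r.URL.Path)
		}),
	)
}

The ServeHTTP method below is otherwise unchanged apart from deriving its context from r.Context() instead of context.Background().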
func (s *ServeMux) ServeHTTP(w http.ResponseWriter, r *http.Request) { - // TODO: use r.Context for go 1.7+ - ctx := context.Background() + ctx := r.Context() path := r.URL.Path if !strings.HasPrefix(path, "/") { diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go index b1b08927..059928c2 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go @@ -4,7 +4,7 @@ import ( "io" "net/http" - "golang.org/x/net/context" + "context" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/status" diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go index 4b031219..07d0ff8c 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go @@ -1,9 +1,11 @@ package runtime import ( + "encoding/base64" "fmt" "net/url" "reflect" + "regexp" "strconv" "strings" "time" @@ -17,6 +19,15 @@ import ( // A value is ignored if its key starts with one of the elements in "filter". func PopulateQueryParameters(msg proto.Message, values url.Values, filter *utilities.DoubleArray) error { for key, values := range values { + re, err := regexp.Compile("^(.*)\\[(.*)\\]$") + if err != nil { + return err + } + match := re.FindStringSubmatch(key) + if len(match) == 3 { + key = match[1] + values = append([]string{match[2]}, values...) + } fieldPath := strings.Split(key, ".") if filter.HasCommonPrefix(fieldPath) { continue @@ -64,10 +75,14 @@ func populateFieldValueFromPath(msg proto.Message, fieldPath []string, values [] } m = f case reflect.Slice: - // TODO(yugui) Support []byte if !isLast { return fmt.Errorf("unexpected repeated field in %s", strings.Join(fieldPath, ".")) } + // Handle []byte + if f.Type().Elem().Kind() == reflect.Uint8 { + m = f + break + } return populateRepeatedField(f, values, props) case reflect.Ptr: if f.IsNil() { @@ -79,6 +94,11 @@ func populateFieldValueFromPath(msg proto.Message, fieldPath []string, values [] case reflect.Struct: m = f continue + case reflect.Map: + if !isLast { + return fmt.Errorf("unexpected nested field %s in %s", fieldPath[i+1], strings.Join(fieldPath[:i+1], ".")) + } + return populateMapField(f, values, props) default: return fmt.Errorf("unexpected type %s in %T", f.Type(), msg) } @@ -113,10 +133,48 @@ func fieldByProtoName(m reflect.Value, name string) (reflect.Value, *proto.Prope if p.OrigName == name { return m.FieldByName(p.Name), p, nil } + if p.JSONName == name { + return m.FieldByName(p.Name), p, nil + } } return reflect.Value{}, nil, nil } +func populateMapField(f reflect.Value, values []string, props *proto.Properties) error { + if len(values) != 2 { + return fmt.Errorf("more than one value provided for key %s in map %s", values[0], props.Name) + } + + key, value := values[0], values[1] + keyType := f.Type().Key() + valueType := f.Type().Elem() + if f.IsNil() { + f.Set(reflect.MakeMap(f.Type())) + } + + keyConv, ok := convFromType[keyType.Kind()] + if !ok { + return fmt.Errorf("unsupported key type %s in map %s", keyType, props.Name) + } + valueConv, ok := convFromType[valueType.Kind()] + if !ok { + return fmt.Errorf("unsupported value type %s in map %s", valueType, props.Name) + } + + keyV := keyConv.Call([]reflect.Value{reflect.ValueOf(key)}) + if err := 
keyV[1].Interface(); err != nil { + return err.(error) + } + valueV := valueConv.Call([]reflect.Value{reflect.ValueOf(value)}) + if err := valueV[1].Interface(); err != nil { + return err.(error) + } + + f.SetMapIndex(keyV[0].Convert(keyType), valueV[0].Convert(valueType)) + + return nil +} + func populateRepeatedField(f reflect.Value, values []string, props *proto.Properties) error { elemType := f.Type().Elem() @@ -141,11 +199,13 @@ func populateRepeatedField(f reflect.Value, values []string, props *proto.Proper } func populateField(f reflect.Value, value string, props *proto.Properties) error { - // Handle well known type + i := f.Addr().Interface() + + // Handle protobuf well known types type wkt interface { XXX_WellKnownType() string } - if wkt, ok := f.Addr().Interface().(wkt); ok { + if wkt, ok := i.(wkt); ok { switch wkt.XXX_WellKnownType() { case "Timestamp": if value == "null" { @@ -161,6 +221,66 @@ func populateField(f reflect.Value, value string, props *proto.Properties) error f.Field(0).SetInt(int64(t.Unix())) f.Field(1).SetInt(int64(t.Nanosecond())) return nil + case "DoubleValue": + fallthrough + case "FloatValue": + float64Val, err := strconv.ParseFloat(value, 64) + if err != nil { + return fmt.Errorf("bad DoubleValue: %s", value) + } + f.Field(0).SetFloat(float64Val) + return nil + case "Int64Value": + fallthrough + case "Int32Value": + int64Val, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return fmt.Errorf("bad DoubleValue: %s", value) + } + f.Field(0).SetInt(int64Val) + return nil + case "UInt64Value": + fallthrough + case "UInt32Value": + uint64Val, err := strconv.ParseUint(value, 10, 64) + if err != nil { + return fmt.Errorf("bad DoubleValue: %s", value) + } + f.Field(0).SetUint(uint64Val) + return nil + case "BoolValue": + if value == "true" { + f.Field(0).SetBool(true) + } else if value == "false" { + f.Field(0).SetBool(false) + } else { + return fmt.Errorf("bad BoolValue: %s", value) + } + return nil + case "StringValue": + f.Field(0).SetString(value) + return nil + case "BytesValue": + bytesVal, err := base64.StdEncoding.DecodeString(value) + if err != nil { + return fmt.Errorf("bad BytesValue: %s", value) + } + f.Field(0).SetBytes(bytesVal) + return nil + } + } + + // Handle google well known types + if gwkt, ok := i.(proto.Message); ok { + switch proto.MessageName(gwkt) { + case "google.protobuf.FieldMask": + p := f.Field(0) + for _, v := range strings.Split(value, ",") { + if v != "" { + p.Set(reflect.Append(p, reflect.ValueOf(v))) + } + } + return nil } } @@ -232,6 +352,6 @@ var ( reflect.Int32: reflect.ValueOf(Int32), reflect.Uint64: reflect.ValueOf(Uint64), reflect.Uint32: reflect.ValueOf(Uint32), - // TODO(yugui) Support []byte + reflect.Slice: reflect.ValueOf(Bytes), } ) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query_test.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query_test.go index 07262fad..969a8ca1 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query_test.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query_test.go @@ -6,14 +6,15 @@ import ( "net/url" "reflect" "testing" - "time" "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/golang/protobuf/ptypes/timestamp" + "github.com/golang/protobuf/ptypes/wrappers" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/grpc-ecosystem/grpc-gateway/utilities" + "google.golang.org/genproto/protobuf/field_mask" ) func TestPopulateParameters(t *testing.T) { @@ -24,6 +25,9 @@ func 
TestPopulateParameters(t *testing.T) { t.Fatalf("Couldn't setup timestamp in Protobuf format: %v", err) } + fieldmaskStr := "float_value,double_value" + fieldmaskPb := &field_mask.FieldMask{[]string{"float_value", "double_value"}} + for _, spec := range []struct { values url.Values filter *utilities.DoubleArray @@ -32,33 +36,148 @@ func TestPopulateParameters(t *testing.T) { }{ { values: url.Values{ - "float_value": {"1.5"}, - "double_value": {"2.5"}, - "int64_value": {"-1"}, - "int32_value": {"-2"}, - "uint64_value": {"3"}, - "uint32_value": {"4"}, - "bool_value": {"true"}, - "string_value": {"str"}, - "repeated_value": {"a", "b", "c"}, - "enum_value": {"1"}, - "repeated_enum": {"1", "2", "0"}, - "timestamp_value": {timeStr}, + "float_value": {"1.5"}, + "double_value": {"2.5"}, + "int64_value": {"-1"}, + "int32_value": {"-2"}, + "uint64_value": {"3"}, + "uint32_value": {"4"}, + "bool_value": {"true"}, + "string_value": {"str"}, + "bytes_value": {"Ynl0ZXM="}, + "repeated_value": {"a", "b", "c"}, + "enum_value": {"1"}, + "repeated_enum": {"1", "2", "0"}, + "timestamp_value": {timeStr}, + "fieldmask_value": {fieldmaskStr}, + "wrapper_float_value": {"1.5"}, + "wrapper_double_value": {"2.5"}, + "wrapper_int64_value": {"-1"}, + "wrapper_int32_value": {"-2"}, + "wrapper_u_int64_value": {"3"}, + "wrapper_u_int32_value": {"4"}, + "wrapper_bool_value": {"true"}, + "wrapper_string_value": {"str"}, + "wrapper_bytes_value": {"Ynl0ZXM="}, + "map_value[key]": {"value"}, + "map_value[second]": {"bar"}, + "map_value[third]": {"zzz"}, + "map_value[fourth]": {""}, + `map_value[~!@#$%^&*()]`: {"value"}, + "map_value2[key]": {"-2"}, + "map_value3[-2]": {"value"}, + "map_value4[key]": {"-1"}, + "map_value5[-1]": {"value"}, + "map_value6[key]": {"3"}, + "map_value7[3]": {"value"}, + "map_value8[key]": {"4"}, + "map_value9[4]": {"value"}, + "map_value10[key]": {"1.5"}, + "map_value11[1.5]": {"value"}, + "map_value12[key]": {"2.5"}, + "map_value13[2.5]": {"value"}, + "map_value14[key]": {"true"}, + "map_value15[true]": {"value"}, }, filter: utilities.NewDoubleArray(nil), want: &proto3Message{ - FloatValue: 1.5, - DoubleValue: 2.5, - Int64Value: -1, - Int32Value: -2, - Uint64Value: 3, - Uint32Value: 4, - BoolValue: true, - StringValue: "str", - RepeatedValue: []string{"a", "b", "c"}, - EnumValue: EnumValue_Y, - RepeatedEnum: []EnumValue{EnumValue_Y, EnumValue_Z, EnumValue_X}, - TimestampValue: timePb, + FloatValue: 1.5, + DoubleValue: 2.5, + Int64Value: -1, + Int32Value: -2, + Uint64Value: 3, + Uint32Value: 4, + BoolValue: true, + StringValue: "str", + BytesValue: []byte("bytes"), + RepeatedValue: []string{"a", "b", "c"}, + EnumValue: EnumValue_Y, + RepeatedEnum: []EnumValue{EnumValue_Y, EnumValue_Z, EnumValue_X}, + TimestampValue: timePb, + FieldMaskValue: fieldmaskPb, + WrapperFloatValue: &wrappers.FloatValue{1.5}, + WrapperDoubleValue: &wrappers.DoubleValue{2.5}, + WrapperInt64Value: &wrappers.Int64Value{-1}, + WrapperInt32Value: &wrappers.Int32Value{-2}, + WrapperUInt64Value: &wrappers.UInt64Value{3}, + WrapperUInt32Value: &wrappers.UInt32Value{4}, + WrapperBoolValue: &wrappers.BoolValue{true}, + WrapperStringValue: &wrappers.StringValue{"str"}, + WrapperBytesValue: &wrappers.BytesValue{[]byte("bytes")}, + MapValue: map[string]string{ + "key": "value", + "second": "bar", + "third": "zzz", + "fourth": "", + `~!@#$%^&*()`: "value", + }, + MapValue2: map[string]int32{"key": -2}, + MapValue3: map[int32]string{-2: "value"}, + MapValue4: map[string]int64{"key": -1}, + MapValue5: map[int64]string{-1: "value"}, + 
MapValue6: map[string]uint32{"key": 3}, + MapValue7: map[uint32]string{3: "value"}, + MapValue8: map[string]uint64{"key": 4}, + MapValue9: map[uint64]string{4: "value"}, + MapValue10: map[string]float32{"key": 1.5}, + MapValue11: map[float32]string{1.5: "value"}, + MapValue12: map[string]float64{"key": 2.5}, + MapValue13: map[float64]string{2.5: "value"}, + MapValue14: map[string]bool{"key": true}, + MapValue15: map[bool]string{true: "value"}, + }, + }, + { + values: url.Values{ + "floatValue": {"1.5"}, + "doubleValue": {"2.5"}, + "int64Value": {"-1"}, + "int32Value": {"-2"}, + "uint64Value": {"3"}, + "uint32Value": {"4"}, + "boolValue": {"true"}, + "stringValue": {"str"}, + "bytesValue": {"Ynl0ZXM="}, + "repeatedValue": {"a", "b", "c"}, + "enumValue": {"1"}, + "repeatedEnum": {"1", "2", "0"}, + "timestampValue": {timeStr}, + "fieldmaskValue": {fieldmaskStr}, + "wrapperFloatValue": {"1.5"}, + "wrapperDoubleValue": {"2.5"}, + "wrapperInt64Value": {"-1"}, + "wrapperInt32Value": {"-2"}, + "wrapperUInt64Value": {"3"}, + "wrapperUInt32Value": {"4"}, + "wrapperBoolValue": {"true"}, + "wrapperStringValue": {"str"}, + "wrapperBytesValue": {"Ynl0ZXM="}, + }, + filter: utilities.NewDoubleArray(nil), + want: &proto3Message{ + FloatValue: 1.5, + DoubleValue: 2.5, + Int64Value: -1, + Int32Value: -2, + Uint64Value: 3, + Uint32Value: 4, + BoolValue: true, + StringValue: "str", + BytesValue: []byte("bytes"), + RepeatedValue: []string{"a", "b", "c"}, + EnumValue: EnumValue_Y, + RepeatedEnum: []EnumValue{EnumValue_Y, EnumValue_Z, EnumValue_X}, + TimestampValue: timePb, + FieldMaskValue: fieldmaskPb, + WrapperFloatValue: &wrappers.FloatValue{1.5}, + WrapperDoubleValue: &wrappers.DoubleValue{2.5}, + WrapperInt64Value: &wrappers.Int64Value{-1}, + WrapperInt32Value: &wrappers.Int32Value{-2}, + WrapperUInt64Value: &wrappers.UInt64Value{3}, + WrapperUInt32Value: &wrappers.UInt32Value{4}, + WrapperBoolValue: &wrappers.BoolValue{true}, + WrapperStringValue: &wrappers.StringValue{"str"}, + WrapperBytesValue: &wrappers.BytesValue{[]byte("bytes")}, }, }, { @@ -101,6 +220,35 @@ func TestPopulateParameters(t *testing.T) { RepeatedEnum: []EnumValue{EnumValue_Y, EnumValue_Z, EnumValue_X}, }, }, + { + values: url.Values{ + "floatValue": {"1.5"}, + "doubleValue": {"2.5"}, + "int64Value": {"-1"}, + "int32Value": {"-2"}, + "uint64Value": {"3"}, + "uint32Value": {"4"}, + "boolValue": {"true"}, + "stringValue": {"str"}, + "repeatedValue": {"a", "b", "c"}, + "enumValue": {"1"}, + "repeatedEnum": {"1", "2", "0"}, + }, + filter: utilities.NewDoubleArray(nil), + want: &proto2Message{ + FloatValue: proto.Float32(1.5), + DoubleValue: proto.Float64(2.5), + Int64Value: proto.Int64(-1), + Int32Value: proto.Int32(-2), + Uint64Value: proto.Uint64(3), + Uint32Value: proto.Uint32(4), + BoolValue: proto.Bool(true), + StringValue: proto.String("str"), + RepeatedValue: []string{"a", "b", "c"}, + EnumValue: EnumValue_Y, + RepeatedEnum: []EnumValue{EnumValue_Y, EnumValue_Z, EnumValue_X}, + }, + }, { values: url.Values{ "nested.nested.nested.repeated_value": {"a", "b", "c"}, @@ -108,11 +256,17 @@ func TestPopulateParameters(t *testing.T) { "nested.nested.string_value": {"t"}, "nested.string_value": {"u"}, "nested_non_null.string_value": {"v"}, + "nested.nested.map_value[first]": {"foo"}, + "nested.nested.map_value[second]": {"bar"}, }, filter: utilities.NewDoubleArray(nil), want: &proto3Message{ Nested: &proto2Message{ Nested: &proto3Message{ + MapValue: map[string]string{ + "first": "foo", + "second": "bar", + }, Nested: &proto2Message{ 
RepeatedValue: []string{"a", "b", "c"}, StringValue: proto.String("s"), @@ -368,21 +522,47 @@ func TestPopulateQueryParametersWithInvalidNestedParameters(t *testing.T) { } type proto3Message struct { - Nested *proto2Message `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` - NestedNonNull proto2Message `protobuf:"bytes,15,opt,name=nested_non_null" json:"nested_non_null,omitempty"` - FloatValue float32 `protobuf:"fixed32,2,opt,name=float_value" json:"float_value,omitempty"` - DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value" json:"double_value,omitempty"` - Int64Value int64 `protobuf:"varint,4,opt,name=int64_value" json:"int64_value,omitempty"` - Int32Value int32 `protobuf:"varint,5,opt,name=int32_value" json:"int32_value,omitempty"` - Uint64Value uint64 `protobuf:"varint,6,opt,name=uint64_value" json:"uint64_value,omitempty"` - Uint32Value uint32 `protobuf:"varint,7,opt,name=uint32_value" json:"uint32_value,omitempty"` - BoolValue bool `protobuf:"varint,8,opt,name=bool_value" json:"bool_value,omitempty"` - StringValue string `protobuf:"bytes,9,opt,name=string_value" json:"string_value,omitempty"` - RepeatedValue []string `protobuf:"bytes,10,rep,name=repeated_value" json:"repeated_value,omitempty"` - EnumValue EnumValue `protobuf:"varint,11,opt,name=enum_value,json=enumValue,enum=runtime_test_api.EnumValue" json:"enum_value,omitempty"` - RepeatedEnum []EnumValue `protobuf:"varint,12,rep,packed,name=repeated_enum,json=repeated_enum,enum=runtime_test_api.EnumValue" json:"repeated_enum,omitempty"` - TimestampValue *timestamp.Timestamp `protobuf:"bytes,16,opt,name=timestamp_value" json:"timestamp_value,omitempty"` - OneofValue proto3Message_OneofValue `protobuf_oneof:"oneof_value"` + Nested *proto2Message `protobuf:"bytes,1,opt,name=nested,json=nested" json:"nested,omitempty"` + NestedNonNull proto2Message `protobuf:"bytes,15,opt,name=nested_non_null,json=nestedNonNull" json:"nested_non_null,omitempty"` + FloatValue float32 `protobuf:"fixed32,2,opt,name=float_value,json=floatValue" json:"float_value,omitempty"` + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + Int64Value int64 `protobuf:"varint,4,opt,name=int64_value,json=int64Value" json:"int64_value,omitempty"` + Int32Value int32 `protobuf:"varint,5,opt,name=int32_value,json=int32Value" json:"int32_value,omitempty"` + Uint64Value uint64 `protobuf:"varint,6,opt,name=uint64_value,json=uint64Value" json:"uint64_value,omitempty"` + Uint32Value uint32 `protobuf:"varint,7,opt,name=uint32_value,json=uint32Value" json:"uint32_value,omitempty"` + BoolValue bool `protobuf:"varint,8,opt,name=bool_value,json=boolValue" json:"bool_value,omitempty"` + StringValue string `protobuf:"bytes,9,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + BytesValue []byte `protobuf:"bytes,25,opt,name=bytes_value,json=bytesValue" json:"bytes_value,omitempty"` + RepeatedValue []string `protobuf:"bytes,10,rep,name=repeated_value,json=repeatedValue" json:"repeated_value,omitempty"` + EnumValue EnumValue `protobuf:"varint,11,opt,name=enum_value,json=enumValue,enum=runtime_test_api.EnumValue" json:"enum_value,omitempty"` + RepeatedEnum []EnumValue `protobuf:"varint,12,rep,packed,name=repeated_enum,json=repeatedEnum,enum=runtime_test_api.EnumValue" json:"repeated_enum,omitempty"` + TimestampValue *timestamp.Timestamp `protobuf:"bytes,16,opt,name=timestamp_value,json=timestampValue" json:"timestamp_value,omitempty"` + FieldMaskValue *field_mask.FieldMask 
`protobuf:"bytes,27,opt,name=fieldmask_value,json=fieldmaskValue" json:"fieldmask_value,omitempty"` + OneofValue proto3Message_OneofValue `protobuf_oneof:"oneof_value"` + WrapperDoubleValue *wrappers.DoubleValue `protobuf:"bytes,17,opt,name=wrapper_double_value,json=wrapperDoubleValue" json:"wrapper_double_value,omitempty"` + WrapperFloatValue *wrappers.FloatValue `protobuf:"bytes,18,opt,name=wrapper_float_value,json=wrapperFloatValue" json:"wrapper_float_value,omitempty"` + WrapperInt64Value *wrappers.Int64Value `protobuf:"bytes,19,opt,name=wrapper_int64_value,json=wrapperInt64Value" json:"wrapper_int64_value,omitempty"` + WrapperInt32Value *wrappers.Int32Value `protobuf:"bytes,20,opt,name=wrapper_int32_value,json=wrapperInt32Value" json:"wrapper_int32_value,omitempty"` + WrapperUInt64Value *wrappers.UInt64Value `protobuf:"bytes,21,opt,name=wrapper_u_int64_value,json=wrapperUInt64Value" json:"wrapper_u_int64_value,omitempty"` + WrapperUInt32Value *wrappers.UInt32Value `protobuf:"bytes,22,opt,name=wrapper_u_int32_value,json=wrapperUInt32Value" json:"wrapper_u_int32_value,omitempty"` + WrapperBoolValue *wrappers.BoolValue `protobuf:"bytes,23,opt,name=wrapper_bool_value,json=wrapperBoolValue" json:"wrapper_bool_value,omitempty"` + WrapperStringValue *wrappers.StringValue `protobuf:"bytes,24,opt,name=wrapper_string_value,json=wrapperStringValue" json:"wrapper_string_value,omitempty"` + WrapperBytesValue *wrappers.BytesValue `protobuf:"bytes,26,opt,name=wrapper_bytes_value,json=wrapperBytesValue" json:"wrapper_bytes_value,omitempty"` + MapValue map[string]string `protobuf:"bytes,27,opt,name=map_value,json=mapValue" json:"map_value,omitempty"` + MapValue2 map[string]int32 `protobuf:"bytes,28,opt,name=map_value2,json=mapValue2" json:"map_value2,omitempty"` + MapValue3 map[int32]string `protobuf:"bytes,29,opt,name=map_value3,json=mapValue3" json:"map_value3,omitempty"` + MapValue4 map[string]int64 `protobuf:"bytes,30,opt,name=map_value4,json=mapValue4" json:"map_value4,omitempty"` + MapValue5 map[int64]string `protobuf:"bytes,31,opt,name=map_value5,json=mapValue5" json:"map_value5,omitempty"` + MapValue6 map[string]uint32 `protobuf:"bytes,32,opt,name=map_value6,json=mapValue6" json:"map_value6,omitempty"` + MapValue7 map[uint32]string `protobuf:"bytes,33,opt,name=map_value7,json=mapValue7" json:"map_value7,omitempty"` + MapValue8 map[string]uint64 `protobuf:"bytes,34,opt,name=map_value8,json=mapValue8" json:"map_value8,omitempty"` + MapValue9 map[uint64]string `protobuf:"bytes,35,opt,name=map_value9,json=mapValue9" json:"map_value9,omitempty"` + MapValue10 map[string]float32 `protobuf:"bytes,36,opt,name=map_value10,json=mapValue10" json:"map_value10,omitempty"` + MapValue11 map[float32]string `protobuf:"bytes,37,opt,name=map_value11,json=mapValue11" json:"map_value11,omitempty"` + MapValue12 map[string]float64 `protobuf:"bytes,38,opt,name=map_value12,json=mapValue12" json:"map_value12,omitempty"` + MapValue13 map[float64]string `protobuf:"bytes,39,opt,name=map_value13,json=mapValue13" json:"map_value13,omitempty"` + MapValue14 map[string]bool `protobuf:"bytes,40,opt,name=map_value14,json=mapValue14" json:"map_value14,omitempty"` + MapValue15 map[bool]string `protobuf:"bytes,41,opt,name=map_value15,json=mapValue15" json:"map_value15,omitempty"` } func (m *proto3Message) Reset() { *m = proto3Message{} } @@ -501,18 +681,18 @@ func _proto3Message_OneofSizer(msg proto.Message) (n int) { } type proto2Message struct { - Nested *proto3Message `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` - 
FloatValue *float32 `protobuf:"fixed32,2,opt,name=float_value" json:"float_value,omitempty"` - DoubleValue *float64 `protobuf:"fixed64,3,opt,name=double_value" json:"double_value,omitempty"` - Int64Value *int64 `protobuf:"varint,4,opt,name=int64_value" json:"int64_value,omitempty"` - Int32Value *int32 `protobuf:"varint,5,opt,name=int32_value" json:"int32_value,omitempty"` - Uint64Value *uint64 `protobuf:"varint,6,opt,name=uint64_value" json:"uint64_value,omitempty"` - Uint32Value *uint32 `protobuf:"varint,7,opt,name=uint32_value" json:"uint32_value,omitempty"` - BoolValue *bool `protobuf:"varint,8,opt,name=bool_value" json:"bool_value,omitempty"` - StringValue *string `protobuf:"bytes,9,opt,name=string_value" json:"string_value,omitempty"` - RepeatedValue []string `protobuf:"bytes,10,rep,name=repeated_value" json:"repeated_value,omitempty"` + Nested *proto3Message `protobuf:"bytes,1,opt,name=nested,json=nested" json:"nested,omitempty"` + FloatValue *float32 `protobuf:"fixed32,2,opt,name=float_value,json=floatValue" json:"float_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + Int64Value *int64 `protobuf:"varint,4,opt,name=int64_value,json=int64Value" json:"int64_value,omitempty"` + Int32Value *int32 `protobuf:"varint,5,opt,name=int32_value,json=int32Value" json:"int32_value,omitempty"` + Uint64Value *uint64 `protobuf:"varint,6,opt,name=uint64_value,json=uint64Value" json:"uint64_value,omitempty"` + Uint32Value *uint32 `protobuf:"varint,7,opt,name=uint32_value,json=uint32Value" json:"uint32_value,omitempty"` + BoolValue *bool `protobuf:"varint,8,opt,name=bool_value,json=boolValue" json:"bool_value,omitempty"` + StringValue *string `protobuf:"bytes,9,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + RepeatedValue []string `protobuf:"bytes,10,rep,name=repeated_value,json=repeatedValue" json:"repeated_value,omitempty"` EnumValue EnumValue `protobuf:"varint,11,opt,name=enum_value,json=enumValue,enum=runtime_test_api.EnumValue" json:"enum_value,omitempty"` - RepeatedEnum []EnumValue `protobuf:"varint,12,rep,packed,name=repeated_enum,json=repeated_enum,enum=runtime_test_api.EnumValue" json:"repeated_enum,omitempty"` + RepeatedEnum []EnumValue `protobuf:"varint,12,rep,packed,name=repeated_enum,json=repeatedEnum,enum=runtime_test_api.EnumValue" json:"repeated_enum,omitempty"` XXX_unrecognized []byte `json:"-"` } diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/code.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/code.proto new file mode 100644 index 00000000..8fef4117 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/code.proto @@ -0,0 +1,186 @@ +// Copyright 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
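The HTTP mappings documented in the enum below are the same ones the gateway applies through runtime.HTTPStatusFromCode when it builds a StreamError; a small illustrative check (not part of this proto file):

package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc/codes"
)

func main() {
	// OUT_OF_RANGE is documented below as "HTTP Mapping: 400 Bad Request",
	// matching the stream_error test expectation earlier in this diff.
	fmt.Println(runtime.HTTPStatusFromCode(codes.OutOfRange)) // 400
}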
+ +syntax = "proto3"; + +package google.rpc; + +option go_package = "google.golang.org/genproto/googleapis/rpc/code;code"; +option java_multiple_files = true; +option java_outer_classname = "CodeProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + + +// The canonical error codes for Google APIs. +// +// +// Sometimes multiple error codes may apply. Services should return +// the most specific error code that applies. For example, prefer +// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply. +// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`. +enum Code { + // Not an error; returned on success + // + // HTTP Mapping: 200 OK + OK = 0; + + // The operation was cancelled, typically by the caller. + // + // HTTP Mapping: 499 Client Closed Request + CANCELLED = 1; + + // Unknown error. For example, this error may be returned when + // a `Status` value received from another address space belongs to + // an error space that is not known in this address space. Also + // errors raised by APIs that do not return enough error information + // may be converted to this error. + // + // HTTP Mapping: 500 Internal Server Error + UNKNOWN = 2; + + // The client specified an invalid argument. Note that this differs + // from `FAILED_PRECONDITION`. `INVALID_ARGUMENT` indicates arguments + // that are problematic regardless of the state of the system + // (e.g., a malformed file name). + // + // HTTP Mapping: 400 Bad Request + INVALID_ARGUMENT = 3; + + // The deadline expired before the operation could complete. For operations + // that change the state of the system, this error may be returned + // even if the operation has completed successfully. For example, a + // successful response from a server could have been delayed long + // enough for the deadline to expire. + // + // HTTP Mapping: 504 Gateway Timeout + DEADLINE_EXCEEDED = 4; + + // Some requested entity (e.g., file or directory) was not found. + // + // Note to server developers: if a request is denied for an entire class + // of users, such as gradual feature rollout or undocumented whitelist, + // `NOT_FOUND` may be used. If a request is denied for some users within + // a class of users, such as user-based access control, `PERMISSION_DENIED` + // must be used. + // + // HTTP Mapping: 404 Not Found + NOT_FOUND = 5; + + // The entity that a client attempted to create (e.g., file or directory) + // already exists. + // + // HTTP Mapping: 409 Conflict + ALREADY_EXISTS = 6; + + // The caller does not have permission to execute the specified + // operation. `PERMISSION_DENIED` must not be used for rejections + // caused by exhausting some resource (use `RESOURCE_EXHAUSTED` + // instead for those errors). `PERMISSION_DENIED` must not be + // used if the caller can not be identified (use `UNAUTHENTICATED` + // instead for those errors). This error code does not imply the + // request is valid or the requested entity exists or satisfies + // other pre-conditions. + // + // HTTP Mapping: 403 Forbidden + PERMISSION_DENIED = 7; + + // The request does not have valid authentication credentials for the + // operation. + // + // HTTP Mapping: 401 Unauthorized + UNAUTHENTICATED = 16; + + // Some resource has been exhausted, perhaps a per-user quota, or + // perhaps the entire file system is out of space. + // + // HTTP Mapping: 429 Too Many Requests + RESOURCE_EXHAUSTED = 8; + + // The operation was rejected because the system is not in a state + // required for the operation's execution. 
For example, the directory + // to be deleted is non-empty, an rmdir operation is applied to + // a non-directory, etc. + // + // Service implementors can use the following guidelines to decide + // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`: + // (a) Use `UNAVAILABLE` if the client can retry just the failing call. + // (b) Use `ABORTED` if the client should retry at a higher level + // (e.g., when a client-specified test-and-set fails, indicating the + // client should restart a read-modify-write sequence). + // (c) Use `FAILED_PRECONDITION` if the client should not retry until + // the system state has been explicitly fixed. E.g., if an "rmdir" + // fails because the directory is non-empty, `FAILED_PRECONDITION` + // should be returned since the client should not retry unless + // the files are deleted from the directory. + // + // HTTP Mapping: 400 Bad Request + FAILED_PRECONDITION = 9; + + // The operation was aborted, typically due to a concurrency issue such as + // a sequencer check failure or transaction abort. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 409 Conflict + ABORTED = 10; + + // The operation was attempted past the valid range. E.g., seeking or + // reading past end-of-file. + // + // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may + // be fixed if the system state changes. For example, a 32-bit file + // system will generate `INVALID_ARGUMENT` if asked to read at an + // offset that is not in the range [0,2^32-1], but it will generate + // `OUT_OF_RANGE` if asked to read from an offset past the current + // file size. + // + // There is a fair bit of overlap between `FAILED_PRECONDITION` and + // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific + // error) when it applies so that callers who are iterating through + // a space can easily look for an `OUT_OF_RANGE` error to detect when + // they are done. + // + // HTTP Mapping: 400 Bad Request + OUT_OF_RANGE = 11; + + // The operation is not implemented or is not supported/enabled in this + // service. + // + // HTTP Mapping: 501 Not Implemented + UNIMPLEMENTED = 12; + + // Internal errors. This means that some invariants expected by the + // underlying system have been broken. This error code is reserved + // for serious errors. + // + // HTTP Mapping: 500 Internal Server Error + INTERNAL = 13; + + // The service is currently unavailable. This is most likely a + // transient condition, which can be corrected by retrying with + // a backoff. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 503 Service Unavailable + UNAVAILABLE = 14; + + // Unrecoverable data loss or corruption. + // + // HTTP Mapping: 500 Internal Server Error + DATA_LOSS = 15; +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/error_details.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/error_details.proto new file mode 100644 index 00000000..f24ae009 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/error_details.proto @@ -0,0 +1,200 @@ +// Copyright 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/rpc/errdetails;errdetails"; +option java_multiple_files = true; +option java_outer_classname = "ErrorDetailsProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + + +// Describes when the clients can retry a failed request. Clients could ignore +// the recommendation here or retry when this information is missing from error +// responses. +// +// It's always recommended that clients should use exponential backoff when +// retrying. +// +// Clients should wait until `retry_delay` amount of time has passed since +// receiving the error response before retrying. If retrying requests also +// fail, clients should use an exponential backoff scheme to gradually increase +// the delay between retries based on `retry_delay`, until either a maximum +// number of retires have been reached or a maximum retry delay cap has been +// reached. +message RetryInfo { + // Clients should wait at least this long between retrying the same request. + google.protobuf.Duration retry_delay = 1; +} + +// Describes additional debugging info. +message DebugInfo { + // The stack trace entries indicating where the error occurred. + repeated string stack_entries = 1; + + // Additional debugging information provided by the server. + string detail = 2; +} + +// Describes how a quota check failed. +// +// For example if a daily limit was exceeded for the calling project, +// a service could respond with a QuotaFailure detail containing the project +// id and the description of the quota limit that was exceeded. If the +// calling project hasn't enabled the service in the developer console, then +// a service could respond with the project id and set `service_disabled` +// to true. +// +// Also see RetryDetail and Help types for other details about handling a +// quota failure. +message QuotaFailure { + // A message type used to describe a single quota violation. For example, a + // daily quota or a custom quota that was exceeded. + message Violation { + // The subject on which the quota check failed. + // For example, "clientip:" or "project:". + string subject = 1; + + // A description of how the quota check failed. Clients can use this + // description to find more about the quota configuration in the service's + // public documentation, or find the relevant quota limit to adjust through + // developer console. + // + // For example: "Service disabled" or "Daily Limit for read operations + // exceeded". + string description = 2; + } + + // Describes all quota violations. + repeated Violation violations = 1; +} + +// Describes what preconditions have failed. +// +// For example, if an RPC failed because it required the Terms of Service to be +// acknowledged, it could list the terms of service violation in the +// PreconditionFailure message. +message PreconditionFailure { + // A message type used to describe a single precondition failure. + message Violation { + // The type of PreconditionFailure. 
We recommend using a service-specific + // enum type to define the supported precondition violation types. For + // example, "TOS" for "Terms of Service violation". + string type = 1; + + // The subject, relative to the type, that failed. + // For example, "google.com/cloud" relative to the "TOS" type would + // indicate which terms of service is being referenced. + string subject = 2; + + // A description of how the precondition failed. Developers can use this + // description to understand how to fix the failure. + // + // For example: "Terms of service not accepted". + string description = 3; + } + + // Describes all precondition violations. + repeated Violation violations = 1; +} + +// Describes violations in a client request. This error type focuses on the +// syntactic aspects of the request. +message BadRequest { + // A message type used to describe a single bad request field. + message FieldViolation { + // A path leading to a field in the request body. The value will be a + // sequence of dot-separated identifiers that identify a protocol buffer + // field. E.g., "field_violations.field" would identify this field. + string field = 1; + + // A description of why the request element is bad. + string description = 2; + } + + // Describes all violations in a client request. + repeated FieldViolation field_violations = 1; +} + +// Contains metadata about the request that clients can attach when filing a bug +// or providing other forms of feedback. +message RequestInfo { + // An opaque string that should only be interpreted by the service generating + // it. For example, it can be used to identify requests in the service's logs. + string request_id = 1; + + // Any data that was used to serve this request. For example, an encrypted + // stack trace that can be sent back to the service provider for debugging. + string serving_data = 2; +} + +// Describes the resource that is being accessed. +message ResourceInfo { + // A name for the type of resource being accessed, e.g. "sql table", + // "cloud storage bucket", "file", "Google calendar"; or the type URL + // of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic". + string resource_type = 1; + + // The name of the resource being accessed. For example, a shared calendar + // name: "example.com_4fghdhgsrgh@group.calendar.google.com", if the current + // error is [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED]. + string resource_name = 2; + + // The owner of the resource (optional). + // For example, "user:" or "project:". + string owner = 3; + + // Describes what error is encountered when accessing this resource. + // For example, updating a cloud project may require the `writer` permission + // on the developer console project. + string description = 4; +} + +// Provides links to documentation or for performing an out of band action. +// +// For example, if a quota check failed with an error indicating the calling +// project hasn't enabled the accessed service, this can contain a URL pointing +// directly to the right place in the developer console to flip the bit. +message Help { + // Describes a URL link. + message Link { + // Describes what the link offers. + string description = 1; + + // The URL of the link. + string url = 2; + } + + // URL(s) pointing to additional information on handling the current error. + repeated Link links = 1; +} + +// Provides a localized error message that is safe to return to the user +// which can be attached to an RPC error. 
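These detail messages are what a server attaches to a *status.Status, and they surface on the gateway side as the google.protobuf.Any values in StreamError.Details. A hedged sketch of the server side, using the errdetails package named in the go_package option above (field values are made up for illustration):

package main

import (
	"fmt"

	"google.golang.org/genproto/googleapis/rpc/errdetails"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func invalidName() error {
	st := status.New(codes.InvalidArgument, "name is malformed")
	// WithDetails packs each detail message into a google.protobuf.Any
	// on the status proto.
	st, err := st.WithDetails(&errdetails.BadRequest{
		FieldViolations: []*errdetails.BadRequest_FieldViolation{
			{Field: "name", Description: "must be of the form projects/{project_id}"},
		},
	})
	if err != nil {
		return status.Error(codes.Internal, "failed to attach error details")
	}
	fmt.Println(len(st.Proto().GetDetails())) // 1
	return st.Err()
}

LocalizedMessage, defined next, is the detail type intended for user-facing translations of the error message.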
+message LocalizedMessage { + // The locale used following the specification defined at + // http://www.rfc-editor.org/rfc/bcp/bcp47.txt. + // Examples are: "en-US", "fr-CH", "es-MX" + string locale = 1; + + // The localized error message in the above locale. + string message = 2; +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/status.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/status.proto new file mode 100644 index 00000000..0839ee96 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/rpc/status.proto @@ -0,0 +1,92 @@ +// Copyright 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + + +// The `Status` type defines a logical error model that is suitable for different +// programming environments, including REST APIs and RPC APIs. It is used by +// [gRPC](https://github.com/grpc). The error model is designed to be: +// +// - Simple to use and understand for most users +// - Flexible enough to meet unexpected needs +// +// # Overview +// +// The `Status` message contains three pieces of data: error code, error message, +// and error details. The error code should be an enum value of +// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed. The +// error message should be a developer-facing English message that helps +// developers *understand* and *resolve* the error. If a localized user-facing +// error message is needed, put the localized message in the error details or +// localize it in the client. The optional error details may contain arbitrary +// information about the error. There is a predefined set of error detail types +// in the package `google.rpc` that can be used for common error conditions. +// +// # Language mapping +// +// The `Status` message is the logical representation of the error model, but it +// is not necessarily the actual wire format. When the `Status` message is +// exposed in different client libraries and different wire protocols, it can be +// mapped differently. For example, it will likely be mapped to some exceptions +// in Java, but more likely mapped to some error codes in C. +// +// # Other uses +// +// The error model and the `Status` message can be used in a variety of +// environments, either with or without APIs, to provide a +// consistent developer experience across different environments. +// +// Example uses of this error model include: +// +// - Partial errors. If a service needs to return partial errors to the client, +// it may embed the `Status` in the normal response to indicate the partial +// errors. +// +// - Workflow errors. 
A typical workflow has multiple steps. Each step may +// have a `Status` message for error reporting. +// +// - Batch operations. If a client uses batch request and batch response, the +// `Status` message should be used directly inside batch response, one for +// each error sub-response. +// +// - Asynchronous operations. If an API call embeds asynchronous operation +// results in its response, the status of those operations should be +// represented directly using the `Status` message. +// +// - Logging. If some API errors are stored in logs, the message `Status` could +// be used directly after any stripping needed for security/privacy reasons. +message Status { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel new file mode 100644 index 00000000..58d291c3 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel @@ -0,0 +1,20 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package(default_visibility = ["//visibility:public"]) + +go_library( + name = "go_default_library", + srcs = [ + "doc.go", + "pattern.go", + "trie.go", + ], + importpath = "github.com/grpc-ecosystem/grpc-gateway/utilities", +) + +go_test( + name = "go_default_xtest", + size = "small", + srcs = ["trie_test.go"], + deps = [":go_default_library"], +) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go index 28ad9461..dfe7de48 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go @@ -17,6 +17,6 @@ const ( OpConcatN // OpCapture pops an item and binds it to the variable OpCapture - // OpEnd is the least postive invalid opcode. + // OpEnd is the least positive invalid opcode. 
OpEnd ) diff --git a/vendor/github.com/guregu/null/.gitignore b/vendor/github.com/guregu/null/.gitignore index 1b3ac108..e9eb644a 100644 --- a/vendor/github.com/guregu/null/.gitignore +++ b/vendor/github.com/guregu/null/.gitignore @@ -1 +1,2 @@ -coverage.out \ No newline at end of file +coverage.out +.idea/ diff --git a/vendor/github.com/guregu/null/README.md b/vendor/github.com/guregu/null/README.md index 62bf48be..7bd336e6 100644 --- a/vendor/github.com/guregu/null/README.md +++ b/vendor/github.com/guregu/null/README.md @@ -1,4 +1,4 @@ -## null [![GoDoc](https://godoc.org/github.com/guregu/null?status.svg)](https://godoc.org/github.com/guregu/null) [![Coverage](http://gocover.io/_badge/github.com/guregu/null)](http://gocover.io/github.com/guregu/null) +## null [![GoDoc](https://godoc.org/github.com/guregu/null?status.svg)](https://godoc.org/github.com/guregu/null) [![CircleCI](https://circleci.com/gh/guregu/null.svg?style=svg)](https://circleci.com/gh/guregu/null) `import "gopkg.in/guregu/null.v3"` null is a library with reasonable options for dealing with nullable SQL and JSON values diff --git a/vendor/github.com/guregu/null/bool.go b/vendor/github.com/guregu/null/bool.go index 1ec13961..6d782ef2 100644 --- a/vendor/github.com/guregu/null/bool.go +++ b/vendor/github.com/guregu/null/bool.go @@ -38,6 +38,11 @@ func BoolFromPtr(b *bool) Bool { return NewBool(*b, true) } +// ValueOrZero returns the inner value if valid, otherwise false. +func (b Bool) ValueOrZero() bool { + return b.Valid && b.Bool +} + // UnmarshalJSON implements json.Unmarshaler. // It supports number and null input. // 0 will not be considered a null Bool. diff --git a/vendor/github.com/guregu/null/bool_test.go b/vendor/github.com/guregu/null/bool_test.go index 1dc992ad..c1e25948 100644 --- a/vendor/github.com/guregu/null/bool_test.go +++ b/vendor/github.com/guregu/null/bool_test.go @@ -176,6 +176,18 @@ func TestBoolScan(t *testing.T) { assertNullBool(t, null, "scanned null") } +func TestBoolValueOrZero(t *testing.T) { + valid := NewBool(true, true) + if valid.ValueOrZero() != true { + t.Error("unexpected ValueOrZero", valid.ValueOrZero()) + } + + invalid := NewBool(true, false) + if invalid.ValueOrZero() != false { + t.Error("unexpected ValueOrZero", invalid.ValueOrZero()) + } +} + func assertBool(t *testing.T, b Bool, from string) { if b.Bool != true { t.Errorf("bad %s bool: %v ≠ %v\n", from, b.Bool, true) diff --git a/vendor/github.com/guregu/null/float.go b/vendor/github.com/guregu/null/float.go index 1f57b959..e48f29ba 100644 --- a/vendor/github.com/guregu/null/float.go +++ b/vendor/github.com/guregu/null/float.go @@ -4,6 +4,7 @@ import ( "database/sql" "encoding/json" "fmt" + "math" "reflect" "strconv" ) @@ -38,6 +39,14 @@ func FloatFromPtr(f *float64) Float { return NewFloat(*f, true) } +// ValueOrZero returns the inner value if valid, otherwise zero. +func (f Float) ValueOrZero() float64 { + if !f.Valid { + return 0 + } + return f.Float64 +} + // UnmarshalJSON implements json.Unmarshaler. // It supports number and null input. // 0 will not be considered a null Float. 
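The guregu/null hunks above add a ValueOrZero accessor to Bool and Float (and, further below, to Int, String, and Time), returning the wrapped value or the type's zero value when the field is invalid. A minimal usage sketch follows; it is illustrative only and not part of the patch, and it assumes the gopkg.in/guregu/null.v3 import path given in the package README:

package main

import (
	"fmt"

	"gopkg.in/guregu/null.v3"
)

func main() {
	valid := null.FloatFrom(1.5)         // Valid is true
	invalid := null.NewFloat(1.5, false) // explicitly marked invalid
	fmt.Println(valid.ValueOrZero())     // 1.5
	fmt.Println(invalid.ValueOrZero())   // 0

	b := null.BoolFromPtr(nil)   // nil pointer yields an invalid Bool
	fmt.Println(b.ValueOrZero()) // false
}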
@@ -51,6 +60,13 @@ func (f *Float) UnmarshalJSON(data []byte) error { switch x := v.(type) { case float64: f.Float64 = float64(x) + case string: + str := string(x) + if len(str) == 0 { + f.Valid = false + return nil + } + f.Float64, err = strconv.ParseFloat(str, 64) case map[string]interface{}: err = json.Unmarshal(data, &f.NullFloat64) case nil: @@ -84,6 +100,12 @@ func (f Float) MarshalJSON() ([]byte, error) { if !f.Valid { return []byte("null"), nil } + if math.IsInf(f.Float64, 0) || math.IsNaN(f.Float64) { + return nil, &json.UnsupportedValueError{ + Value: reflect.ValueOf(f.Float64), + Str: strconv.FormatFloat(f.Float64, 'g', -1, 64), + } + } return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil } diff --git a/vendor/github.com/guregu/null/float_test.go b/vendor/github.com/guregu/null/float_test.go index abf13689..cf0c1635 100644 --- a/vendor/github.com/guregu/null/float_test.go +++ b/vendor/github.com/guregu/null/float_test.go @@ -2,12 +2,15 @@ package null import ( "encoding/json" + "math" "testing" ) var ( - floatJSON = []byte(`1.2345`) - nullFloatJSON = []byte(`{"Float64":1.2345,"Valid":true}`) + floatJSON = []byte(`1.2345`) + floatStringJSON = []byte(`"1.2345"`) + floatBlankJSON = []byte(`""`) + nullFloatJSON = []byte(`{"Float64":1.2345,"Valid":true}`) ) func TestFloatFrom(t *testing.T) { @@ -36,16 +39,26 @@ func TestUnmarshalFloat(t *testing.T) { maybePanic(err) assertFloat(t, f, "float json") + var sf Float + err = json.Unmarshal(floatStringJSON, &sf) + maybePanic(err) + assertFloat(t, sf, "string float json") + var nf Float err = json.Unmarshal(nullFloatJSON, &nf) maybePanic(err) - assertFloat(t, nf, "sq.NullFloat64 json") + assertFloat(t, nf, "sql.NullFloat64 json") var null Float err = json.Unmarshal(nullJSON, &null) maybePanic(err) assertNullFloat(t, null, "null json") + var blank Float + err = json.Unmarshal(floatBlankJSON, &blank) + maybePanic(err) + assertNullFloat(t, blank, "null blank string json") + var badType Float err = json.Unmarshal(boolJSON, &badType) if err == nil { @@ -147,12 +160,43 @@ func TestFloatScan(t *testing.T) { maybePanic(err) assertFloat(t, f, "scanned float") + var sf Float + err = sf.Scan("1.2345") + maybePanic(err) + assertFloat(t, sf, "scanned string float") + var null Float err = null.Scan(nil) maybePanic(err) assertNullFloat(t, null, "scanned null") } +func TestFloatInfNaN(t *testing.T) { + nan := NewFloat(math.NaN(), true) + _, err := nan.MarshalJSON() + if err == nil { + t.Error("expected error for NaN, got nil") + } + + inf := NewFloat(math.Inf(1), true) + _, err = inf.MarshalJSON() + if err == nil { + t.Error("expected error for Inf, got nil") + } +} + +func TestFloatValueOrZero(t *testing.T) { + valid := NewFloat(1.2345, true) + if valid.ValueOrZero() != 1.2345 { + t.Error("unexpected ValueOrZero", valid.ValueOrZero()) + } + + invalid := NewFloat(1.2345, false) + if invalid.ValueOrZero() != 0 { + t.Error("unexpected ValueOrZero", invalid.ValueOrZero()) + } +} + func assertFloat(t *testing.T, f Float, from string) { if f.Float64 != 1.2345 { t.Errorf("bad %s float: %f ≠ %f\n", from, f.Float64, 1.2345) diff --git a/vendor/github.com/guregu/null/int.go b/vendor/github.com/guregu/null/int.go index 981d17b0..91f26644 100644 --- a/vendor/github.com/guregu/null/int.go +++ b/vendor/github.com/guregu/null/int.go @@ -38,6 +38,14 @@ func IntFromPtr(i *int64) Int { return NewInt(*i, true) } +// ValueOrZero returns the inner value if valid, otherwise zero. 
+func (i Int) ValueOrZero() int64 { + if !i.Valid { + return 0 + } + return i.Int64 +} + // UnmarshalJSON implements json.Unmarshaler. // It supports number and null input. // 0 will not be considered a null Int. @@ -48,10 +56,17 @@ func (i *Int) UnmarshalJSON(data []byte) error { if err = json.Unmarshal(data, &v); err != nil { return err } - switch v.(type) { + switch x := v.(type) { case float64: // Unmarshal again, directly to int64, to avoid intermediate float64 err = json.Unmarshal(data, &i.Int64) + case string: + str := string(x) + if len(str) == 0 { + i.Valid = false + return nil + } + i.Int64, err = strconv.ParseInt(str, 10, 64) case map[string]interface{}: err = json.Unmarshal(data, &i.NullInt64) case nil: diff --git a/vendor/github.com/guregu/null/int_test.go b/vendor/github.com/guregu/null/int_test.go index 2dd37a75..b8dda623 100644 --- a/vendor/github.com/guregu/null/int_test.go +++ b/vendor/github.com/guregu/null/int_test.go @@ -8,8 +8,9 @@ import ( ) var ( - intJSON = []byte(`12345`) - nullIntJSON = []byte(`{"Int64":12345,"Valid":true}`) + intJSON = []byte(`12345`) + intStringJSON = []byte(`"12345"`) + nullIntJSON = []byte(`{"Int64":12345,"Valid":true}`) ) func TestIntFrom(t *testing.T) { @@ -38,10 +39,20 @@ func TestUnmarshalInt(t *testing.T) { maybePanic(err) assertInt(t, i, "int json") + var si Int + err = json.Unmarshal(intStringJSON, &si) + maybePanic(err) + assertInt(t, si, "int string json") + var ni Int err = json.Unmarshal(nullIntJSON, &ni) maybePanic(err) - assertInt(t, ni, "sq.NullInt64 json") + assertInt(t, ni, "sql.NullInt64 json") + + var bi Int + err = json.Unmarshal(floatBlankJSON, &bi) + maybePanic(err) + assertNullInt(t, bi, "blank json string") var null Int err = json.Unmarshal(nullJSON, &null) @@ -180,6 +191,18 @@ func TestIntScan(t *testing.T) { assertNullInt(t, null, "scanned null") } +func TestIntValueOrZero(t *testing.T) { + valid := NewInt(12345, true) + if valid.ValueOrZero() != 12345 { + t.Error("unexpected ValueOrZero", valid.ValueOrZero()) + } + + invalid := NewInt(12345, false) + if invalid.ValueOrZero() != 0 { + t.Error("unexpected ValueOrZero", invalid.ValueOrZero()) + } +} + func assertInt(t *testing.T, i Int, from string) { if i.Int64 != 12345 { t.Errorf("bad %s int: %d ≠ %d\n", from, i.Int64, 12345) diff --git a/vendor/github.com/guregu/null/string.go b/vendor/github.com/guregu/null/string.go index 554aac82..49e617f5 100644 --- a/vendor/github.com/guregu/null/string.go +++ b/vendor/github.com/guregu/null/string.go @@ -30,6 +30,14 @@ func StringFromPtr(s *string) String { return NewString(*s, true) } +// ValueOrZero returns the inner value if valid, otherwise zero. 
+func (s String) ValueOrZero() string { + if !s.Valid { + return "" + } + return s.String +} + // NewString creates a new String func NewString(s string, valid bool) String { return String{ diff --git a/vendor/github.com/guregu/null/string_test.go b/vendor/github.com/guregu/null/string_test.go index c02d6b09..808c436f 100644 --- a/vendor/github.com/guregu/null/string_test.go +++ b/vendor/github.com/guregu/null/string_test.go @@ -178,6 +178,18 @@ func TestStringScan(t *testing.T) { assertNullStr(t, null, "scanned null") } +func TestStringValueOrZero(t *testing.T) { + valid := NewString("test", true) + if valid.ValueOrZero() != "test" { + t.Error("unexpected ValueOrZero", valid.ValueOrZero()) + } + + invalid := NewString("test", false) + if invalid.ValueOrZero() != "" { + t.Error("unexpected ValueOrZero", invalid.ValueOrZero()) + } +} + func maybePanic(err error) { if err != nil { panic(err) diff --git a/vendor/github.com/guregu/null/time.go b/vendor/github.com/guregu/null/time.go index a4d84392..5e9c8d25 100644 --- a/vendor/github.com/guregu/null/time.go +++ b/vendor/github.com/guregu/null/time.go @@ -60,6 +60,14 @@ func TimeFromPtr(t *time.Time) Time { return NewTime(*t, true) } +// ValueOrZero returns the inner value if valid, otherwise zero. +func (t Time) ValueOrZero() time.Time { + if !t.Valid { + return time.Time{} + } + return t.Time +} + // MarshalJSON implements json.Marshaler. // It will encode null if this time is null. func (t Time) MarshalJSON() ([]byte, error) { diff --git a/vendor/github.com/guregu/null/time_test.go b/vendor/github.com/guregu/null/time_test.go index e3c0bfd2..545f18e9 100644 --- a/vendor/github.com/guregu/null/time_test.go +++ b/vendor/github.com/guregu/null/time_test.go @@ -159,6 +159,19 @@ func TestTimeScanValue(t *testing.T) { assertNullTime(t, wrong, "scanned wrong") } +func TestTimeValueOrZero(t *testing.T) { + valid := TimeFrom(timeValue) + if valid.ValueOrZero() != valid.Time || valid.ValueOrZero().IsZero() { + t.Error("unexpected ValueOrZero", valid.ValueOrZero()) + } + + invalid := valid + invalid.Valid = false + if !invalid.ValueOrZero().IsZero() { + t.Error("unexpected ValueOrZero", invalid.ValueOrZero()) + } +} + func assertTime(t *testing.T, ti Time, from string) { if ti.Time != timeValue { t.Errorf("bad %v time: %v ≠ %v\n", from, ti.Time, timeValue) diff --git a/vendor/github.com/guregu/null/zero/float.go b/vendor/github.com/guregu/null/zero/float.go index 1bb852fe..e998543f 100644 --- a/vendor/github.com/guregu/null/zero/float.go +++ b/vendor/github.com/guregu/null/zero/float.go @@ -4,6 +4,7 @@ import ( "database/sql" "encoding/json" "fmt" + "math" "reflect" "strconv" ) @@ -51,6 +52,13 @@ func (f *Float) UnmarshalJSON(data []byte) error { switch x := v.(type) { case float64: f.Float64 = x + case string: + str := string(x) + if len(str) == 0 { + f.Valid = false + return nil + } + f.Float64, err = strconv.ParseFloat(str, 64) case map[string]interface{}: err = json.Unmarshal(data, &f.NullFloat64) case nil: @@ -85,6 +93,12 @@ func (f Float) MarshalJSON() ([]byte, error) { if !f.Valid { n = 0 } + if math.IsInf(f.Float64, 0) || math.IsNaN(f.Float64) { + return nil, &json.UnsupportedValueError{ + Value: reflect.ValueOf(f.Float64), + Str: strconv.FormatFloat(f.Float64, 'g', -1, 64), + } + } return []byte(strconv.FormatFloat(n, 'f', -1, 64)), nil } diff --git a/vendor/github.com/guregu/null/zero/float_test.go b/vendor/github.com/guregu/null/zero/float_test.go index 5867aa29..6c30ee18 100644 --- a/vendor/github.com/guregu/null/zero/float_test.go +++ 
b/vendor/github.com/guregu/null/zero/float_test.go @@ -2,12 +2,15 @@ package zero import ( "encoding/json" + "math" "testing" ) var ( - floatJSON = []byte(`1.2345`) - nullFloatJSON = []byte(`{"Float64":1.2345,"Valid":true}`) + floatJSON = []byte(`1.2345`) + floatStringJSON = []byte(`"1.2345"`) + floatBlankJSON = []byte(`""`) + nullFloatJSON = []byte(`{"Float64":1.2345,"Valid":true}`) ) func TestFloatFrom(t *testing.T) { @@ -36,11 +39,21 @@ func TestUnmarshalFloat(t *testing.T) { maybePanic(err) assertFloat(t, f, "float json") + var sf Float + err = json.Unmarshal(floatStringJSON, &sf) + maybePanic(err) + assertFloat(t, sf, "string float json") + var nf Float err = json.Unmarshal(nullFloatJSON, &nf) maybePanic(err) assertFloat(t, nf, "sql.NullFloat64 json") + var blank Float + err = json.Unmarshal(floatBlankJSON, &blank) + maybePanic(err) + assertNullFloat(t, blank, "null blank string json") + var zero Float err = json.Unmarshal(zeroJSON, &zero) maybePanic(err) @@ -164,6 +177,20 @@ func TestFloatScan(t *testing.T) { assertNullFloat(t, null, "scanned null") } +func TestFloatInfNaN(t *testing.T) { + nan := NewFloat(math.NaN(), true) + _, err := nan.MarshalJSON() + if err == nil { + t.Error("expected error for NaN, got nil") + } + + inf := NewFloat(math.Inf(1), true) + _, err = inf.MarshalJSON() + if err == nil { + t.Error("expected error for Inf, got nil") + } +} + func assertFloat(t *testing.T, f Float, from string) { if f.Float64 != 1.2345 { t.Errorf("bad %s float: %f ≠ %f\n", from, f.Float64, 1.2345) diff --git a/vendor/github.com/guregu/null/zero/int.go b/vendor/github.com/guregu/null/zero/int.go index 3645c41f..98d09cb8 100644 --- a/vendor/github.com/guregu/null/zero/int.go +++ b/vendor/github.com/guregu/null/zero/int.go @@ -49,10 +49,17 @@ func (i *Int) UnmarshalJSON(data []byte) error { if err = json.Unmarshal(data, &v); err != nil { return err } - switch v.(type) { + switch x := v.(type) { case float64: // Unmarshal again, directly to int64, to avoid intermediate float64 err = json.Unmarshal(data, &i.Int64) + case string: + str := string(x) + if len(str) == 0 { + i.Valid = false + return nil + } + i.Int64, err = strconv.ParseInt(str, 10, 64) case map[string]interface{}: err = json.Unmarshal(data, &i.NullInt64) case nil: diff --git a/vendor/github.com/guregu/null/zero/int_test.go b/vendor/github.com/guregu/null/zero/int_test.go index b81cad0a..e0e191e2 100644 --- a/vendor/github.com/guregu/null/zero/int_test.go +++ b/vendor/github.com/guregu/null/zero/int_test.go @@ -8,9 +8,10 @@ import ( ) var ( - intJSON = []byte(`12345`) - nullIntJSON = []byte(`{"Int64":12345,"Valid":true}`) - zeroJSON = []byte(`0`) + intJSON = []byte(`12345`) + intStringJSON = []byte(`"12345"`) + nullIntJSON = []byte(`{"Int64":12345,"Valid":true}`) + zeroJSON = []byte(`0`) ) func TestIntFrom(t *testing.T) { @@ -39,11 +40,21 @@ func TestUnmarshalInt(t *testing.T) { maybePanic(err) assertInt(t, i, "int json") + var si Int + err = json.Unmarshal(intStringJSON, &si) + maybePanic(err) + assertInt(t, si, "int string json") + var ni Int err = json.Unmarshal(nullIntJSON, &ni) maybePanic(err) assertInt(t, ni, "sql.NullInt64 json") + var bi Int + err = json.Unmarshal(floatBlankJSON, &bi) + maybePanic(err) + assertNullInt(t, bi, "blank json string") + var zero Int err = json.Unmarshal(zeroJSON, &zero) maybePanic(err) diff --git a/vendor/github.com/hashicorp/golang-lru/2q.go b/vendor/github.com/hashicorp/golang-lru/2q.go index 337d9632..e474cd07 100644 --- a/vendor/github.com/hashicorp/golang-lru/2q.go +++ 
b/vendor/github.com/hashicorp/golang-lru/2q.go @@ -30,9 +30,9 @@ type TwoQueueCache struct { size int recentSize int - recent *simplelru.LRU - frequent *simplelru.LRU - recentEvict *simplelru.LRU + recent simplelru.LRUCache + frequent simplelru.LRUCache + recentEvict simplelru.LRUCache lock sync.RWMutex } @@ -84,7 +84,8 @@ func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCa return c, nil } -func (c *TwoQueueCache) Get(key interface{}) (interface{}, bool) { +// Get looks up a key's value from the cache. +func (c *TwoQueueCache) Get(key interface{}) (value interface{}, ok bool) { c.lock.Lock() defer c.lock.Unlock() @@ -105,6 +106,7 @@ func (c *TwoQueueCache) Get(key interface{}) (interface{}, bool) { return nil, false } +// Add adds a value to the cache. func (c *TwoQueueCache) Add(key, value interface{}) { c.lock.Lock() defer c.lock.Unlock() @@ -160,12 +162,15 @@ func (c *TwoQueueCache) ensureSpace(recentEvict bool) { c.frequent.RemoveOldest() } +// Len returns the number of items in the cache. func (c *TwoQueueCache) Len() int { c.lock.RLock() defer c.lock.RUnlock() return c.recent.Len() + c.frequent.Len() } +// Keys returns a slice of the keys in the cache. +// The frequently used keys are first in the returned slice. func (c *TwoQueueCache) Keys() []interface{} { c.lock.RLock() defer c.lock.RUnlock() @@ -174,6 +179,7 @@ func (c *TwoQueueCache) Keys() []interface{} { return append(k1, k2...) } +// Remove removes the provided key from the cache. func (c *TwoQueueCache) Remove(key interface{}) { c.lock.Lock() defer c.lock.Unlock() @@ -188,6 +194,7 @@ func (c *TwoQueueCache) Remove(key interface{}) { } } +// Purge is used to completely clear the cache. func (c *TwoQueueCache) Purge() { c.lock.Lock() defer c.lock.Unlock() @@ -196,13 +203,17 @@ func (c *TwoQueueCache) Purge() { c.recentEvict.Purge() } +// Contains is used to check if the cache contains a key +// without updating recency or frequency. func (c *TwoQueueCache) Contains(key interface{}) bool { c.lock.RLock() defer c.lock.RUnlock() return c.frequent.Contains(key) || c.recent.Contains(key) } -func (c *TwoQueueCache) Peek(key interface{}) (interface{}, bool) { +// Peek is used to inspect the cache value of a key +// without updating recency or frequency. +func (c *TwoQueueCache) Peek(key interface{}) (value interface{}, ok bool) { c.lock.RLock() defer c.lock.RUnlock() if val, ok := c.frequent.Peek(key); ok { diff --git a/vendor/github.com/hashicorp/golang-lru/arc.go b/vendor/github.com/hashicorp/golang-lru/arc.go index a2a25281..555225a2 100644 --- a/vendor/github.com/hashicorp/golang-lru/arc.go +++ b/vendor/github.com/hashicorp/golang-lru/arc.go @@ -18,11 +18,11 @@ type ARCCache struct { size int // Size is the total capacity of the cache p int // P is the dynamic preference towards T1 or T2 - t1 *simplelru.LRU // T1 is the LRU for recently accessed items - b1 *simplelru.LRU // B1 is the LRU for evictions from t1 + t1 simplelru.LRUCache // T1 is the LRU for recently accessed items + b1 simplelru.LRUCache // B1 is the LRU for evictions from t1 - t2 *simplelru.LRU // T2 is the LRU for frequently accessed items - b2 *simplelru.LRU // B2 is the LRU for evictions from t2 + t2 simplelru.LRUCache // T2 is the LRU for frequently accessed items + b2 simplelru.LRUCache // B2 is the LRU for evictions from t2 lock sync.RWMutex } @@ -60,11 +60,11 @@ func NewARC(size int) (*ARCCache, error) { } // Get looks up a key's value from the cache. 
-func (c *ARCCache) Get(key interface{}) (interface{}, bool) { +func (c *ARCCache) Get(key interface{}) (value interface{}, ok bool) { c.lock.Lock() defer c.lock.Unlock() - // Ff the value is contained in T1 (recent), then + // If the value is contained in T1 (recent), then // promote it to T2 (frequent) if val, ok := c.t1.Peek(key); ok { c.t1.Remove(key) @@ -153,7 +153,7 @@ func (c *ARCCache) Add(key, value interface{}) { // Remove from B2 c.b2.Remove(key) - // Add the key to the frequntly used list + // Add the key to the frequently used list c.t2.Add(key, value) return } @@ -247,7 +247,7 @@ func (c *ARCCache) Contains(key interface{}) bool { // Peek is used to inspect the cache value of a key // without updating recency or frequency. -func (c *ARCCache) Peek(key interface{}) (interface{}, bool) { +func (c *ARCCache) Peek(key interface{}) (value interface{}, ok bool) { c.lock.RLock() defer c.lock.RUnlock() if val, ok := c.t1.Peek(key); ok { diff --git a/vendor/github.com/hashicorp/golang-lru/doc.go b/vendor/github.com/hashicorp/golang-lru/doc.go new file mode 100644 index 00000000..2547df97 --- /dev/null +++ b/vendor/github.com/hashicorp/golang-lru/doc.go @@ -0,0 +1,21 @@ +// Package lru provides three different LRU caches of varying sophistication. +// +// Cache is a simple LRU cache. It is based on the +// LRU implementation in groupcache: +// https://github.com/golang/groupcache/tree/master/lru +// +// TwoQueueCache tracks frequently used and recently used entries separately. +// This avoids a burst of accesses from taking out frequently used entries, +// at the cost of about 2x computational overhead and some extra bookkeeping. +// +// ARCCache is an adaptive replacement cache. It tracks recent evictions as +// well as recent usage in both the frequent and recent caches. Its +// computational overhead is comparable to TwoQueueCache, but the memory +// overhead is linear with the size of the cache. +// +// ARC has been patented by IBM, so do not use it if that is problematic for +// your program. +// +// All caches in this package take locks while operating, and are therefore +// thread-safe for consumers. +package lru diff --git a/vendor/github.com/hashicorp/golang-lru/lru.go b/vendor/github.com/hashicorp/golang-lru/lru.go index a6285f98..c8d9b0a2 100644 --- a/vendor/github.com/hashicorp/golang-lru/lru.go +++ b/vendor/github.com/hashicorp/golang-lru/lru.go @@ -1,6 +1,3 @@ -// This package provides a simple LRU cache. It is based on the -// LRU implementation in groupcache: -// https://github.com/golang/groupcache/tree/master/lru package lru import ( @@ -11,11 +8,11 @@ import ( // Cache is a thread-safe fixed size LRU cache. type Cache struct { - lru *simplelru.LRU + lru simplelru.LRUCache lock sync.RWMutex } -// New creates an LRU of the given size +// New creates an LRU of the given size. func New(size int) (*Cache, error) { return NewWithEvict(size, nil) } @@ -33,7 +30,7 @@ func NewWithEvict(size int, onEvicted func(key interface{}, value interface{})) return c, nil } -// Purge is used to completely clear the cache +// Purge is used to completely clear the cache. func (c *Cache) Purge() { c.lock.Lock() c.lru.Purge() @@ -41,30 +38,30 @@ func (c *Cache) Purge() { } // Add adds a value to the cache. Returns true if an eviction occurred. -func (c *Cache) Add(key, value interface{}) bool { +func (c *Cache) Add(key, value interface{}) (evicted bool) { c.lock.Lock() defer c.lock.Unlock() return c.lru.Add(key, value) } // Get looks up a key's value from the cache. 
-func (c *Cache) Get(key interface{}) (interface{}, bool) { +func (c *Cache) Get(key interface{}) (value interface{}, ok bool) { c.lock.Lock() defer c.lock.Unlock() return c.lru.Get(key) } -// Check if a key is in the cache, without updating the recent-ness -// or deleting it for being stale. +// Contains checks if a key is in the cache, without updating the +// recent-ness or deleting it for being stale. func (c *Cache) Contains(key interface{}) bool { c.lock.RLock() defer c.lock.RUnlock() return c.lru.Contains(key) } -// Returns the key value (or undefined if not found) without updating +// Peek returns the key value (or undefined if not found) without updating // the "recently used"-ness of the key. -func (c *Cache) Peek(key interface{}) (interface{}, bool) { +func (c *Cache) Peek(key interface{}) (value interface{}, ok bool) { c.lock.RLock() defer c.lock.RUnlock() return c.lru.Peek(key) @@ -73,16 +70,15 @@ func (c *Cache) Peek(key interface{}) (interface{}, bool) { // ContainsOrAdd checks if a key is in the cache without updating the // recent-ness or deleting it for being stale, and if not, adds the value. // Returns whether found and whether an eviction occurred. -func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evict bool) { +func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool) { c.lock.Lock() defer c.lock.Unlock() if c.lru.Contains(key) { return true, false - } else { - evict := c.lru.Add(key, value) - return false, evict } + evicted = c.lru.Add(key, value) + return false, evicted } // Remove removes the provided key from the cache. diff --git a/vendor/github.com/hashicorp/golang-lru/lru_test.go b/vendor/github.com/hashicorp/golang-lru/lru_test.go index 2b31218b..e7e23505 100644 --- a/vendor/github.com/hashicorp/golang-lru/lru_test.go +++ b/vendor/github.com/hashicorp/golang-lru/lru_test.go @@ -72,7 +72,7 @@ func TestLRU(t *testing.T) { if k != v { t.Fatalf("Evict values not equal (%v!=%v)", k, v) } - evictCounter += 1 + evictCounter++ } l, err := NewWithEvict(128, onEvicted) if err != nil { @@ -136,7 +136,7 @@ func TestLRU(t *testing.T) { func TestLRUAdd(t *testing.T) { evictCounter := 0 onEvicted := func(k interface{}, v interface{}) { - evictCounter += 1 + evictCounter++ } l, err := NewWithEvict(1, onEvicted) diff --git a/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go b/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go index cb416b39..5673773b 100644 --- a/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go +++ b/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go @@ -36,7 +36,7 @@ func NewLRU(size int, onEvict EvictCallback) (*LRU, error) { return c, nil } -// Purge is used to completely clear the cache +// Purge is used to completely clear the cache. func (c *LRU) Purge() { for k, v := range c.items { if c.onEvict != nil { @@ -48,7 +48,7 @@ func (c *LRU) Purge() { } // Add adds a value to the cache. Returns true if an eviction occurred. -func (c *LRU) Add(key, value interface{}) bool { +func (c *LRU) Add(key, value interface{}) (evicted bool) { // Check for existing item if ent, ok := c.items[key]; ok { c.evictList.MoveToFront(ent) @@ -78,17 +78,18 @@ func (c *LRU) Get(key interface{}) (value interface{}, ok bool) { return } -// Check if a key is in the cache, without updating the recent-ness +// Contains checks if a key is in the cache, without updating the recent-ness // or deleting it for being stale. 
func (c *LRU) Contains(key interface{}) (ok bool) { _, ok = c.items[key] return ok } -// Returns the key value (or undefined if not found) without updating +// Peek returns the key value (or undefined if not found) without updating // the "recently used"-ness of the key. func (c *LRU) Peek(key interface{}) (value interface{}, ok bool) { - if ent, ok := c.items[key]; ok { + var ent *list.Element + if ent, ok = c.items[key]; ok { return ent.Value.(*entry).value, true } return nil, ok @@ -96,7 +97,7 @@ func (c *LRU) Peek(key interface{}) (value interface{}, ok bool) { // Remove removes the provided key from the cache, returning if the // key was contained. -func (c *LRU) Remove(key interface{}) bool { +func (c *LRU) Remove(key interface{}) (present bool) { if ent, ok := c.items[key]; ok { c.removeElement(ent) return true @@ -105,7 +106,7 @@ func (c *LRU) Remove(key interface{}) bool { } // RemoveOldest removes the oldest item from the cache. -func (c *LRU) RemoveOldest() (interface{}, interface{}, bool) { +func (c *LRU) RemoveOldest() (key interface{}, value interface{}, ok bool) { ent := c.evictList.Back() if ent != nil { c.removeElement(ent) @@ -116,7 +117,7 @@ func (c *LRU) RemoveOldest() (interface{}, interface{}, bool) { } // GetOldest returns the oldest entry -func (c *LRU) GetOldest() (interface{}, interface{}, bool) { +func (c *LRU) GetOldest() (key interface{}, value interface{}, ok bool) { ent := c.evictList.Back() if ent != nil { kv := ent.Value.(*entry) diff --git a/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go new file mode 100644 index 00000000..744cac01 --- /dev/null +++ b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go @@ -0,0 +1,37 @@ +package simplelru + + +// LRUCache is the interface for simple LRU cache. +type LRUCache interface { + // Adds a value to the cache, returns true if an eviction occurred and + // updates the "recently used"-ness of the key. + Add(key, value interface{}) bool + + // Returns key's value from the cache and + // updates the "recently used"-ness of the key. #value, isFound + Get(key interface{}) (value interface{}, ok bool) + + // Check if a key exsists in cache without updating the recent-ness. + Contains(key interface{}) (ok bool) + + // Returns key's value without updating the "recently used"-ness of the key. + Peek(key interface{}) (value interface{}, ok bool) + + // Removes a key from the cache. + Remove(key interface{}) bool + + // Removes the oldest entry from cache. + RemoveOldest() (interface{}, interface{}, bool) + + // Returns the oldest entry from the cache. #key, value, isFound + GetOldest() (interface{}, interface{}, bool) + + // Returns a slice of the keys in the cache, from oldest to newest. + Keys() []interface{} + + // Returns the number of items in the cache. 
+ Len() int + + // Clear all cache entries + Purge() +} diff --git a/vendor/github.com/hashicorp/golang-lru/simplelru/lru_test.go b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_test.go index a958934f..ca5676e1 100644 --- a/vendor/github.com/hashicorp/golang-lru/simplelru/lru_test.go +++ b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_test.go @@ -8,7 +8,7 @@ func TestLRU(t *testing.T) { if k != v { t.Fatalf("Evict values not equal (%v!=%v)", k, v) } - evictCounter += 1 + evictCounter++ } l, err := NewLRU(128, onEvicted) if err != nil { @@ -112,7 +112,7 @@ func TestLRU_GetOldest_RemoveOldest(t *testing.T) { func TestLRU_Add(t *testing.T) { evictCounter := 0 onEvicted := func(k interface{}, v interface{}) { - evictCounter += 1 + evictCounter++ } l, err := NewLRU(1, onEvicted) diff --git a/vendor/github.com/lib/pq/.travis.sh b/vendor/github.com/lib/pq/.travis.sh index ebf44703..a297dc45 100755 --- a/vendor/github.com/lib/pq/.travis.sh +++ b/vendor/github.com/lib/pq/.travis.sh @@ -70,4 +70,17 @@ postgresql_uninstall() { sudo rm -rf /var/lib/postgresql } +megacheck_install() { + # Lock megacheck version at $MEGACHECK_VERSION to prevent spontaneous + # new error messages in old code. + go get -d honnef.co/go/tools/... + git -C $GOPATH/src/honnef.co/go/tools/ checkout $MEGACHECK_VERSION + go install honnef.co/go/tools/cmd/megacheck + megacheck --version +} + +golint_install() { + go get github.com/golang/lint/golint +} + $1 diff --git a/vendor/github.com/lib/pq/.travis.yml b/vendor/github.com/lib/pq/.travis.yml index 1a4656c5..18556e08 100644 --- a/vendor/github.com/lib/pq/.travis.yml +++ b/vendor/github.com/lib/pq/.travis.yml @@ -1,10 +1,9 @@ language: go go: - - 1.5.x - - 1.6.x - - 1.7.x - 1.8.x + - 1.9.x + - 1.10.x - master sudo: true @@ -15,7 +14,9 @@ env: - PQGOSSLTESTS=1 - PQSSLCERTTEST_PATH=$PWD/certs - PGHOST=127.0.0.1 + - MEGACHECK_VERSION=2017.2.2 matrix: + - PGVERSION=10 - PGVERSION=9.6 - PGVERSION=9.5 - PGVERSION=9.4 @@ -30,6 +31,8 @@ before_install: - ./.travis.sh postgresql_install - ./.travis.sh postgresql_configure - ./.travis.sh client_configure + - ./.travis.sh megacheck_install + - ./.travis.sh golint_install - go get golang.org/x/tools/cmd/goimports before_script: @@ -41,5 +44,7 @@ script: - > goimports -d -e $(find -name '*.go') | awk '{ print } END { exit NR == 0 ? 0 : 1 }' - go vet ./... + - megacheck -go 1.8 ./... + - golint ./... - PQTEST_BINARY_PARAMETERS=no go test -race -v ./... - PQTEST_BINARY_PARAMETERS=yes go test -race -v ./... 
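The golang-lru hunks above introduce a simplelru.LRUCache interface and switch Cache, TwoQueueCache, and ARCCache to hold that interface instead of a concrete *simplelru.LRU. A minimal sketch of a consumer of the new interface follows; it is illustrative only, not part of the patch, and uses the upstream import path rather than the vendored copy:

package main

import (
	"fmt"

	"github.com/hashicorp/golang-lru/simplelru"
)

func main() {
	// *simplelru.LRU satisfies the new LRUCache interface, which is the
	// type the higher-level caches now store.
	var cache simplelru.LRUCache
	cache, _ = simplelru.NewLRU(2, nil)

	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Add("c", 3) // evicts "a", the oldest entry

	_, ok := cache.Get("a")
	fmt.Println(ok, cache.Len()) // false 2
}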
diff --git a/vendor/github.com/lib/pq/README.md b/vendor/github.com/lib/pq/README.md index 7670fc87..781c89ee 100644 --- a/vendor/github.com/lib/pq/README.md +++ b/vendor/github.com/lib/pq/README.md @@ -1,5 +1,6 @@ # pq - A pure Go postgres driver for Go's database/sql package +[![GoDoc](https://godoc.org/github.com/lib/pq?status.svg)](https://godoc.org/github.com/lib/pq) [![Build Status](https://travis-ci.org/lib/pq.svg?branch=master)](https://travis-ci.org/lib/pq) ## Install diff --git a/vendor/github.com/lib/pq/array.go b/vendor/github.com/lib/pq/array.go index e7b2145d..e4933e22 100644 --- a/vendor/github.com/lib/pq/array.go +++ b/vendor/github.com/lib/pq/array.go @@ -13,7 +13,7 @@ import ( var typeByteSlice = reflect.TypeOf([]byte{}) var typeDriverValuer = reflect.TypeOf((*driver.Valuer)(nil)).Elem() -var typeSqlScanner = reflect.TypeOf((*sql.Scanner)(nil)).Elem() +var typeSQLScanner = reflect.TypeOf((*sql.Scanner)(nil)).Elem() // Array returns the optimal driver.Valuer and sql.Scanner for an array or // slice of any dimension. @@ -278,7 +278,7 @@ func (GenericArray) evaluateDestination(rt reflect.Type) (reflect.Type, func([]b // TODO calculate the assign function for other types // TODO repeat this section on the element type of arrays or slices (multidimensional) { - if reflect.PtrTo(rt).Implements(typeSqlScanner) { + if reflect.PtrTo(rt).Implements(typeSQLScanner) { // dest is always addressable because it is an element of a slice. assign = func(src []byte, dest reflect.Value) (err error) { ss := dest.Addr().Interface().(sql.Scanner) @@ -587,7 +587,7 @@ func appendArrayElement(b []byte, rv reflect.Value) ([]byte, string, error) { } } - var del string = "," + var del = "," var err error var iv interface{} = rv.Interface() diff --git a/vendor/github.com/lib/pq/array_test.go b/vendor/github.com/lib/pq/array_test.go index 10b84318..f724bcd8 100644 --- a/vendor/github.com/lib/pq/array_test.go +++ b/vendor/github.com/lib/pq/array_test.go @@ -89,9 +89,7 @@ func TestParseArrayError(t *testing.T) { } func TestArrayScanner(t *testing.T) { - var s sql.Scanner - - s = Array(&[]bool{}) + var s sql.Scanner = Array(&[]bool{}) if _, ok := s.(*BoolArray); !ok { t.Errorf("Expected *BoolArray, got %T", s) } @@ -126,9 +124,7 @@ func TestArrayScanner(t *testing.T) { } func TestArrayValuer(t *testing.T) { - var v driver.Valuer - - v = Array([]bool{}) + var v driver.Valuer = Array([]bool{}) if _, ok := v.(*BoolArray); !ok { t.Errorf("Expected *BoolArray, got %T", v) } @@ -1193,9 +1189,7 @@ func TestGenericArrayValue(t *testing.T) { } func TestGenericArrayValueErrors(t *testing.T) { - var v []interface{} - - v = []interface{}{func() {}} + v := []interface{}{func() {}} if _, err := (GenericArray{v}).Value(); err == nil { t.Errorf("Expected error for %q, got nil", v) } diff --git a/vendor/github.com/lib/pq/bench_test.go b/vendor/github.com/lib/pq/bench_test.go index e71f41d0..33d7a02f 100644 --- a/vendor/github.com/lib/pq/bench_test.go +++ b/vendor/github.com/lib/pq/bench_test.go @@ -5,6 +5,7 @@ package pq import ( "bufio" "bytes" + "context" "database/sql" "database/sql/driver" "io" @@ -156,7 +157,7 @@ func benchMockQuery(b *testing.B, c *conn, query string) { b.Fatal(err) } defer stmt.Close() - rows, err := stmt.Query(nil) + rows, err := stmt.(driver.StmtQueryContext).QueryContext(context.Background(), nil) if err != nil { b.Fatal(err) } @@ -266,7 +267,7 @@ func BenchmarkMockPreparedSelectSeries(b *testing.B) { } func benchPreparedMockQuery(b *testing.B, c *conn, stmt driver.Stmt) { - rows, err := 
stmt.Query(nil) + rows, err := stmt.(driver.StmtQueryContext).QueryContext(context.Background(), nil) if err != nil { b.Fatal(err) } diff --git a/vendor/github.com/lib/pq/conn.go b/vendor/github.com/lib/pq/conn.go index 3b322bc0..43c8df29 100644 --- a/vendor/github.com/lib/pq/conn.go +++ b/vendor/github.com/lib/pq/conn.go @@ -27,16 +27,20 @@ var ( ErrNotSupported = errors.New("pq: Unsupported command") ErrInFailedTransaction = errors.New("pq: Could not complete operation in a failed transaction") ErrSSLNotSupported = errors.New("pq: SSL is not enabled on the server") - ErrSSLKeyHasWorldPermissions = errors.New("pq: Private key file has group or world access. Permissions should be u=rw (0600) or less.") - ErrCouldNotDetectUsername = errors.New("pq: Could not detect default username. Please provide one explicitly.") + ErrSSLKeyHasWorldPermissions = errors.New("pq: Private key file has group or world access. Permissions should be u=rw (0600) or less") + ErrCouldNotDetectUsername = errors.New("pq: Could not detect default username. Please provide one explicitly") errUnexpectedReady = errors.New("unexpected ReadyForQuery") errNoRowsAffected = errors.New("no RowsAffected available after the empty statement") - errNoLastInsertId = errors.New("no LastInsertId available after the empty statement") + errNoLastInsertID = errors.New("no LastInsertId available after the empty statement") ) +// Driver is the Postgres database driver. type Driver struct{} +// Open opens a new connection to the database. name is a connection string. +// Most users should only use it through database/sql package from the standard +// library. func (d *Driver) Open(name string) (driver.Conn, error) { return Open(name) } @@ -78,6 +82,8 @@ func (s transactionStatus) String() string { panic("not reached") } +// Dialer is the dialer interface. It can be used to obtain more control over +// how pq creates network connections. type Dialer interface { Dial(network, address string) (net.Conn, error) DialTimeout(network, address string, timeout time.Duration) (net.Conn, error) @@ -131,7 +137,7 @@ type conn struct { } // Handle driver-side settings in parsed connection string. 
-func (c *conn) handleDriverSettings(o values) (err error) { +func (cn *conn) handleDriverSettings(o values) (err error) { boolSetting := func(key string, val *bool) error { if value, ok := o[key]; ok { if value == "yes" { @@ -145,18 +151,14 @@ func (c *conn) handleDriverSettings(o values) (err error) { return nil } - err = boolSetting("disable_prepared_binary_result", &c.disablePreparedBinaryResult) + err = boolSetting("disable_prepared_binary_result", &cn.disablePreparedBinaryResult) if err != nil { return err } - err = boolSetting("binary_parameters", &c.binaryParameters) - if err != nil { - return err - } - return nil + return boolSetting("binary_parameters", &cn.binaryParameters) } -func (c *conn) handlePgpass(o values) { +func (cn *conn) handlePgpass(o values) { // if a password was supplied, do not process .pgpass if _, ok := o["password"]; ok { return @@ -165,11 +167,16 @@ func (c *conn) handlePgpass(o values) { if filename == "" { // XXX this code doesn't work on Windows where the default filename is // XXX %APPDATA%\postgresql\pgpass.conf - user, err := user.Current() - if err != nil { - return + // Prefer $HOME over user.Current due to glibc bug: golang.org/issue/13470 + userHome := os.Getenv("HOME") + if userHome == "" { + user, err := user.Current() + if err != nil { + return + } + userHome = user.HomeDir } - filename = filepath.Join(user.HomeDir, ".pgpass") + filename = filepath.Join(userHome, ".pgpass") } fileinfo, err := os.Stat(filename) if err != nil { @@ -229,18 +236,22 @@ func (c *conn) handlePgpass(o values) { } } -func (c *conn) writeBuf(b byte) *writeBuf { - c.scratch[0] = b +func (cn *conn) writeBuf(b byte) *writeBuf { + cn.scratch[0] = b return &writeBuf{ - buf: c.scratch[:5], + buf: cn.scratch[:5], pos: 1, } } +// Open opens a new connection to the database. name is a connection string. +// Most users should only use it through database/sql package from the standard +// library. func Open(name string) (_ driver.Conn, err error) { return DialOpen(defaultDialer{}, name) } +// DialOpen opens a new connection to the database using a dialer. func DialOpen(d Dialer, name string) (_ driver.Conn, err error) { // Handle any panics during connection initialization. Note that we // specifically do *not* want to use errRecover(), as that would turn any @@ -310,9 +321,8 @@ func DialOpen(d Dialer, name string) (_ driver.Conn, err error) { u, err := userCurrent() if err != nil { return nil, err - } else { - o["user"] = u } + o["user"] = u } cn := &conn{ @@ -329,7 +339,20 @@ func DialOpen(d Dialer, name string) (_ driver.Conn, err error) { if err != nil { return nil, err } - cn.ssl(o) + + err = cn.ssl(o) + if err != nil { + return nil, err + } + + // cn.startup panics on error. Make sure we don't leak cn.c. + panicking := true + defer func() { + if panicking { + cn.c.Close() + } + }() + cn.buf = bufio.NewReader(cn.c) cn.startup(o) @@ -337,6 +360,7 @@ func DialOpen(d Dialer, name string) (_ driver.Conn, err error) { if timeout, ok := o["connect_timeout"]; ok && timeout != "0" { err = cn.c.SetDeadline(time.Time{}) } + panicking = false return cn, err } @@ -698,7 +722,7 @@ var emptyRows noRows var _ driver.Result = noRows{} func (noRows) LastInsertId() (int64, error) { - return 0, errNoLastInsertId + return 0, errNoLastInsertID } func (noRows) RowsAffected() (int64, error) { @@ -707,7 +731,7 @@ func (noRows) RowsAffected() (int64, error) { // Decides which column formats to use for a prepared statement. The input is // an array of type oids, one element per result column. 
-func decideColumnFormats(colTyps []oid.Oid, forceText bool) (colFmts []format, colFmtData []byte) { +func decideColumnFormats(colTyps []fieldDesc, forceText bool) (colFmts []format, colFmtData []byte) { if len(colTyps) == 0 { return nil, colFmtDataAllText } @@ -719,8 +743,8 @@ func decideColumnFormats(colTyps []oid.Oid, forceText bool) (colFmts []format, c allBinary := true allText := true - for i, o := range colTyps { - switch o { + for i, t := range colTyps { + switch t.OID { // This is the list of types to use binary mode for when receiving them // through a prepared statement. If a type appears in this list, it // must also be implemented in binaryDecode in encode.go. @@ -840,16 +864,15 @@ func (cn *conn) query(query string, args []driver.Value) (_ *rows, err error) { rows.colNames, rows.colFmts, rows.colTyps = cn.readPortalDescribeResponse() cn.postExecuteWorkaround() return rows, nil - } else { - st := cn.prepareTo(query, "") - st.exec(args) - return &rows{ - cn: cn, - colNames: st.colNames, - colTyps: st.colTyps, - colFmts: st.colFmts, - }, nil } + st := cn.prepareTo(query, "") + st.exec(args) + return &rows{ + cn: cn, + colNames: st.colNames, + colTyps: st.colTyps, + colFmts: st.colFmts, + }, nil } // Implement the optional "Execer" interface for one-shot queries @@ -876,17 +899,16 @@ func (cn *conn) Exec(query string, args []driver.Value) (res driver.Result, err cn.postExecuteWorkaround() res, _, err = cn.readExecuteResponse("Execute") return res, err - } else { - // Use the unnamed statement to defer planning until bind - // time, or else value-based selectivity estimates cannot be - // used. - st := cn.prepareTo(query, "") - r, err := st.Exec(args) - if err != nil { - panic(err) - } - return r, err } + // Use the unnamed statement to defer planning until bind + // time, or else value-based selectivity estimates cannot be + // used. + st := cn.prepareTo(query, "") + r, err := st.Exec(args) + if err != nil { + panic(err) + } + return r, err } func (cn *conn) send(m *writeBuf) { @@ -1011,30 +1033,35 @@ func (cn *conn) recv1() (t byte, r *readBuf) { return t, r } -func (cn *conn) ssl(o values) { - upgrade := ssl(o) +func (cn *conn) ssl(o values) error { + upgrade, err := ssl(o) + if err != nil { + return err + } + if upgrade == nil { // Nothing to do - return + return nil } w := cn.writeBuf(0) w.int32(80877103) - if err := cn.sendStartupPacket(w); err != nil { - panic(err) + if err = cn.sendStartupPacket(w); err != nil { + return err } b := cn.scratch[:1] - _, err := io.ReadFull(cn.c, b) + _, err = io.ReadFull(cn.c, b) if err != nil { - panic(err) + return err } if b[0] != 'S' { - panic(ErrSSLNotSupported) + return ErrSSLNotSupported } - cn.c = upgrade(cn.c) + cn.c, err = upgrade(cn.c) + return err } // isDriverSetting returns true iff a setting is purely for configuring the @@ -1147,10 +1174,10 @@ const formatText format = 0 const formatBinary format = 1 // One result-column format code with the value 1 (i.e. all binary). -var colFmtDataAllBinary []byte = []byte{0, 1, 0, 1} +var colFmtDataAllBinary = []byte{0, 1, 0, 1} // No result-column format codes (i.e. all text). 
-var colFmtDataAllText []byte = []byte{0, 0} +var colFmtDataAllText = []byte{0, 0} type stmt struct { cn *conn @@ -1158,7 +1185,7 @@ type stmt struct { colNames []string colFmts []format colFmtData []byte - colTyps []oid.Oid + colTyps []fieldDesc paramTyps []oid.Oid closed bool } @@ -1321,7 +1348,7 @@ type rows struct { cn *conn finish func() colNames []string - colTyps []oid.Oid + colTyps []fieldDesc colFmts []format done bool rb readBuf @@ -1339,7 +1366,12 @@ func (rs *rows) Close() error { switch err { case nil: case io.EOF: - return nil + // rs.Next can return io.EOF on both 'Z' (ready for query) and 'T' (row + // description, used with HasNextResultSet). We need to fetch messages until + // we hit a 'Z', which is done by waiting for done to be set. + if rs.done { + return nil + } default: return err } @@ -1404,7 +1436,7 @@ func (rs *rows) Next(dest []driver.Value) (err error) { dest[i] = nil continue } - dest[i] = decode(&conn.parameterStatus, rs.rb.next(l), rs.colTyps[i], rs.colFmts[i]) + dest[i] = decode(&conn.parameterStatus, rs.rb.next(l), rs.colTyps[i].OID, rs.colFmts[i]) } return case 'T': @@ -1429,7 +1461,8 @@ func (rs *rows) NextResultSet() error { // // tblname := "my_table" // data := "my_data" -// err = db.Exec(fmt.Sprintf("INSERT INTO %s VALUES ($1)", pq.QuoteIdentifier(tblname)), data) +// quoted := pq.QuoteIdentifier(tblname) +// err := db.Exec(fmt.Sprintf("INSERT INTO %s VALUES ($1)", quoted), data) // // Any double quotes in name will be escaped. The quoted identifier will be // case sensitive when used in a query. If the input string contains a zero @@ -1510,7 +1543,7 @@ func (cn *conn) sendBinaryModeQuery(query string, args []driver.Value) { cn.send(b) } -func (c *conn) processParameterStatus(r *readBuf) { +func (cn *conn) processParameterStatus(r *readBuf) { var err error param := r.string() @@ -1521,13 +1554,13 @@ func (c *conn) processParameterStatus(r *readBuf) { var minor int _, err = fmt.Sscanf(r.string(), "%d.%d.%d", &major1, &major2, &minor) if err == nil { - c.parameterStatus.serverVersion = major1*10000 + major2*100 + minor + cn.parameterStatus.serverVersion = major1*10000 + major2*100 + minor } case "TimeZone": - c.parameterStatus.currentLocation, err = time.LoadLocation(r.string()) + cn.parameterStatus.currentLocation, err = time.LoadLocation(r.string()) if err != nil { - c.parameterStatus.currentLocation = nil + cn.parameterStatus.currentLocation = nil } default: @@ -1535,8 +1568,8 @@ func (c *conn) processParameterStatus(r *readBuf) { } } -func (c *conn) processReadyForQuery(r *readBuf) { - c.txnStatus = transactionStatus(r.byte()) +func (cn *conn) processReadyForQuery(r *readBuf) { + cn.txnStatus = transactionStatus(r.byte()) } func (cn *conn) readReadyForQuery() { @@ -1551,9 +1584,9 @@ func (cn *conn) readReadyForQuery() { } } -func (c *conn) processBackendKeyData(r *readBuf) { - c.processID = r.int32() - c.secretKey = r.int32() +func (cn *conn) processBackendKeyData(r *readBuf) { + cn.processID = r.int32() + cn.secretKey = r.int32() } func (cn *conn) readParseResponse() { @@ -1571,7 +1604,7 @@ func (cn *conn) readParseResponse() { } } -func (cn *conn) readStatementDescribeResponse() (paramTyps []oid.Oid, colNames []string, colTyps []oid.Oid) { +func (cn *conn) readStatementDescribeResponse() (paramTyps []oid.Oid, colNames []string, colTyps []fieldDesc) { for { t, r := cn.recv1() switch t { @@ -1597,7 +1630,7 @@ func (cn *conn) readStatementDescribeResponse() (paramTyps []oid.Oid, colNames [ } } -func (cn *conn) readPortalDescribeResponse() (colNames 
[]string, colFmts []format, colTyps []oid.Oid) { +func (cn *conn) readPortalDescribeResponse() (colNames []string, colFmts []format, colTyps []fieldDesc) { t, r := cn.recv1() switch t { case 'T': @@ -1693,31 +1726,33 @@ func (cn *conn) readExecuteResponse(protocolState string) (res driver.Result, co } } -func parseStatementRowDescribe(r *readBuf) (colNames []string, colTyps []oid.Oid) { +func parseStatementRowDescribe(r *readBuf) (colNames []string, colTyps []fieldDesc) { n := r.int16() colNames = make([]string, n) - colTyps = make([]oid.Oid, n) + colTyps = make([]fieldDesc, n) for i := range colNames { colNames[i] = r.string() r.next(6) - colTyps[i] = r.oid() - r.next(6) + colTyps[i].OID = r.oid() + colTyps[i].Len = r.int16() + colTyps[i].Mod = r.int32() // format code not known when describing a statement; always 0 r.next(2) } return } -func parsePortalRowDescribe(r *readBuf) (colNames []string, colFmts []format, colTyps []oid.Oid) { +func parsePortalRowDescribe(r *readBuf) (colNames []string, colFmts []format, colTyps []fieldDesc) { n := r.int16() colNames = make([]string, n) colFmts = make([]format, n) - colTyps = make([]oid.Oid, n) + colTyps = make([]fieldDesc, n) for i := range colNames { colNames[i] = r.string() r.next(6) - colTyps[i] = r.oid() - r.next(6) + colTyps[i].OID = r.oid() + colTyps[i].Len = r.int16() + colTyps[i].Mod = r.int32() colFmts[i] = format(r.int16()) } return diff --git a/vendor/github.com/lib/pq/conn_go18.go b/vendor/github.com/lib/pq/conn_go18.go index ab97a104..a5254f2b 100644 --- a/vendor/github.com/lib/pq/conn_go18.go +++ b/vendor/github.com/lib/pq/conn_go18.go @@ -108,7 +108,10 @@ func (cn *conn) cancel() error { can := conn{ c: c, } - can.ssl(cn.opts) + err = can.ssl(cn.opts) + if err != nil { + return err + } w := can.writeBuf(0) w.int32(80877102) // cancel request code diff --git a/vendor/github.com/lib/pq/conn_test.go b/vendor/github.com/lib/pq/conn_test.go index 8c6187fc..e654b85b 100644 --- a/vendor/github.com/lib/pq/conn_test.go +++ b/vendor/github.com/lib/pq/conn_test.go @@ -1,6 +1,7 @@ package pq import ( + "context" "database/sql" "database/sql/driver" "fmt" @@ -28,7 +29,7 @@ func forceBinaryParameters() bool { } } -func openTestConnConninfo(conninfo string) (*sql.DB, error) { +func testConninfo(conninfo string) string { defaultTo := func(envvar string, value string) { if os.Getenv(envvar) == "" { os.Setenv(envvar, value) @@ -43,8 +44,11 @@ func openTestConnConninfo(conninfo string) (*sql.DB, error) { !strings.HasPrefix(conninfo, "postgresql://") { conninfo = conninfo + " binary_parameters=yes" } + return conninfo +} - return sql.Open("postgres", conninfo) +func openTestConnConninfo(conninfo string) (*sql.DB, error) { + return sql.Open("postgres", testConninfo(conninfo)) } func openTestConn(t Fatalistic) *sql.DB { @@ -136,7 +140,7 @@ func TestOpenURL(t *testing.T) { testURL("postgresql://") } -const pgpass_file = "/tmp/pqgotest_pgpass" +const pgpassFile = "/tmp/pqgotest_pgpass" func TestPgpass(t *testing.T) { if os.Getenv("TRAVIS") != "true" { @@ -172,10 +176,10 @@ func TestPgpass(t *testing.T) { txn.Rollback() } testAssert("", "ok", "missing .pgpass, unexpected error %#v") - os.Setenv("PGPASSFILE", pgpass_file) + os.Setenv("PGPASSFILE", pgpassFile) testAssert("host=/tmp", "fail", ", unexpected error %#v") - os.Remove(pgpass_file) - pgpass, err := os.OpenFile(pgpass_file, os.O_RDWR|os.O_CREATE, 0644) + os.Remove(pgpassFile) + pgpass, err := os.OpenFile(pgpassFile, os.O_RDWR|os.O_CREATE, 0644) if err != nil { t.Fatalf("Unexpected error writing 
pgpass file %#v", err) } @@ -213,7 +217,7 @@ localhost:*:*:*:pass_C // wrong permissions for the pgpass file means it should be ignored assertPassword(values{"host": "example.com", "user": "foo"}, "") // fix the permissions and check if it has taken effect - os.Chmod(pgpass_file, 0600) + os.Chmod(pgpassFile, 0600) assertPassword(values{"host": "server", "dbname": "some_db", "user": "some_user"}, "pass_A") assertPassword(values{"host": "example.com", "user": "foo"}, "pass_fallback") assertPassword(values{"host": "example.com", "dbname": "some_db", "user": "some_user"}, "pass_B") @@ -221,7 +225,7 @@ localhost:*:*:*:pass_C assertPassword(values{"host": "", "user": "some_user"}, "pass_C") assertPassword(values{"host": "/tmp", "user": "some_user"}, "pass_C") // cleanup - os.Remove(pgpass_file) + os.Remove(pgpassFile) os.Setenv("PGPASSFILE", "") } @@ -393,8 +397,8 @@ func TestEmptyQuery(t *testing.T) { if _, err := res.RowsAffected(); err != errNoRowsAffected { t.Fatalf("expected %s, got %v", errNoRowsAffected, err) } - if _, err := res.LastInsertId(); err != errNoLastInsertId { - t.Fatalf("expected %s, got %v", errNoLastInsertId, err) + if _, err := res.LastInsertId(); err != errNoLastInsertID { + t.Fatalf("expected %s, got %v", errNoLastInsertID, err) } rows, err := db.Query("") if err != nil { @@ -425,8 +429,8 @@ func TestEmptyQuery(t *testing.T) { if _, err := res.RowsAffected(); err != errNoRowsAffected { t.Fatalf("expected %s, got %v", errNoRowsAffected, err) } - if _, err := res.LastInsertId(); err != errNoLastInsertId { - t.Fatalf("expected %s, got %v", errNoLastInsertId, err) + if _, err := res.LastInsertId(); err != errNoLastInsertID { + t.Fatalf("expected %s, got %v", errNoLastInsertID, err) } rows, err = stmt.Query() if err != nil { @@ -637,6 +641,57 @@ func TestErrorDuringStartup(t *testing.T) { } } +type testConn struct { + closed bool + net.Conn +} + +func (c *testConn) Close() error { + c.closed = true + return c.Conn.Close() +} + +type testDialer struct { + conns []*testConn +} + +func (d *testDialer) Dial(ntw, addr string) (net.Conn, error) { + c, err := net.Dial(ntw, addr) + if err != nil { + return nil, err + } + tc := &testConn{Conn: c} + d.conns = append(d.conns, tc) + return tc, nil +} + +func (d *testDialer) DialTimeout(ntw, addr string, timeout time.Duration) (net.Conn, error) { + c, err := net.DialTimeout(ntw, addr, timeout) + if err != nil { + return nil, err + } + tc := &testConn{Conn: c} + d.conns = append(d.conns, tc) + return tc, nil +} + +func TestErrorDuringStartupClosesConn(t *testing.T) { + // Don't use the normal connection setup, this is intended to + // blow up in the startup packet from a non-existent user. + var d testDialer + c, err := DialOpen(&d, testConninfo("user=thisuserreallydoesntexist")) + if err == nil { + c.Close() + t.Fatal("expected dial error") + } + if len(d.conns) != 1 { + t.Fatalf("got len(d.conns) = %d, want = %d", len(d.conns), 1) + } + if !d.conns[0].closed { + t.Error("connection leaked") + } +} + func TestBadConn(t *testing.T) { var err error @@ -935,12 +990,14 @@ func TestParseErrorInExtendedQuery(t *testing.T) { db := openTestConn(t) defer db.Close() - rows, err := db.Query("PARSE_ERROR $1", 1) - if err == nil { - t.Fatal("expected error") + _, err := db.Query("PARSE_ERROR $1", 1) + pqErr, _ := err.(*Error) + // Expecting a syntax error. 
+ if err == nil || pqErr == nil || pqErr.Code != "42601" { + t.Fatalf("expected syntax error, got %s", err) } - rows, err = db.Query("SELECT 1") + rows, err := db.Query("SELECT 1") if err != nil { t.Fatal(err) } @@ -1053,16 +1110,16 @@ func TestIssue282(t *testing.T) { db := openTestConn(t) defer db.Close() - var search_path string + var searchPath string err := db.QueryRow(` SET LOCAL search_path TO pg_catalog; SET LOCAL search_path TO pg_catalog; - SHOW search_path`).Scan(&search_path) + SHOW search_path`).Scan(&searchPath) if err != nil { t.Fatal(err) } - if search_path != "pg_catalog" { - t.Fatalf("unexpected search_path %s", search_path) + if searchPath != "pg_catalog" { + t.Fatalf("unexpected search_path %s", searchPath) } } @@ -1205,16 +1262,11 @@ func TestParseComplete(t *testing.T) { tpc("SELECT foo", "", 0, true) // invalid row count } -func TestExecerInterface(t *testing.T) { - // Gin up a straw man private struct just for the type check - cn := &conn{c: nil} - var cni interface{} = cn - - _, ok := cni.(driver.Execer) - if !ok { - t.Fatal("Driver doesn't implement Execer") - } -} +// Test interface conformance. +var ( + _ driver.ExecerContext = (*conn)(nil) + _ driver.QueryerContext = (*conn)(nil) +) func TestNullAfterNonNull(t *testing.T) { db := openTestConn(t) @@ -1392,36 +1444,29 @@ func TestParseOpts(t *testing.T) { } func TestRuntimeParameters(t *testing.T) { - type RuntimeTestResult int - const ( - ResultUnknown RuntimeTestResult = iota - ResultSuccess - ResultError // other error - ) - tests := []struct { - conninfo string - param string - expected string - expectedOutcome RuntimeTestResult + conninfo string + param string + expected string + success bool }{ // invalid parameter - {"DOESNOTEXIST=foo", "", "", ResultError}, + {"DOESNOTEXIST=foo", "", "", false}, // we can only work with a specific value for these two - {"client_encoding=SQL_ASCII", "", "", ResultError}, - {"datestyle='ISO, YDM'", "", "", ResultError}, + {"client_encoding=SQL_ASCII", "", "", false}, + {"datestyle='ISO, YDM'", "", "", false}, // "options" should work exactly as it does in libpq - {"options='-c search_path=pqgotest'", "search_path", "pqgotest", ResultSuccess}, + {"options='-c search_path=pqgotest'", "search_path", "pqgotest", true}, // pq should override client_encoding in this case - {"options='-c client_encoding=SQL_ASCII'", "client_encoding", "UTF8", ResultSuccess}, + {"options='-c client_encoding=SQL_ASCII'", "client_encoding", "UTF8", true}, // allow client_encoding to be set explicitly - {"client_encoding=UTF8", "client_encoding", "UTF8", ResultSuccess}, + {"client_encoding=UTF8", "client_encoding", "UTF8", true}, // test a runtime parameter not supported by libpq - {"work_mem='139kB'", "work_mem", "139kB", ResultSuccess}, + {"work_mem='139kB'", "work_mem", "139kB", true}, // test fallback_application_name - {"application_name=foo fallback_application_name=bar", "application_name", "foo", ResultSuccess}, - {"application_name='' fallback_application_name=bar", "application_name", "", ResultSuccess}, - {"fallback_application_name=bar", "application_name", "bar", ResultSuccess}, + {"application_name=foo fallback_application_name=bar", "application_name", "foo", true}, + {"application_name='' fallback_application_name=bar", "application_name", "", true}, + {"fallback_application_name=bar", "application_name", "bar", true}, } for _, test := range tests { @@ -1436,23 +1481,23 @@ func TestRuntimeParameters(t *testing.T) { continue } - tryGetParameterValue := func() (value string, outcome 
RuntimeTestResult) { + tryGetParameterValue := func() (value string, success bool) { defer db.Close() row := db.QueryRow("SELECT current_setting($1)", test.param) err = row.Scan(&value) if err != nil { - return "", ResultError + return "", false } - return value, ResultSuccess + return value, true } - value, outcome := tryGetParameterValue() - if outcome != test.expectedOutcome && outcome == ResultError { + value, success := tryGetParameterValue() + if success != test.success && !test.success { t.Fatalf("%v: unexpected error: %v", test.conninfo, err) } - if outcome != test.expectedOutcome { + if success != test.success { t.Fatalf("unexpected outcome %v (was expecting %v) for conninfo \"%s\"", - outcome, test.expectedOutcome, test.conninfo) + success, test.success, test.conninfo) } if value != test.expected { t.Fatalf("bad value for %s: got %s, want %s with conninfo \"%s\"", @@ -1565,10 +1610,10 @@ func TestRowsResultTag(t *testing.T) { t.Fatal(err) } defer conn.Close() - q := conn.(driver.Queryer) + q := conn.(driver.QueryerContext) for _, test := range tests { - if rows, err := q.Query(test.query, nil); err != nil { + if rows, err := q.QueryContext(context.Background(), test.query, nil); err != nil { t.Fatalf("%s: %s", test.query, err) } else { r := rows.(ResultTag) @@ -1583,3 +1628,32 @@ func TestRowsResultTag(t *testing.T) { } } } + +// TestQuickClose tests that closing a query early allows a subsequent query to work. +func TestQuickClose(t *testing.T) { + db := openTestConn(t) + defer db.Close() + + tx, err := db.Begin() + if err != nil { + t.Fatal(err) + } + rows, err := tx.Query("SELECT 1; SELECT 2;") + if err != nil { + t.Fatal(err) + } + if err := rows.Close(); err != nil { + t.Fatal(err) + } + + var id int + if err := tx.QueryRow("SELECT 3").Scan(&id); err != nil { + t.Fatal(err) + } + if id != 3 { + t.Fatalf("unexpected %d", id) + } + if err := tx.Commit(); err != nil { + t.Fatal(err) + } +} diff --git a/vendor/github.com/lib/pq/copy_test.go b/vendor/github.com/lib/pq/copy_test.go index 86745b38..a888a894 100644 --- a/vendor/github.com/lib/pq/copy_test.go +++ b/vendor/github.com/lib/pq/copy_test.go @@ -4,13 +4,13 @@ import ( "bytes" "database/sql" "database/sql/driver" + "net" "strings" "testing" ) func TestCopyInStmt(t *testing.T) { - var stmt string - stmt = CopyIn("table name") + stmt := CopyIn("table name") if stmt != `COPY "table name" () FROM STDIN` { t.Fatal(stmt) } @@ -27,8 +27,7 @@ func TestCopyInStmt(t *testing.T) { } func TestCopyInSchemaStmt(t *testing.T) { - var stmt string - stmt = CopyInSchema("schema name", "table name") + stmt := CopyInSchema("schema name", "table name") if stmt != `COPY "schema name"."table name" () FROM STDIN` { t.Fatal(stmt) } @@ -226,7 +225,7 @@ func TestCopyInTypes(t *testing.T) { if text != "Héllö\n ☃!\r\t\\" { t.Fatal("unexpected result", text) } - if bytes.Compare(blob, []byte{0, 255, 9, 10, 13}) != 0 { + if !bytes.Equal(blob, []byte{0, 255, 9, 10, 13}) { t.Fatal("unexpected result", blob) } if nothing.Valid { @@ -402,15 +401,19 @@ func TestCopyRespLoopConnectionError(t *testing.T) { if err == nil { t.Fatalf("expected error") } - pge, ok := err.(*Error) - if !ok { + switch pge := err.(type) { + case *Error: + if pge.Code.Name() != "admin_shutdown" { + t.Fatalf("expected admin_shutdown, got %s", pge.Code.Name()) + } + case *net.OpError: + // ignore + default: if err == driver.ErrBadConn { // likely an EPIPE } else { - t.Fatalf("expected *pq.Error or driver.ErrBadConn, got %+#v", err) + t.Fatalf("unexpected error, got %+#v", err) } - } else 
if pge.Code.Name() != "admin_shutdown" { - t.Fatalf("expected admin_shutdown, got %s", pge.Code.Name()) } _ = stmt.Close() diff --git a/vendor/github.com/lib/pq/doc.go b/vendor/github.com/lib/pq/doc.go index 6d252ece..a1b02971 100644 --- a/vendor/github.com/lib/pq/doc.go +++ b/vendor/github.com/lib/pq/doc.go @@ -11,7 +11,8 @@ using this package directly. For example: ) func main() { - db, err := sql.Open("postgres", "user=pqgotest dbname=pqgotest sslmode=verify-full") + connStr := "user=pqgotest dbname=pqgotest sslmode=verify-full" + db, err := sql.Open("postgres", connStr) if err != nil { log.Fatal(err) } @@ -23,7 +24,8 @@ using this package directly. For example: You can also connect to a database using a URL. For example: - db, err := sql.Open("postgres", "postgres://pqgotest:password@localhost/pqgotest?sslmode=verify-full") + connStr := "postgres://pqgotest:password@localhost/pqgotest?sslmode=verify-full" + db, err := sql.Open("postgres", connStr) Connection String Parameters @@ -43,21 +45,28 @@ supported: * dbname - The name of the database to connect to * user - The user to sign in as * password - The user's password - * host - The host to connect to. Values that start with / are for unix domain sockets. (default is localhost) + * host - The host to connect to. Values that start with / are for unix + domain sockets. (default is localhost) * port - The port to bind to. (default is 5432) - * sslmode - Whether or not to use SSL (default is require, this is not the default for libpq) + * sslmode - Whether or not to use SSL (default is require, this is not + the default for libpq) * fallback_application_name - An application_name to fall back to if one isn't provided. - * connect_timeout - Maximum wait for connection, in seconds. Zero or not specified means wait indefinitely. + * connect_timeout - Maximum wait for connection, in seconds. Zero or + not specified means wait indefinitely. * sslcert - Cert file location. The file must contain PEM encoded data. * sslkey - Key file location. The file must contain PEM encoded data. - * sslrootcert - The location of the root certificate file. The file must contain PEM encoded data. + * sslrootcert - The location of the root certificate file. The file + must contain PEM encoded data. Valid values for sslmode are: * disable - No SSL * require - Always SSL (skip verification) - * verify-ca - Always SSL (verify that the certificate presented by the server was signed by a trusted CA) - * verify-full - Always SSL (verify that the certification presented by the server was signed by a trusted CA and the server host name matches the one in the certificate) + * verify-ca - Always SSL (verify that the certificate presented by the + server was signed by a trusted CA) + * verify-full - Always SSL (verify that the certification presented by + the server was signed by a trusted CA and the server host name + matches the one in the certificate) See http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING for more information about connection string parameters. 
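Illustrative sketch, not part of the patch: the doc.go hunk above only rewraps the connection-string documentation, so behaviour is unchanged. A minimal use of the documented parameters with lib/pq might look like the following; the host, database, user, and application names are placeholders.

	package main

	import (
		"database/sql"
		"log"

		_ "github.com/lib/pq"
	)

	func main() {
		// Hypothetical DSN exercising the parameters documented above.
		connStr := "host=localhost port=5432 user=pqgotest dbname=pqgotest " +
			"sslmode=require connect_timeout=10 fallback_application_name=myapp"
		db, err := sql.Open("postgres", connStr)
		if err != nil {
			log.Fatal(err)
		}
		defer db.Close()

		// sql.Open does not connect; Ping forces a real connection attempt so
		// that bad connection parameters surface immediately.
		if err := db.Ping(); err != nil {
			log.Fatal(err)
		}
	}
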
@@ -68,7 +77,7 @@ Use single quotes for values that contain whitespace: A backslash will escape the next character in values: - "user=space\ man password='it\'s valid' + "user=space\ man password='it\'s valid'" Note that the connection parameter client_encoding (which sets the text encoding for the connection) may be set but must be "UTF8", @@ -129,7 +138,8 @@ This package returns the following types for values from the PostgreSQL backend: - integer types smallint, integer, and bigint are returned as int64 - floating-point types real and double precision are returned as float64 - character types char, varchar, and text are returned as string - - temporal types date, time, timetz, timestamp, and timestamptz are returned as time.Time + - temporal types date, time, timetz, timestamp, and timestamptz are + returned as time.Time - the boolean type is returned as bool - the bytea type is returned as []byte @@ -229,7 +239,7 @@ for more information). Note that the channel name will be truncated to 63 bytes by the PostgreSQL server. You can find a complete, working example of Listener usage at -http://godoc.org/github.com/lib/pq/listen_example. +http://godoc.org/github.com/lib/pq/example/listen. */ package pq diff --git a/vendor/github.com/lib/pq/encode.go b/vendor/github.com/lib/pq/encode.go index 88a322cd..3b0d365f 100644 --- a/vendor/github.com/lib/pq/encode.go +++ b/vendor/github.com/lib/pq/encode.go @@ -367,8 +367,15 @@ func ParseTimestamp(currentLocation *time.Location, str string) (time.Time, erro timeSep := daySep + 3 day := p.mustAtoi(str, daySep+1, timeSep) + minLen := monSep + len("01-01") + 1 + + isBC := strings.HasSuffix(str, " BC") + if isBC { + minLen += 3 + } + var hour, minute, second int - if len(str) > monSep+len("01-01")+1 { + if len(str) > minLen { p.expect(str, ' ', timeSep) minSep := timeSep + 3 p.expect(str, ':', minSep) @@ -424,7 +431,8 @@ func ParseTimestamp(currentLocation *time.Location, str string) (time.Time, erro tzOff = tzSign * ((tzHours * 60 * 60) + (tzMin * 60) + tzSec) } var isoYear int - if remainderIdx+3 <= len(str) && str[remainderIdx:remainderIdx+3] == " BC" { + + if isBC { isoYear = 1 - year remainderIdx += 3 } else { diff --git a/vendor/github.com/lib/pq/encode_test.go b/vendor/github.com/lib/pq/encode_test.go index b1531ec2..d58798a4 100644 --- a/vendor/github.com/lib/pq/encode_test.go +++ b/vendor/github.com/lib/pq/encode_test.go @@ -4,7 +4,7 @@ import ( "bytes" "database/sql" "fmt" - "strings" + "regexp" "testing" "time" @@ -37,6 +37,8 @@ var timeTests = []struct { }{ {"22001-02-03", time.Date(22001, time.February, 3, 0, 0, 0, 0, time.FixedZone("", 0))}, {"2001-02-03", time.Date(2001, time.February, 3, 0, 0, 0, 0, time.FixedZone("", 0))}, + {"0001-12-31 BC", time.Date(0, time.December, 31, 0, 0, 0, 0, time.FixedZone("", 0))}, + {"2001-02-03 BC", time.Date(-2000, time.February, 3, 0, 0, 0, 0, time.FixedZone("", 0))}, {"2001-02-03 04:05:06", time.Date(2001, time.February, 3, 4, 5, 6, 0, time.FixedZone("", 0))}, {"2001-02-03 04:05:06.000001", time.Date(2001, time.February, 3, 4, 5, 6, 1000, time.FixedZone("", 0))}, {"2001-02-03 04:05:06.00001", time.Date(2001, time.February, 3, 4, 5, 6, 10000, time.FixedZone("", 0))}, @@ -86,15 +88,22 @@ func TestParseTs(t *testing.T) { } var timeErrorTests = []string{ + "BC", + " BC", "2001", "2001-2-03", "2001-02-3", "2001-02-03 ", + "2001-02-03 B", "2001-02-03 04", "2001-02-03 04:", "2001-02-03 04:05", + "2001-02-03 04:05 B", + "2001-02-03 04:05 BC", "2001-02-03 04:05:", "2001-02-03 04:05:6", + "2001-02-03 04:05:06 B", + 
"2001-02-03 04:05:06BC", "2001-02-03 04:05:06.123 B", } @@ -258,9 +267,7 @@ func TestTimestampWithOutTimezone(t *testing.T) { t.Fatalf("Could not run query: %v", err) } - n := r.Next() - - if n != true { + if !r.Next() { t.Fatal("Expected at least one row") } @@ -280,8 +287,7 @@ func TestTimestampWithOutTimezone(t *testing.T) { expected, result) } - n = r.Next() - if n != false { + if r.Next() { t.Fatal("Expected only one row") } } @@ -298,24 +304,27 @@ func TestInfinityTimestamp(t *testing.T) { var err error var resultT time.Time - expectedErrorStrPrefix := `sql: Scan error on column index 0: unsupported` + expectedErrorStrRegexp := regexp.MustCompile( + `^sql: Scan error on column index 0(, name "timestamp(tz)?"|): unsupported`) + type testCases []struct { - Query string - Param string - ExpectedErrStrPrefix string - ExpectedVal interface{} + Query string + Param string + ExpectedErrorStrRegexp *regexp.Regexp + ExpectedVal interface{} } tc := testCases{ - {"SELECT $1::timestamp", "-infinity", expectedErrorStrPrefix, "-infinity"}, - {"SELECT $1::timestamptz", "-infinity", expectedErrorStrPrefix, "-infinity"}, - {"SELECT $1::timestamp", "infinity", expectedErrorStrPrefix, "infinity"}, - {"SELECT $1::timestamptz", "infinity", expectedErrorStrPrefix, "infinity"}, + {"SELECT $1::timestamp", "-infinity", expectedErrorStrRegexp, "-infinity"}, + {"SELECT $1::timestamptz", "-infinity", expectedErrorStrRegexp, "-infinity"}, + {"SELECT $1::timestamp", "infinity", expectedErrorStrRegexp, "infinity"}, + {"SELECT $1::timestamptz", "infinity", expectedErrorStrRegexp, "infinity"}, } // try to assert []byte to time.Time for _, q := range tc { err = db.QueryRow(q.Query, q.Param).Scan(&resultT) - if !strings.HasPrefix(err.Error(), q.ExpectedErrStrPrefix) { - t.Errorf("Scanning -/+infinity, expected error to have prefix %q, got %q", q.ExpectedErrStrPrefix, err) + if !q.ExpectedErrorStrRegexp.MatchString(err.Error()) { + t.Errorf("Scanning -/+infinity, expected error to match regexp %q, got %q", + q.ExpectedErrorStrRegexp, err) } } // yield []byte @@ -370,17 +379,17 @@ func TestInfinityTimestamp(t *testing.T) { t.Errorf("Scanning -infinity, expected time %q, got %q", y1500, resultT.String()) } - y_1500 := time.Date(-1500, time.January, 1, 0, 0, 0, 0, time.UTC) + ym1500 := time.Date(-1500, time.January, 1, 0, 0, 0, 0, time.UTC) y11500 := time.Date(11500, time.January, 1, 0, 0, 0, 0, time.UTC) var s string - err = db.QueryRow("SELECT $1::timestamp::text", y_1500).Scan(&s) + err = db.QueryRow("SELECT $1::timestamp::text", ym1500).Scan(&s) if err != nil { t.Errorf("Encoding -infinity, expected no error, got %q", err) } if s != "-infinity" { t.Errorf("Encoding -infinity, expected %q, got %q", "-infinity", s) } - err = db.QueryRow("SELECT $1::timestamptz::text", y_1500).Scan(&s) + err = db.QueryRow("SELECT $1::timestamptz::text", ym1500).Scan(&s) if err != nil { t.Errorf("Encoding -infinity, expected no error, got %q", err) } @@ -722,8 +731,7 @@ func TestAppendEscapedText(t *testing.T) { } func TestAppendEscapedTextExistingBuffer(t *testing.T) { - var buf []byte - buf = []byte("123\t") + buf := []byte("123\t") if esc := appendEscapedText(buf, "hallo\tescape"); string(esc) != "123\thallo\\tescape" { t.Fatal(string(esc)) } diff --git a/vendor/github.com/lib/pq/error.go b/vendor/github.com/lib/pq/error.go index b4bb44ce..96aae29c 100644 --- a/vendor/github.com/lib/pq/error.go +++ b/vendor/github.com/lib/pq/error.go @@ -153,6 +153,7 @@ var errorCodeNames = map[ErrorCode]string{ "22004": "null_value_not_allowed", 
"22002": "null_value_no_indicator_parameter", "22003": "numeric_value_out_of_range", + "2200H": "sequence_generator_limit_exceeded", "22026": "string_data_length_mismatch", "22001": "string_data_right_truncation", "22011": "substring_error", @@ -459,6 +460,11 @@ func errorf(s string, args ...interface{}) { panic(fmt.Errorf("pq: %s", fmt.Sprintf(s, args...))) } +// TODO(ainar-g) Rename to errorf after removing panics. +func fmterrorf(s string, args ...interface{}) error { + return fmt.Errorf("pq: %s", fmt.Sprintf(s, args...)) +} + func errRecoverNoErrBadConn(err *error) { e := recover() if e == nil { @@ -487,7 +493,8 @@ func (c *conn) errRecover(err *error) { *err = v } case *net.OpError: - *err = driver.ErrBadConn + c.bad = true + *err = v case error: if v == io.EOF || v.(error).Error() == "remote error: handshake failure" { *err = driver.ErrBadConn diff --git a/vendor/github.com/lib/pq/listen_example/doc.go b/vendor/github.com/lib/pq/example/listen/doc.go similarity index 90% rename from vendor/github.com/lib/pq/listen_example/doc.go rename to vendor/github.com/lib/pq/example/listen/doc.go index 80f0a9b9..91e2ddba 100644 --- a/vendor/github.com/lib/pq/listen_example/doc.go +++ b/vendor/github.com/lib/pq/example/listen/doc.go @@ -1,6 +1,6 @@ /* -Below you will find a self-contained Go program which uses the LISTEN / NOTIFY +Package listen is a self-contained Go program which uses the LISTEN / NOTIFY mechanism to avoid polling the database while waiting for more work to arrive. // @@ -77,7 +77,9 @@ mechanism to avoid polling the database while waiting for more work to arrive. } } - listener := pq.NewListener(conninfo, 10 * time.Second, time.Minute, reportProblem) + minReconn := 10 * time.Second + maxReconn := time.Minute + listener := pq.NewListener(conninfo, minReconn, maxReconn, reportProblem) err = listener.Listen("getwork") if err != nil { panic(err) @@ -93,4 +95,4 @@ mechanism to avoid polling the database while waiting for more work to arrive. */ -package listen_example +package listen diff --git a/vendor/github.com/lib/pq/go18_test.go b/vendor/github.com/lib/pq/go18_test.go index 4bf6391e..1a88a5b4 100644 --- a/vendor/github.com/lib/pq/go18_test.go +++ b/vendor/github.com/lib/pq/go18_test.go @@ -228,7 +228,9 @@ func TestContextCancelBegin(t *testing.T) { cancel() if err != nil { t.Fatal(err) - } else if err := tx.Rollback(); err != nil && err != sql.ErrTxDone { + } else if err := tx.Rollback(); err != nil && + err.Error() != "pq: canceling statement due to user request" && + err != sql.ErrTxDone { t.Fatal(err) } }() diff --git a/vendor/github.com/lib/pq/hstore/hstore.go b/vendor/github.com/lib/pq/hstore/hstore.go index 72d5abf5..f1470db1 100644 --- a/vendor/github.com/lib/pq/hstore/hstore.go +++ b/vendor/github.com/lib/pq/hstore/hstore.go @@ -6,7 +6,7 @@ import ( "strings" ) -// A wrapper for transferring Hstore values back and forth easily. +// Hstore is a wrapper for transferring Hstore values back and forth easily. type Hstore struct { Map map[string]sql.NullString } diff --git a/vendor/github.com/lib/pq/notify.go b/vendor/github.com/lib/pq/notify.go index 09f94244..947d189f 100644 --- a/vendor/github.com/lib/pq/notify.go +++ b/vendor/github.com/lib/pq/notify.go @@ -60,7 +60,7 @@ type ListenerConn struct { replyChan chan message } -// Creates a new ListenerConn. Use NewListener instead. +// NewListenerConn creates a new ListenerConn. Use NewListener instead. 
func NewListenerConn(name string, notificationChan chan<- *Notification) (*ListenerConn, error) { return newDialListenerConn(defaultDialer{}, name, notificationChan) } @@ -214,17 +214,17 @@ func (l *ListenerConn) listenerConnMain() { // this ListenerConn is done } -// Send a LISTEN query to the server. See ExecSimpleQuery. +// Listen sends a LISTEN query to the server. See ExecSimpleQuery. func (l *ListenerConn) Listen(channel string) (bool, error) { return l.ExecSimpleQuery("LISTEN " + QuoteIdentifier(channel)) } -// Send an UNLISTEN query to the server. See ExecSimpleQuery. +// Unlisten sends an UNLISTEN query to the server. See ExecSimpleQuery. func (l *ListenerConn) Unlisten(channel string) (bool, error) { return l.ExecSimpleQuery("UNLISTEN " + QuoteIdentifier(channel)) } -// Send `UNLISTEN *` to the server. See ExecSimpleQuery. +// UnlistenAll sends an `UNLISTEN *` query to the server. See ExecSimpleQuery. func (l *ListenerConn) UnlistenAll() (bool, error) { return l.ExecSimpleQuery("UNLISTEN *") } @@ -267,8 +267,8 @@ func (l *ListenerConn) sendSimpleQuery(q string) (err error) { return nil } -// Execute a "simple query" (i.e. one with no bindable parameters) on the -// connection. The possible return values are: +// ExecSimpleQuery executes a "simple query" (i.e. one with no bindable +// parameters) on the connection. The possible return values are: // 1) "executed" is true; the query was executed to completion on the // database server. If the query failed, err will be set to the error // returned by the database, otherwise err will be nil. @@ -333,6 +333,7 @@ func (l *ListenerConn) ExecSimpleQuery(q string) (executed bool, err error) { } } +// Close closes the connection. func (l *ListenerConn) Close() error { l.connectionLock.Lock() if l.err != nil { @@ -346,7 +347,7 @@ func (l *ListenerConn) Close() error { return l.cn.c.Close() } -// Err() returns the reason the connection was closed. It is not safe to call +// Err returns the reason the connection was closed. It is not safe to call // this function until l.Notify has been closed. func (l *ListenerConn) Err() error { return l.err @@ -354,32 +355,43 @@ func (l *ListenerConn) Err() error { var errListenerClosed = errors.New("pq: Listener has been closed") +// ErrChannelAlreadyOpen is returned from Listen when a channel is already +// open. var ErrChannelAlreadyOpen = errors.New("pq: channel is already open") + +// ErrChannelNotOpen is returned from Unlisten when a channel is not open. var ErrChannelNotOpen = errors.New("pq: channel is not open") +// ListenerEventType is an enumeration of listener event types. type ListenerEventType int const ( - // Emitted only when the database connection has been initially - // initialized. err will always be nil. + // ListenerEventConnected is emitted only when the database connection + // has been initially initialized. The err argument of the callback + // will always be nil. ListenerEventConnected ListenerEventType = iota - // Emitted after a database connection has been lost, either because of an - // error or because Close has been called. err will be set to the reason - // the database connection was lost. + // ListenerEventDisconnected is emitted after a database connection has + // been lost, either because of an error or because Close has been + // called. The err argument will be set to the reason the database + // connection was lost. ListenerEventDisconnected - // Emitted after a database connection has been re-established after - // connection loss. err will always be nil. 
After this event has been - // emitted, a nil pq.Notification is sent on the Listener.Notify channel. + // ListenerEventReconnected is emitted after a database connection has + // been re-established after connection loss. The err argument of the + // callback will always be nil. After this event has been emitted, a + // nil pq.Notification is sent on the Listener.Notify channel. ListenerEventReconnected - // Emitted after a connection to the database was attempted, but failed. - // err will be set to an error describing why the connection attempt did - // not succeed. + // ListenerEventConnectionAttemptFailed is emitted after a connection + // to the database was attempted, but failed. The err argument will be + // set to an error describing why the connection attempt did not + // succeed. ListenerEventConnectionAttemptFailed ) +// EventCallbackType is the event callback type. See also ListenerEventType +// constants' documentation. type EventCallbackType func(event ListenerEventType, err error) // Listener provides an interface for listening to notifications from a @@ -454,9 +466,9 @@ func NewDialListener(d Dialer, return l } -// Returns the notification channel for this listener. This is the same -// channel as Notify, and will not be recreated during the life time of the -// Listener. +// NotificationChannel returns the notification channel for this listener. +// This is the same channel as Notify, and will not be recreated during the +// life time of the Listener. func (l *Listener) NotificationChannel() <-chan *Notification { return l.Notify } @@ -625,7 +637,7 @@ func (l *Listener) disconnectCleanup() error { // after the connection has been established. func (l *Listener) resync(cn *ListenerConn, notificationChan <-chan *Notification) error { doneChan := make(chan error) - go func() { + go func(notificationChan <-chan *Notification) { for channel := range l.channels { // If we got a response, return that error to our caller as it's // going to be more descriptive than cn.Err(). @@ -639,14 +651,14 @@ func (l *Listener) resync(cn *ListenerConn, notificationChan <-chan *Notificatio // close and then return the error message from the connection, as // per ListenerConn's interface. if err != nil { - for _ = range notificationChan { + for range notificationChan { } doneChan <- cn.Err() return } } doneChan <- nil - }() + }(notificationChan) // Ignore notifications while synchronization is going on to avoid // deadlocks. 
We have to send a nil notification over Notify anyway as @@ -772,7 +784,7 @@ func (l *Listener) listenerConnLoop() { } l.emitEvent(ListenerEventDisconnected, err) - time.Sleep(nextReconnect.Sub(time.Now())) + time.Sleep(time.Until(nextReconnect)) } } diff --git a/vendor/github.com/lib/pq/notify_test.go b/vendor/github.com/lib/pq/notify_test.go index 82a77e1e..075666dd 100644 --- a/vendor/github.com/lib/pq/notify_test.go +++ b/vendor/github.com/lib/pq/notify_test.go @@ -123,6 +123,9 @@ func TestConnUnlisten(t *testing.T) { } _, err = db.Exec("NOTIFY notify_test") + if err != nil { + t.Fatal(err) + } err = expectNotification(t, channel, "notify_test", "") if err != nil { @@ -159,6 +162,9 @@ func TestConnUnlistenAll(t *testing.T) { } _, err = db.Exec("NOTIFY notify_test") + if err != nil { + t.Fatal(err) + } err = expectNotification(t, channel, "notify_test", "") if err != nil { diff --git a/vendor/github.com/lib/pq/oid/gen.go b/vendor/github.com/lib/pq/oid/gen.go index cd4aea80..7c634cdc 100644 --- a/vendor/github.com/lib/pq/oid/gen.go +++ b/vendor/github.com/lib/pq/oid/gen.go @@ -10,10 +10,22 @@ import ( "log" "os" "os/exec" + "strings" _ "github.com/lib/pq" ) +// OID represent a postgres Object Identifier Type. +type OID struct { + ID int + Type string +} + +// Name returns an upper case version of the oid type. +func (o OID) Name() string { + return strings.ToUpper(o.Type) +} + func main() { datname := os.Getenv("PGDATABASE") sslmode := os.Getenv("PGSSLMODE") @@ -30,6 +42,25 @@ func main() { if err != nil { log.Fatal(err) } + rows, err := db.Query(` + SELECT typname, oid + FROM pg_type WHERE oid < 10000 + ORDER BY oid; + `) + if err != nil { + log.Fatal(err) + } + oids := make([]*OID, 0) + for rows.Next() { + var oid OID + if err = rows.Scan(&oid.Type, &oid.ID); err != nil { + log.Fatal(err) + } + oids = append(oids, &oid) + } + if err = rows.Err(); err != nil { + log.Fatal(err) + } cmd := exec.Command("gofmt") cmd.Stderr = os.Stderr w, err := cmd.StdinPipe() @@ -45,30 +76,18 @@ func main() { if err != nil { log.Fatal(err) } - fmt.Fprintln(w, "// generated by 'go run gen.go'; do not edit") + fmt.Fprintln(w, "// Code generated by gen.go. DO NOT EDIT.") fmt.Fprintln(w, "\npackage oid") fmt.Fprintln(w, "const (") - rows, err := db.Query(` - SELECT typname, oid - FROM pg_type WHERE oid < 10000 - ORDER BY oid; - `) - if err != nil { - log.Fatal(err) - } - var name string - var oid int - for rows.Next() { - err = rows.Scan(&name, &oid) - if err != nil { - log.Fatal(err) - } - fmt.Fprintf(w, "T_%s Oid = %d\n", name, oid) - } - if err = rows.Err(); err != nil { - log.Fatal(err) + for _, oid := range oids { + fmt.Fprintf(w, "T_%s Oid = %d\n", oid.Type, oid.ID) } fmt.Fprintln(w, ")") + fmt.Fprintln(w, "var TypeName = map[Oid]string{") + for _, oid := range oids { + fmt.Fprintf(w, "T_%s: \"%s\",\n", oid.Type, oid.Name()) + } + fmt.Fprintln(w, "}") w.Close() cmd.Wait() } diff --git a/vendor/github.com/lib/pq/oid/types.go b/vendor/github.com/lib/pq/oid/types.go index a3390c23..ecc84c2c 100644 --- a/vendor/github.com/lib/pq/oid/types.go +++ b/vendor/github.com/lib/pq/oid/types.go @@ -1,4 +1,4 @@ -// generated by 'go run gen.go'; do not edit +// Code generated by gen.go. DO NOT EDIT. 
package oid @@ -171,3 +171,173 @@ const ( T_regrole Oid = 4096 T__regrole Oid = 4097 ) + +var TypeName = map[Oid]string{ + T_bool: "BOOL", + T_bytea: "BYTEA", + T_char: "CHAR", + T_name: "NAME", + T_int8: "INT8", + T_int2: "INT2", + T_int2vector: "INT2VECTOR", + T_int4: "INT4", + T_regproc: "REGPROC", + T_text: "TEXT", + T_oid: "OID", + T_tid: "TID", + T_xid: "XID", + T_cid: "CID", + T_oidvector: "OIDVECTOR", + T_pg_ddl_command: "PG_DDL_COMMAND", + T_pg_type: "PG_TYPE", + T_pg_attribute: "PG_ATTRIBUTE", + T_pg_proc: "PG_PROC", + T_pg_class: "PG_CLASS", + T_json: "JSON", + T_xml: "XML", + T__xml: "_XML", + T_pg_node_tree: "PG_NODE_TREE", + T__json: "_JSON", + T_smgr: "SMGR", + T_index_am_handler: "INDEX_AM_HANDLER", + T_point: "POINT", + T_lseg: "LSEG", + T_path: "PATH", + T_box: "BOX", + T_polygon: "POLYGON", + T_line: "LINE", + T__line: "_LINE", + T_cidr: "CIDR", + T__cidr: "_CIDR", + T_float4: "FLOAT4", + T_float8: "FLOAT8", + T_abstime: "ABSTIME", + T_reltime: "RELTIME", + T_tinterval: "TINTERVAL", + T_unknown: "UNKNOWN", + T_circle: "CIRCLE", + T__circle: "_CIRCLE", + T_money: "MONEY", + T__money: "_MONEY", + T_macaddr: "MACADDR", + T_inet: "INET", + T__bool: "_BOOL", + T__bytea: "_BYTEA", + T__char: "_CHAR", + T__name: "_NAME", + T__int2: "_INT2", + T__int2vector: "_INT2VECTOR", + T__int4: "_INT4", + T__regproc: "_REGPROC", + T__text: "_TEXT", + T__tid: "_TID", + T__xid: "_XID", + T__cid: "_CID", + T__oidvector: "_OIDVECTOR", + T__bpchar: "_BPCHAR", + T__varchar: "_VARCHAR", + T__int8: "_INT8", + T__point: "_POINT", + T__lseg: "_LSEG", + T__path: "_PATH", + T__box: "_BOX", + T__float4: "_FLOAT4", + T__float8: "_FLOAT8", + T__abstime: "_ABSTIME", + T__reltime: "_RELTIME", + T__tinterval: "_TINTERVAL", + T__polygon: "_POLYGON", + T__oid: "_OID", + T_aclitem: "ACLITEM", + T__aclitem: "_ACLITEM", + T__macaddr: "_MACADDR", + T__inet: "_INET", + T_bpchar: "BPCHAR", + T_varchar: "VARCHAR", + T_date: "DATE", + T_time: "TIME", + T_timestamp: "TIMESTAMP", + T__timestamp: "_TIMESTAMP", + T__date: "_DATE", + T__time: "_TIME", + T_timestamptz: "TIMESTAMPTZ", + T__timestamptz: "_TIMESTAMPTZ", + T_interval: "INTERVAL", + T__interval: "_INTERVAL", + T__numeric: "_NUMERIC", + T_pg_database: "PG_DATABASE", + T__cstring: "_CSTRING", + T_timetz: "TIMETZ", + T__timetz: "_TIMETZ", + T_bit: "BIT", + T__bit: "_BIT", + T_varbit: "VARBIT", + T__varbit: "_VARBIT", + T_numeric: "NUMERIC", + T_refcursor: "REFCURSOR", + T__refcursor: "_REFCURSOR", + T_regprocedure: "REGPROCEDURE", + T_regoper: "REGOPER", + T_regoperator: "REGOPERATOR", + T_regclass: "REGCLASS", + T_regtype: "REGTYPE", + T__regprocedure: "_REGPROCEDURE", + T__regoper: "_REGOPER", + T__regoperator: "_REGOPERATOR", + T__regclass: "_REGCLASS", + T__regtype: "_REGTYPE", + T_record: "RECORD", + T_cstring: "CSTRING", + T_any: "ANY", + T_anyarray: "ANYARRAY", + T_void: "VOID", + T_trigger: "TRIGGER", + T_language_handler: "LANGUAGE_HANDLER", + T_internal: "INTERNAL", + T_opaque: "OPAQUE", + T_anyelement: "ANYELEMENT", + T__record: "_RECORD", + T_anynonarray: "ANYNONARRAY", + T_pg_authid: "PG_AUTHID", + T_pg_auth_members: "PG_AUTH_MEMBERS", + T__txid_snapshot: "_TXID_SNAPSHOT", + T_uuid: "UUID", + T__uuid: "_UUID", + T_txid_snapshot: "TXID_SNAPSHOT", + T_fdw_handler: "FDW_HANDLER", + T_pg_lsn: "PG_LSN", + T__pg_lsn: "_PG_LSN", + T_tsm_handler: "TSM_HANDLER", + T_anyenum: "ANYENUM", + T_tsvector: "TSVECTOR", + T_tsquery: "TSQUERY", + T_gtsvector: "GTSVECTOR", + T__tsvector: "_TSVECTOR", + T__gtsvector: "_GTSVECTOR", + T__tsquery: "_TSQUERY", + T_regconfig: 
"REGCONFIG", + T__regconfig: "_REGCONFIG", + T_regdictionary: "REGDICTIONARY", + T__regdictionary: "_REGDICTIONARY", + T_jsonb: "JSONB", + T__jsonb: "_JSONB", + T_anyrange: "ANYRANGE", + T_event_trigger: "EVENT_TRIGGER", + T_int4range: "INT4RANGE", + T__int4range: "_INT4RANGE", + T_numrange: "NUMRANGE", + T__numrange: "_NUMRANGE", + T_tsrange: "TSRANGE", + T__tsrange: "_TSRANGE", + T_tstzrange: "TSTZRANGE", + T__tstzrange: "_TSTZRANGE", + T_daterange: "DATERANGE", + T__daterange: "_DATERANGE", + T_int8range: "INT8RANGE", + T__int8range: "_INT8RANGE", + T_pg_shseclabel: "PG_SHSECLABEL", + T_regnamespace: "REGNAMESPACE", + T__regnamespace: "_REGNAMESPACE", + T_regrole: "REGROLE", + T__regrole: "_REGROLE", +} diff --git a/vendor/github.com/lib/pq/rows.go b/vendor/github.com/lib/pq/rows.go new file mode 100644 index 00000000..c6aa5b9a --- /dev/null +++ b/vendor/github.com/lib/pq/rows.go @@ -0,0 +1,93 @@ +package pq + +import ( + "math" + "reflect" + "time" + + "github.com/lib/pq/oid" +) + +const headerSize = 4 + +type fieldDesc struct { + // The object ID of the data type. + OID oid.Oid + // The data type size (see pg_type.typlen). + // Note that negative values denote variable-width types. + Len int + // The type modifier (see pg_attribute.atttypmod). + // The meaning of the modifier is type-specific. + Mod int +} + +func (fd fieldDesc) Type() reflect.Type { + switch fd.OID { + case oid.T_int8: + return reflect.TypeOf(int64(0)) + case oid.T_int4: + return reflect.TypeOf(int32(0)) + case oid.T_int2: + return reflect.TypeOf(int16(0)) + case oid.T_varchar, oid.T_text: + return reflect.TypeOf("") + case oid.T_bool: + return reflect.TypeOf(false) + case oid.T_date, oid.T_time, oid.T_timetz, oid.T_timestamp, oid.T_timestamptz: + return reflect.TypeOf(time.Time{}) + case oid.T_bytea: + return reflect.TypeOf([]byte(nil)) + default: + return reflect.TypeOf(new(interface{})).Elem() + } +} + +func (fd fieldDesc) Name() string { + return oid.TypeName[fd.OID] +} + +func (fd fieldDesc) Length() (length int64, ok bool) { + switch fd.OID { + case oid.T_text, oid.T_bytea: + return math.MaxInt64, true + case oid.T_varchar, oid.T_bpchar: + return int64(fd.Mod - headerSize), true + default: + return 0, false + } +} + +func (fd fieldDesc) PrecisionScale() (precision, scale int64, ok bool) { + switch fd.OID { + case oid.T_numeric, oid.T__numeric: + mod := fd.Mod - headerSize + precision = int64((mod >> 16) & 0xffff) + scale = int64(mod & 0xffff) + return precision, scale, true + default: + return 0, 0, false + } +} + +// ColumnTypeScanType returns the value type that can be used to scan types into. +func (rs *rows) ColumnTypeScanType(index int) reflect.Type { + return rs.colTyps[index].Type() +} + +// ColumnTypeDatabaseTypeName return the database system type name. +func (rs *rows) ColumnTypeDatabaseTypeName(index int) string { + return rs.colTyps[index].Name() +} + +// ColumnTypeLength returns the length of the column type if the column is a +// variable length type. If the column is not a variable length type ok +// should return false. +func (rs *rows) ColumnTypeLength(index int) (length int64, ok bool) { + return rs.colTyps[index].Length() +} + +// ColumnTypePrecisionScale should return the precision and scale for decimal +// types. If not applicable, ok should be false. 
+func (rs *rows) ColumnTypePrecisionScale(index int) (precision, scale int64, ok bool) { + return rs.colTyps[index].PrecisionScale() +} diff --git a/vendor/github.com/lib/pq/rows_test.go b/vendor/github.com/lib/pq/rows_test.go new file mode 100644 index 00000000..3033bc01 --- /dev/null +++ b/vendor/github.com/lib/pq/rows_test.go @@ -0,0 +1,220 @@ +// +build go1.8 + +package pq + +import ( + "math" + "reflect" + "testing" + + "github.com/lib/pq/oid" +) + +func TestDataTypeName(t *testing.T) { + tts := []struct { + typ oid.Oid + name string + }{ + {oid.T_int8, "INT8"}, + {oid.T_int4, "INT4"}, + {oid.T_int2, "INT2"}, + {oid.T_varchar, "VARCHAR"}, + {oid.T_text, "TEXT"}, + {oid.T_bool, "BOOL"}, + {oid.T_numeric, "NUMERIC"}, + {oid.T_date, "DATE"}, + {oid.T_time, "TIME"}, + {oid.T_timetz, "TIMETZ"}, + {oid.T_timestamp, "TIMESTAMP"}, + {oid.T_timestamptz, "TIMESTAMPTZ"}, + {oid.T_bytea, "BYTEA"}, + } + + for i, tt := range tts { + dt := fieldDesc{OID: tt.typ} + if name := dt.Name(); name != tt.name { + t.Errorf("(%d) got: %s want: %s", i, name, tt.name) + } + } +} + +func TestDataType(t *testing.T) { + tts := []struct { + typ oid.Oid + kind reflect.Kind + }{ + {oid.T_int8, reflect.Int64}, + {oid.T_int4, reflect.Int32}, + {oid.T_int2, reflect.Int16}, + {oid.T_varchar, reflect.String}, + {oid.T_text, reflect.String}, + {oid.T_bool, reflect.Bool}, + {oid.T_date, reflect.Struct}, + {oid.T_time, reflect.Struct}, + {oid.T_timetz, reflect.Struct}, + {oid.T_timestamp, reflect.Struct}, + {oid.T_timestamptz, reflect.Struct}, + {oid.T_bytea, reflect.Slice}, + } + + for i, tt := range tts { + dt := fieldDesc{OID: tt.typ} + if kind := dt.Type().Kind(); kind != tt.kind { + t.Errorf("(%d) got: %s want: %s", i, kind, tt.kind) + } + } +} + +func TestDataTypeLength(t *testing.T) { + tts := []struct { + typ oid.Oid + len int + mod int + length int64 + ok bool + }{ + {oid.T_int4, 0, -1, 0, false}, + {oid.T_varchar, 65535, 9, 5, true}, + {oid.T_text, 65535, -1, math.MaxInt64, true}, + {oid.T_bytea, 65535, -1, math.MaxInt64, true}, + } + + for i, tt := range tts { + dt := fieldDesc{OID: tt.typ, Len: tt.len, Mod: tt.mod} + if l, k := dt.Length(); k != tt.ok || l != tt.length { + t.Errorf("(%d) got: %d, %t want: %d, %t", i, l, k, tt.length, tt.ok) + } + } +} + +func TestDataTypePrecisionScale(t *testing.T) { + tts := []struct { + typ oid.Oid + mod int + precision, scale int64 + ok bool + }{ + {oid.T_int4, -1, 0, 0, false}, + {oid.T_numeric, 589830, 9, 2, true}, + {oid.T_text, -1, 0, 0, false}, + } + + for i, tt := range tts { + dt := fieldDesc{OID: tt.typ, Mod: tt.mod} + p, s, k := dt.PrecisionScale() + if k != tt.ok { + t.Errorf("(%d) got: %t want: %t", i, k, tt.ok) + } + if p != tt.precision { + t.Errorf("(%d) wrong precision got: %d want: %d", i, p, tt.precision) + } + if s != tt.scale { + t.Errorf("(%d) wrong scale got: %d want: %d", i, s, tt.scale) + } + } +} + +func TestRowsColumnTypes(t *testing.T) { + columnTypesTests := []struct { + Name string + TypeName string + Length struct { + Len int64 + OK bool + } + DecimalSize struct { + Precision int64 + Scale int64 + OK bool + } + ScanType reflect.Type + }{ + { + Name: "a", + TypeName: "INT4", + Length: struct { + Len int64 + OK bool + }{ + Len: 0, + OK: false, + }, + DecimalSize: struct { + Precision int64 + Scale int64 + OK bool + }{ + Precision: 0, + Scale: 0, + OK: false, + }, + ScanType: reflect.TypeOf(int32(0)), + }, { + Name: "bar", + TypeName: "TEXT", + Length: struct { + Len int64 + OK bool + }{ + Len: math.MaxInt64, + OK: true, + }, + DecimalSize: struct { 
+ Precision int64 + Scale int64 + OK bool + }{ + Precision: 0, + Scale: 0, + OK: false, + }, + ScanType: reflect.TypeOf(""), + }, + } + + db := openTestConn(t) + defer db.Close() + + rows, err := db.Query("SELECT 1 AS a, text 'bar' AS bar, 1.28::numeric(9, 2) AS dec") + if err != nil { + t.Fatal(err) + } + + columns, err := rows.ColumnTypes() + if err != nil { + t.Fatal(err) + } + if len(columns) != 3 { + t.Errorf("expected 3 columns found %d", len(columns)) + } + + for i, tt := range columnTypesTests { + c := columns[i] + if c.Name() != tt.Name { + t.Errorf("(%d) got: %s, want: %s", i, c.Name(), tt.Name) + } + if c.DatabaseTypeName() != tt.TypeName { + t.Errorf("(%d) got: %s, want: %s", i, c.DatabaseTypeName(), tt.TypeName) + } + l, ok := c.Length() + if l != tt.Length.Len { + t.Errorf("(%d) got: %d, want: %d", i, l, tt.Length.Len) + } + if ok != tt.Length.OK { + t.Errorf("(%d) got: %t, want: %t", i, ok, tt.Length.OK) + } + p, s, ok := c.DecimalSize() + if p != tt.DecimalSize.Precision { + t.Errorf("(%d) got: %d, want: %d", i, p, tt.DecimalSize.Precision) + } + if s != tt.DecimalSize.Scale { + t.Errorf("(%d) got: %d, want: %d", i, s, tt.DecimalSize.Scale) + } + if ok != tt.DecimalSize.OK { + t.Errorf("(%d) got: %t, want: %t", i, ok, tt.DecimalSize.OK) + } + if c.ScanType() != tt.ScanType { + t.Errorf("(%d) got: %v, want: %v", i, c.ScanType(), tt.ScanType) + } + } +} diff --git a/vendor/github.com/lib/pq/ssl.go b/vendor/github.com/lib/pq/ssl.go index 7deb3043..e1a326a0 100644 --- a/vendor/github.com/lib/pq/ssl.go +++ b/vendor/github.com/lib/pq/ssl.go @@ -12,7 +12,7 @@ import ( // ssl generates a function to upgrade a net.Conn based on the "sslmode" and // related settings. The function is nil when no upgrade should take place. -func ssl(o values) func(net.Conn) net.Conn { +func ssl(o values) (func(net.Conn) (net.Conn, error), error) { verifyCaOnly := false tlsConf := tls.Config{} switch mode := o["sslmode"]; mode { @@ -45,29 +45,38 @@ func ssl(o values) func(net.Conn) net.Conn { case "verify-full": tlsConf.ServerName = o["host"] case "disable": - return nil + return nil, nil default: - errorf(`unsupported sslmode %q; only "require" (default), "verify-full", "verify-ca", and "disable" supported`, mode) + return nil, fmterrorf(`unsupported sslmode %q; only "require" (default), "verify-full", "verify-ca", and "disable" supported`, mode) } - sslClientCertificates(&tlsConf, o) - sslCertificateAuthority(&tlsConf, o) + err := sslClientCertificates(&tlsConf, o) + if err != nil { + return nil, err + } + err = sslCertificateAuthority(&tlsConf, o) + if err != nil { + return nil, err + } sslRenegotiation(&tlsConf) - return func(conn net.Conn) net.Conn { + return func(conn net.Conn) (net.Conn, error) { client := tls.Client(conn, &tlsConf) if verifyCaOnly { - sslVerifyCertificateAuthority(client, &tlsConf) + err := sslVerifyCertificateAuthority(client, &tlsConf) + if err != nil { + return nil, err + } } - return client - } + return client, nil + }, nil } // sslClientCertificates adds the certificate specified in the "sslcert" and // "sslkey" settings, or if they aren't set, from the .postgresql directory // in the user's home directory. The configured files must exist and have // the correct permissions. -func sslClientCertificates(tlsConf *tls.Config, o values) { +func sslClientCertificates(tlsConf *tls.Config, o values) error { // user.Current() might fail when cross-compiling. 
We have to ignore the // error and continue without home directory defaults, since we wouldn't // know from where to load them. @@ -82,13 +91,13 @@ func sslClientCertificates(tlsConf *tls.Config, o values) { } // https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1045 if len(sslcert) == 0 { - return + return nil } // https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1050:L1054 if _, err := os.Stat(sslcert); os.IsNotExist(err) { - return + return nil } else if err != nil { - panic(err) + return err } // In libpq, the ssl key is only loaded if the setting is not blank. @@ -101,19 +110,21 @@ func sslClientCertificates(tlsConf *tls.Config, o values) { if len(sslkey) > 0 { if err := sslKeyPermissions(sslkey); err != nil { - panic(err) + return err } } cert, err := tls.LoadX509KeyPair(sslcert, sslkey) if err != nil { - panic(err) + return err } + tlsConf.Certificates = []tls.Certificate{cert} + return nil } // sslCertificateAuthority adds the RootCA specified in the "sslrootcert" setting. -func sslCertificateAuthority(tlsConf *tls.Config, o values) { +func sslCertificateAuthority(tlsConf *tls.Config, o values) error { // In libpq, the root certificate is only loaded if the setting is not blank. // // https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L950-L951 @@ -122,22 +133,24 @@ func sslCertificateAuthority(tlsConf *tls.Config, o values) { cert, err := ioutil.ReadFile(sslrootcert) if err != nil { - panic(err) + return err } if !tlsConf.RootCAs.AppendCertsFromPEM(cert) { - errorf("couldn't parse pem in sslrootcert") + return fmterrorf("couldn't parse pem in sslrootcert") } } + + return nil } // sslVerifyCertificateAuthority carries out a TLS handshake to the server and // verifies the presented certificate against the CA, i.e. the one specified in // sslrootcert or the system CA if sslrootcert was not specified. 
-func sslVerifyCertificateAuthority(client *tls.Conn, tlsConf *tls.Config) { +func sslVerifyCertificateAuthority(client *tls.Conn, tlsConf *tls.Config) error { err := client.Handshake() if err != nil { - panic(err) + return err } certs := client.ConnectionState().PeerCertificates opts := x509.VerifyOptions{ @@ -152,7 +165,5 @@ func sslVerifyCertificateAuthority(client *tls.Conn, tlsConf *tls.Config) { opts.Intermediates.AddCert(cert) } _, err = certs[0].Verify(opts) - if err != nil { - panic(err) - } + return err } diff --git a/vendor/github.com/lib/pq/uuid_test.go b/vendor/github.com/lib/pq/uuid_test.go index 9df4a79b..8ecee2fd 100644 --- a/vendor/github.com/lib/pq/uuid_test.go +++ b/vendor/github.com/lib/pq/uuid_test.go @@ -33,7 +33,7 @@ func TestDecodeUUIDBackend(t *testing.T) { db := openTestConn(t) defer db.Close() - var s string = "a0ecc91d-a13f-4fe4-9fce-7e09777cc70a" + var s = "a0ecc91d-a13f-4fe4-9fce-7e09777cc70a" var scanned interface{} err := db.QueryRow(`SELECT $1::uuid`, s).Scan(&scanned) diff --git a/vendor/github.com/pborman/uuid/.travis.yml b/vendor/github.com/pborman/uuid/.travis.yml index a6a98db8..d8156a60 100644 --- a/vendor/github.com/pborman/uuid/.travis.yml +++ b/vendor/github.com/pborman/uuid/.travis.yml @@ -3,7 +3,6 @@ language: go go: - 1.4.3 - 1.5.3 - - release - tip script: diff --git a/vendor/github.com/pborman/uuid/CONTRIBUTING.md b/vendor/github.com/pborman/uuid/CONTRIBUTING.md new file mode 100644 index 00000000..04fdf09f --- /dev/null +++ b/vendor/github.com/pborman/uuid/CONTRIBUTING.md @@ -0,0 +1,10 @@ +# How to contribute + +We definitely welcome patches and contribution to this project! + +### Legal requirements + +In order to protect both you and ourselves, you will need to sign the +[Contributor License Agreement](https://cla.developers.google.com/clas). + +You may have already signed it for other Google projects. diff --git a/vendor/github.com/pborman/uuid/README.md b/vendor/github.com/pborman/uuid/README.md index f023d47c..b0396b27 100644 --- a/vendor/github.com/pborman/uuid/README.md +++ b/vendor/github.com/pborman/uuid/README.md @@ -1,7 +1,7 @@ This project was automatically exported from code.google.com/p/go-uuid # uuid ![build status](https://travis-ci.org/pborman/uuid.svg?branch=master) -The uuid package generates and inspects UUIDs based on [RFC 412](http://tools.ietf.org/html/rfc4122) and DCE 1.1: Authentication and Security Services. +The uuid package generates and inspects UUIDs based on [RFC 4122](http://tools.ietf.org/html/rfc4122) and DCE 1.1: Authentication and Security Services. ###### Install `go get github.com/pborman/uuid` diff --git a/vendor/github.com/pborman/uuid/dce.go b/vendor/github.com/pborman/uuid/dce.go old mode 100755 new mode 100644 diff --git a/vendor/github.com/pborman/uuid/doc.go b/vendor/github.com/pborman/uuid/doc.go old mode 100755 new mode 100644 diff --git a/vendor/github.com/pborman/uuid/json.go b/vendor/github.com/pborman/uuid/json.go deleted file mode 100644 index 9dda1dfb..00000000 --- a/vendor/github.com/pborman/uuid/json.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2014 Google Inc. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package uuid - -import "errors" - -func (u UUID) MarshalJSON() ([]byte, error) { - if len(u) != 16 { - return []byte(`""`), nil - } - var js [38]byte - js[0] = '"' - encodeHex(js[1:], u) - js[37] = '"' - return js[:], nil -} - -func (u *UUID) UnmarshalJSON(data []byte) error { - if string(data) == `""` { - return nil - } - if data[0] != '"' { - return errors.New("invalid UUID format") - } - data = data[1 : len(data)-1] - uu := Parse(string(data)) - if uu == nil { - return errors.New("invalid UUID format") - } - *u = uu - return nil -} diff --git a/vendor/github.com/pborman/uuid/json_test.go b/vendor/github.com/pborman/uuid/json_test.go deleted file mode 100644 index 2866b8dc..00000000 --- a/vendor/github.com/pborman/uuid/json_test.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2014 Google Inc. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package uuid - -import ( - "encoding/json" - "reflect" - "testing" -) - -var testUUID = Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") - -func TestJSON(t *testing.T) { - type S struct { - ID1 UUID - ID2 UUID - } - s1 := S{ID1: testUUID} - data, err := json.Marshal(&s1) - if err != nil { - t.Fatal(err) - } - var s2 S - if err := json.Unmarshal(data, &s2); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(&s1, &s2) { - t.Errorf("got %#v, want %#v", s2, s1) - } -} - -func BenchmarkUUID_MarshalJSON(b *testing.B) { - x := &struct { - UUID UUID `json:"uuid"` - }{} - x.UUID = Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") - if x.UUID == nil { - b.Fatal("invalid uuid") - } - for i := 0; i < b.N; i++ { - js, err := json.Marshal(x) - if err != nil { - b.Fatalf("marshal json: %#v (%v)", js, err) - } - } -} - -func BenchmarkUUID_UnmarshalJSON(b *testing.B) { - js := []byte(`{"uuid":"f47ac10b-58cc-0372-8567-0e02b2c3d479"}`) - var x *struct { - UUID UUID `json:"uuid"` - } - for i := 0; i < b.N; i++ { - err := json.Unmarshal(js, &x) - if err != nil { - b.Fatalf("marshal json: %#v (%v)", js, err) - } - } -} diff --git a/vendor/github.com/pborman/uuid/marshal.go b/vendor/github.com/pborman/uuid/marshal.go new file mode 100644 index 00000000..6621dd54 --- /dev/null +++ b/vendor/github.com/pborman/uuid/marshal.go @@ -0,0 +1,83 @@ +// Copyright 2016 Google Inc. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package uuid + +import ( + "errors" + "fmt" +) + +// MarshalText implements encoding.TextMarshaler. +func (u UUID) MarshalText() ([]byte, error) { + if len(u) != 16 { + return nil, nil + } + var js [36]byte + encodeHex(js[:], u) + return js[:], nil +} + +// UnmarshalText implements encoding.TextUnmarshaler. +func (u *UUID) UnmarshalText(data []byte) error { + if len(data) == 0 { + return nil + } + id := Parse(string(data)) + if id == nil { + return errors.New("invalid UUID") + } + *u = id + return nil +} + +// MarshalBinary implements encoding.BinaryMarshaler. +func (u UUID) MarshalBinary() ([]byte, error) { + return u[:], nil +} + +// UnmarshalBinary implements encoding.BinaryUnmarshaler. +func (u *UUID) UnmarshalBinary(data []byte) error { + if len(data) == 0 { + return nil + } + if len(data) != 16 { + return fmt.Errorf("invalid UUID (got %d bytes)", len(data)) + } + var id [16]byte + copy(id[:], data) + *u = id[:] + return nil +} + +// MarshalText implements encoding.TextMarshaler. 
+func (u Array) MarshalText() ([]byte, error) { + var js [36]byte + encodeHex(js[:], u[:]) + return js[:], nil +} + +// UnmarshalText implements encoding.TextUnmarshaler. +func (u *Array) UnmarshalText(data []byte) error { + id := Parse(string(data)) + if id == nil { + return errors.New("invalid UUID") + } + *u = id.Array() + return nil +} + +// MarshalBinary implements encoding.BinaryMarshaler. +func (u Array) MarshalBinary() ([]byte, error) { + return u[:], nil +} + +// UnmarshalBinary implements encoding.BinaryUnmarshaler. +func (u *Array) UnmarshalBinary(data []byte) error { + if len(data) != 16 { + return fmt.Errorf("invalid UUID (got %d bytes)", len(data)) + } + copy(u[:], data) + return nil +} diff --git a/vendor/github.com/pborman/uuid/marshal_test.go b/vendor/github.com/pborman/uuid/marshal_test.go new file mode 100644 index 00000000..4e85b6ba --- /dev/null +++ b/vendor/github.com/pborman/uuid/marshal_test.go @@ -0,0 +1,124 @@ +// Copyright 2014 Google Inc. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package uuid + +import ( + "bytes" + "encoding/json" + "reflect" + "testing" +) + +var testUUID = Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") +var testArray = testUUID.Array() + +func TestJSON(t *testing.T) { + type S struct { + ID1 UUID + ID2 UUID + } + s1 := S{ID1: testUUID} + data, err := json.Marshal(&s1) + if err != nil { + t.Fatal(err) + } + var s2 S + if err := json.Unmarshal(data, &s2); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(&s1, &s2) { + t.Errorf("got %#v, want %#v", s2, s1) + } +} + +func TestJSONArray(t *testing.T) { + type S struct { + ID1 Array + ID2 Array + } + s1 := S{ID1: testArray} + data, err := json.Marshal(&s1) + if err != nil { + t.Fatal(err) + } + var s2 S + if err := json.Unmarshal(data, &s2); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(&s1, &s2) { + t.Errorf("got %#v, want %#v", s2, s1) + } +} + +func TestMarshal(t *testing.T) { + data, err := testUUID.MarshalBinary() + if err != nil { + t.Fatalf("MarhsalBinary returned unexpected error %v", err) + } + if !bytes.Equal(data, testUUID) { + t.Fatalf("MarhsalBinary returns %x, want %x", data, testUUID) + } + var u UUID + u.UnmarshalBinary(data) + if !Equal(data, u) { + t.Fatalf("UnmarhsalBinary returns %v, want %v", u, testUUID) + } +} + +func TestMarshalArray(t *testing.T) { + data, err := testArray.MarshalBinary() + if err != nil { + t.Fatalf("MarhsalBinary returned unexpected error %v", err) + } + if !bytes.Equal(data, testUUID) { + t.Fatalf("MarhsalBinary returns %x, want %x", data, testUUID) + } + var a Array + a.UnmarshalBinary(data) + if a != testArray { + t.Fatalf("UnmarhsalBinary returns %v, want %v", a, testArray) + } +} + +func TestMarshalTextArray(t *testing.T) { + data, err := testArray.MarshalText() + if err != nil { + t.Fatalf("MarhsalText returned unexpected error %v", err) + } + var a Array + a.UnmarshalText(data) + if a != testArray { + t.Fatalf("UnmarhsalText returns %v, want %v", a, testArray) + } +} + +func BenchmarkUUID_MarshalJSON(b *testing.B) { + x := &struct { + UUID UUID `json:"uuid"` + }{} + x.UUID = Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") + if x.UUID == nil { + b.Fatal("invalid uuid") + } + for i := 0; i < b.N; i++ { + js, err := json.Marshal(x) + if err != nil { + b.Fatalf("marshal json: %#v (%v)", js, err) + } + } +} + +func BenchmarkUUID_UnmarshalJSON(b *testing.B) { + js := []byte(`{"uuid":"f47ac10b-58cc-0372-8567-0e02b2c3d479"}`) + var x *struct { + UUID UUID 
`json:"uuid"` + } + for i := 0; i < b.N; i++ { + err := json.Unmarshal(js, &x) + if err != nil { + b.Fatalf("marshal json: %#v (%v)", js, err) + } + } +} diff --git a/vendor/github.com/pborman/uuid/node.go b/vendor/github.com/pborman/uuid/node.go old mode 100755 new mode 100644 diff --git a/vendor/github.com/pborman/uuid/sql.go b/vendor/github.com/pborman/uuid/sql.go index c84f900d..d015bfd1 100644 --- a/vendor/github.com/pborman/uuid/sql.go +++ b/vendor/github.com/pborman/uuid/sql.go @@ -5,6 +5,7 @@ package uuid import ( + "database/sql/driver" "errors" "fmt" ) @@ -56,3 +57,10 @@ func (uuid *UUID) Scan(src interface{}) error { return nil } + +// Value implements sql.Valuer so that UUIDs can be written to databases +// transparently. Currently, UUIDs map to strings. Please consult +// database-specific driver documentation for matching types. +func (uuid UUID) Value() (driver.Value, error) { + return uuid.String(), nil +} diff --git a/vendor/github.com/pborman/uuid/sql_test.go b/vendor/github.com/pborman/uuid/sql_test.go index 4d26392a..10309515 100644 --- a/vendor/github.com/pborman/uuid/sql_test.go +++ b/vendor/github.com/pborman/uuid/sql_test.go @@ -85,3 +85,12 @@ func TestScan(t *testing.T) { t.Error("UUID was not nil after scanning empty string") } } + +func TestValue(t *testing.T) { + stringTest := "f47ac10b-58cc-0372-8567-0e02b2c3d479" + uuid := Parse(stringTest) + val, _ := uuid.Value() + if val != stringTest { + t.Error("Value() did not return expected string") + } +} diff --git a/vendor/github.com/pborman/uuid/time.go b/vendor/github.com/pborman/uuid/time.go old mode 100755 new mode 100644 diff --git a/vendor/github.com/pborman/uuid/uuid.go b/vendor/github.com/pborman/uuid/uuid.go index c4482cd8..7c643cf0 100644 --- a/vendor/github.com/pborman/uuid/uuid.go +++ b/vendor/github.com/pborman/uuid/uuid.go @@ -13,6 +13,20 @@ import ( "strings" ) +// Array is a pass-by-value UUID that can be used as an effecient key in a map. +type Array [16]byte + +// UUID converts uuid into a slice. +func (uuid Array) UUID() UUID { + return uuid[:] +} + +// String returns the string representation of uuid, +// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx. +func (uuid Array) String() string { + return uuid.UUID().String() +} + // A UUID is a 128 bit (16 byte) Universal Unique IDentifier as defined in RFC // 4122. type UUID []byte @@ -76,6 +90,17 @@ func Equal(uuid1, uuid2 UUID) bool { return bytes.Equal(uuid1, uuid2) } +// Array returns an array representation of uuid that can be used as a map key. +// Array panics if uuid is not valid. +func (uuid UUID) Array() Array { + if len(uuid) != 16 { + panic("invalid uuid") + } + var a Array + copy(a[:], uuid) + return a +} + // String returns the string form of uuid, xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx // , or "" if uuid is invalid. func (uuid UUID) String() string { @@ -161,7 +186,7 @@ func (v Variant) String() string { return fmt.Sprintf("BadVariant%d", int(v)) } -// SetRand sets the random number generator to r, which implents io.Reader. +// SetRand sets the random number generator to r, which implements io.Reader. // If r.Read returns an error when the package requests random data then // a panic will be issued. 
// diff --git a/vendor/github.com/pborman/uuid/uuid_test.go b/vendor/github.com/pborman/uuid/uuid_test.go index 3835cc80..03872396 100644 --- a/vendor/github.com/pborman/uuid/uuid_test.go +++ b/vendor/github.com/pborman/uuid/uuid_test.go @@ -300,7 +300,7 @@ func TestNode(t *testing.T) { t.Error("nodeid is all zeros") } - id := []byte{1,2,3,4,5,6,7,8} + id := []byte{1, 2, 3, 4, 5, 6, 7, 8} SetNodeID(id) ni = NodeID() if !bytes.Equal(ni, id[:6]) { @@ -421,13 +421,47 @@ func TestBadRand(t *testing.T) { uuid1 := New() uuid2 := New() if uuid1 != uuid2 { - t.Errorf("execpted duplicates, got %q and %q", uuid1, uuid2) + t.Errorf("expected duplicates, got %q and %q", uuid1, uuid2) } SetRand(nil) uuid1 = New() uuid2 = New() if uuid1 == uuid2 { - t.Errorf("unexecpted duplicates, got %q", uuid1) + t.Errorf("unexpected duplicates, got %q", uuid1) + } +} + +func TestUUID_Array(t *testing.T) { + expect := Array{ + 0xf4, 0x7a, 0xc1, 0x0b, + 0x58, 0xcc, + 0x03, 0x72, + 0x85, 0x67, + 0x0e, 0x02, 0xb2, 0xc3, 0xd4, 0x79, + } + uuid := Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") + if uuid == nil { + t.Fatal("invalid uuid") + } + if uuid.Array() != expect { + t.Fatal("invalid array") + } +} + +func TestArray_UUID(t *testing.T) { + array := Array{ + 0xf4, 0x7a, 0xc1, 0x0b, + 0x58, 0xcc, + 0x03, 0x72, + 0x85, 0x67, + 0x0e, 0x02, 0xb2, 0xc3, 0xd4, 0x79, + } + expect := Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") + if expect == nil { + t.Fatal("invalid uuid") + } + if !bytes.Equal(array.UUID(), expect) { + t.Fatal("invalid uuid") } } @@ -469,3 +503,41 @@ func BenchmarkUUID_URN(b *testing.B) { } } } + +func BenchmarkUUID_Array(b *testing.B) { + expect := Array{ + 0xf4, 0x7a, 0xc1, 0x0b, + 0x58, 0xcc, + 0x03, 0x72, + 0x85, 0x67, + 0x0e, 0x02, 0xb2, 0xc3, 0xd4, 0x79, + } + uuid := Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") + if uuid == nil { + b.Fatal("invalid uuid") + } + for i := 0; i < b.N; i++ { + if uuid.Array() != expect { + b.Fatal("invalid array") + } + } +} + +func BenchmarkArray_UUID(b *testing.B) { + array := Array{ + 0xf4, 0x7a, 0xc1, 0x0b, + 0x58, 0xcc, + 0x03, 0x72, + 0x85, 0x67, + 0x0e, 0x02, 0xb2, 0xc3, 0xd4, 0x79, + } + expect := Parse("f47ac10b-58cc-0372-8567-0e02b2c3d479") + if expect == nil { + b.Fatal("invalid uuid") + } + for i := 0; i < b.N; i++ { + if !bytes.Equal(array.UUID(), expect) { + b.Fatal("invalid uuid") + } + } +} diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go index 64cc40fe..003e99fa 100644 --- a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go +++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go @@ -559,10 +559,14 @@ type UnifiedDiff struct { func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { buf := bufio.NewWriter(writer) defer buf.Flush() - w := func(format string, args ...interface{}) error { + wf := func(format string, args ...interface{}) error { _, err := buf.WriteString(fmt.Sprintf(format, args...)) return err } + ws := func(s string) error { + _, err := buf.WriteString(s) + return err + } if len(diff.Eol) == 0 { diff.Eol = "\n" @@ -581,26 +585,28 @@ func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { if len(diff.ToDate) > 0 { toDate = "\t" + diff.ToDate } - err := w("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = w("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err + if diff.FromFile != "" || diff.ToFile != "" { + err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) + if 
err != nil { + return err + } + err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) + if err != nil { + return err + } } } first, last := g[0], g[len(g)-1] range1 := formatRangeUnified(first.I1, last.I2) range2 := formatRangeUnified(first.J1, last.J2) - if err := w("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { + if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { return err } for _, c := range g { i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 if c.Tag == 'e' { for _, line := range diff.A[i1:i2] { - if err := w(" " + line); err != nil { + if err := ws(" " + line); err != nil { return err } } @@ -608,14 +614,14 @@ func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { } if c.Tag == 'r' || c.Tag == 'd' { for _, line := range diff.A[i1:i2] { - if err := w("-" + line); err != nil { + if err := ws("-" + line); err != nil { return err } } } if c.Tag == 'r' || c.Tag == 'i' { for _, line := range diff.B[j1:j2] { - if err := w("+" + line); err != nil { + if err := ws("+" + line); err != nil { return err } } @@ -669,12 +675,18 @@ func WriteContextDiff(writer io.Writer, diff ContextDiff) error { buf := bufio.NewWriter(writer) defer buf.Flush() var diffErr error - w := func(format string, args ...interface{}) { + wf := func(format string, args ...interface{}) { _, err := buf.WriteString(fmt.Sprintf(format, args...)) if diffErr == nil && err != nil { diffErr = err } } + ws := func(s string) { + _, err := buf.WriteString(s) + if diffErr == nil && err != nil { + diffErr = err + } + } if len(diff.Eol) == 0 { diff.Eol = "\n" @@ -700,15 +712,17 @@ func WriteContextDiff(writer io.Writer, diff ContextDiff) error { if len(diff.ToDate) > 0 { toDate = "\t" + diff.ToDate } - w("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) - w("--- %s%s%s", diff.ToFile, toDate, diff.Eol) + if diff.FromFile != "" || diff.ToFile != "" { + wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) + wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) + } } first, last := g[0], g[len(g)-1] - w("***************" + diff.Eol) + ws("***************" + diff.Eol) range1 := formatRangeContext(first.I1, last.I2) - w("*** %s ****%s", range1, diff.Eol) + wf("*** %s ****%s", range1, diff.Eol) for _, c := range g { if c.Tag == 'r' || c.Tag == 'd' { for _, cc := range g { @@ -716,7 +730,7 @@ func WriteContextDiff(writer io.Writer, diff ContextDiff) error { continue } for _, line := range diff.A[cc.I1:cc.I2] { - w(prefix[cc.Tag] + line) + ws(prefix[cc.Tag] + line) } } break @@ -724,7 +738,7 @@ func WriteContextDiff(writer io.Writer, diff ContextDiff) error { } range2 := formatRangeContext(first.J1, last.J2) - w("--- %s ----%s", range2, diff.Eol) + wf("--- %s ----%s", range2, diff.Eol) for _, c := range g { if c.Tag == 'r' || c.Tag == 'i' { for _, cc := range g { @@ -732,7 +746,7 @@ func WriteContextDiff(writer io.Writer, diff ContextDiff) error { continue } for _, line := range diff.B[cc.J1:cc.J2] { - w(prefix[cc.Tag] + line) + ws(prefix[cc.Tag] + line) } } break diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib_test.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib_test.go index 94670bea..d7251196 100644 --- a/vendor/github.com/pmezard/go-difflib/difflib/difflib_test.go +++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib_test.go @@ -102,11 +102,12 @@ group } } -func ExampleGetUnifiedDiffString() { +func ExampleGetUnifiedDiffCode() { a := `one two three -four` +four +fmt.Printf("%s,%T",a,b)` b := `zero one three @@ -121,16 +122,54 @@ four` Context: 3, } result, _ := 
GetUnifiedDiffString(diff) - fmt.Printf(strings.Replace(result, "\t", " ", -1)) + fmt.Println(strings.Replace(result, "\t", " ", -1)) // Output: // --- Original 2005-01-26 23:30:50 // +++ Current 2010-04-02 10:20:52 - // @@ -1,4 +1,4 @@ + // @@ -1,5 +1,4 @@ // +zero // one // -two // three // four + // -fmt.Printf("%s,%T",a,b) +} + +func ExampleGetContextDiffCode() { + a := `one +two +three +four +fmt.Printf("%s,%T",a,b)` + b := `zero +one +tree +four` + diff := ContextDiff{ + A: SplitLines(a), + B: SplitLines(b), + FromFile: "Original", + ToFile: "Current", + Context: 3, + Eol: "\n", + } + result, _ := GetContextDiffString(diff) + fmt.Print(strings.Replace(result, "\t", " ", -1)) + // Output: + // *** Original + // --- Current + // *************** + // *** 1,5 **** + // one + // ! two + // ! three + // four + // - fmt.Printf("%s,%T",a,b) + // --- 1,4 ---- + // + zero + // one + // ! tree + // four } func ExampleGetContextDiffString() { @@ -318,6 +357,41 @@ func TestOutputFormatNoTrailingTabOnEmptyFiledate(t *testing.T) { assertEqual(t, SplitLines(cd)[:2], []string{"*** Original\n", "--- Current\n"}) } +func TestOmitFilenames(t *testing.T) { + diff := UnifiedDiff{ + A: SplitLines("o\nn\ne\n"), + B: SplitLines("t\nw\no\n"), + Eol: "\n", + } + ud, err := GetUnifiedDiffString(diff) + assertEqual(t, err, nil) + assertEqual(t, SplitLines(ud), []string{ + "@@ -0,0 +1,2 @@\n", + "+t\n", + "+w\n", + "@@ -2,2 +3,0 @@\n", + "-n\n", + "-e\n", + "\n", + }) + + cd, err := GetContextDiffString(ContextDiff(diff)) + assertEqual(t, err, nil) + assertEqual(t, SplitLines(cd), []string{ + "***************\n", + "*** 0 ****\n", + "--- 1,2 ----\n", + "+ t\n", + "+ w\n", + "***************\n", + "*** 2,3 ****\n", + "- n\n", + "- e\n", + "--- 3 ----\n", + "\n", + }) +} + func TestSplitLines(t *testing.T) { allTests := []struct { input string diff --git a/vendor/github.com/prometheus/client_model/Makefile b/vendor/github.com/prometheus/client_model/Makefile index 9cc23b34..e147c69d 100644 --- a/vendor/github.com/prometheus/client_model/Makefile +++ b/vendor/github.com/prometheus/client_model/Makefile @@ -39,6 +39,7 @@ src/main/java/io/prometheus/client/Metrics.java: metrics.proto python: python/prometheus/client/model/metrics_pb2.py python/prometheus/client/model/metrics_pb2.py: metrics.proto + mkdir -p python/prometheus/client/model protoc $< --python_out=python/prometheus/client/model ruby: diff --git a/vendor/github.com/prometheus/common/README.md b/vendor/github.com/prometheus/common/README.md index 98f6ce24..47985e4a 100644 --- a/vendor/github.com/prometheus/common/README.md +++ b/vendor/github.com/prometheus/common/README.md @@ -6,7 +6,7 @@ components and libraries. 
* **config**: Common configuration structures * **expfmt**: Decoding and encoding for the exposition format -* **log**: A logging wrapper around [logrus](https://github.com/Sirupsen/logrus) +* **log**: A logging wrapper around [logrus](https://github.com/sirupsen/logrus) * **model**: Shared data structures * **route**: A routing wrapper around [httprouter](https://github.com/julienschmidt/httprouter) using `context.Context` -* **version**: Version informations and metric +* **version**: Version information and metrics diff --git a/vendor/github.com/prometheus/common/config/config.go b/vendor/github.com/prometheus/common/config/config.go index 33eb922c..30719d83 100644 --- a/vendor/github.com/prometheus/common/config/config.go +++ b/vendor/github.com/prometheus/common/config/config.go @@ -11,20 +11,24 @@ // See the License for the specific language governing permissions and // limitations under the License. +// This package no longer handles safe yaml parsing. In order to +// ensure correct yaml unmarshalling, use "yaml.UnmarshalStrict()". + package config -import ( - "fmt" - "strings" -) +// Secret special type for storing secrets. +type Secret string -func checkOverflow(m map[string]interface{}, ctx string) error { - if len(m) > 0 { - var keys []string - for k := range m { - keys = append(keys, k) - } - return fmt.Errorf("unknown fields in %s: %s", ctx, strings.Join(keys, ", ")) +// MarshalYAML implements the yaml.Marshaler interface for Secrets. +func (s Secret) MarshalYAML() (interface{}, error) { + if s != "" { + return "", nil } - return nil + return nil, nil +} + +//UnmarshalYAML implements the yaml.Unmarshaler interface for Secrets. +func (s *Secret) UnmarshalYAML(unmarshal func(interface{}) error) error { + type plain Secret + return unmarshal((*plain)(s)) } diff --git a/vendor/github.com/prometheus/common/config/http_config.go b/vendor/github.com/prometheus/common/config/http_config.go new file mode 100644 index 00000000..da5d5901 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/http_config.go @@ -0,0 +1,317 @@ +// Copyright 2016 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package config + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" + "time" + + "github.com/mwitkow/go-conntrack" + "gopkg.in/yaml.v2" +) + +// BasicAuth contains basic HTTP authentication credentials. +type BasicAuth struct { + Username string `yaml:"username"` + Password Secret `yaml:"password,omitempty"` + PasswordFile string `yaml:"password_file,omitempty"` +} + +// URL is a custom URL type that allows validation at configuration load time. +type URL struct { + *url.URL +} + +// UnmarshalYAML implements the yaml.Unmarshaler interface for URLs. 
+func (u *URL) UnmarshalYAML(unmarshal func(interface{}) error) error { + var s string + if err := unmarshal(&s); err != nil { + return err + } + + urlp, err := url.Parse(s) + if err != nil { + return err + } + u.URL = urlp + return nil +} + +// MarshalYAML implements the yaml.Marshaler interface for URLs. +func (u URL) MarshalYAML() (interface{}, error) { + if u.URL != nil { + return u.String(), nil + } + return nil, nil +} + +// HTTPClientConfig configures an HTTP client. +type HTTPClientConfig struct { + // The HTTP basic authentication credentials for the targets. + BasicAuth *BasicAuth `yaml:"basic_auth,omitempty"` + // The bearer token for the targets. + BearerToken Secret `yaml:"bearer_token,omitempty"` + // The bearer token file for the targets. + BearerTokenFile string `yaml:"bearer_token_file,omitempty"` + // HTTP proxy server to use to connect to the targets. + ProxyURL URL `yaml:"proxy_url,omitempty"` + // TLSConfig to use to connect to the targets. + TLSConfig TLSConfig `yaml:"tls_config,omitempty"` +} + +// Validate validates the HTTPClientConfig to check only one of BearerToken, +// BasicAuth and BearerTokenFile is configured. +func (c *HTTPClientConfig) Validate() error { + if len(c.BearerToken) > 0 && len(c.BearerTokenFile) > 0 { + return fmt.Errorf("at most one of bearer_token & bearer_token_file must be configured") + } + if c.BasicAuth != nil && (len(c.BearerToken) > 0 || len(c.BearerTokenFile) > 0) { + return fmt.Errorf("at most one of basic_auth, bearer_token & bearer_token_file must be configured") + } + if c.BasicAuth != nil && (string(c.BasicAuth.Password) != "" && c.BasicAuth.PasswordFile != "") { + return fmt.Errorf("at most one of basic_auth password & password_file must be configured") + } + return nil +} + +// UnmarshalYAML implements the yaml.Unmarshaler interface +func (c *HTTPClientConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + type plain HTTPClientConfig + if err := unmarshal((*plain)(c)); err != nil { + return err + } + return c.Validate() +} + +// UnmarshalYAML implements the yaml.Unmarshaler interface. +func (a *BasicAuth) UnmarshalYAML(unmarshal func(interface{}) error) error { + type plain BasicAuth + return unmarshal((*plain)(a)) +} + +// NewClient returns a http.Client using the specified http.RoundTripper. +func newClient(rt http.RoundTripper) *http.Client { + return &http.Client{Transport: rt} +} + +// NewClientFromConfig returns a new HTTP client configured for the +// given config.HTTPClientConfig. The name is used as go-conntrack metric label. +func NewClientFromConfig(cfg HTTPClientConfig, name string) (*http.Client, error) { + rt, err := NewRoundTripperFromConfig(cfg, name) + if err != nil { + return nil, err + } + return newClient(rt), nil +} + +// NewRoundTripperFromConfig returns a new HTTP RoundTripper configured for the +// given config.HTTPClientConfig. The name is used as go-conntrack metric label. +func NewRoundTripperFromConfig(cfg HTTPClientConfig, name string) (http.RoundTripper, error) { + tlsConfig, err := NewTLSConfig(&cfg.TLSConfig) + if err != nil { + return nil, err + } + // The only timeout we care about is the configured scrape timeout. + // It is applied on request. So we leave out any timings here. 
+ var rt http.RoundTripper = &http.Transport{ + Proxy: http.ProxyURL(cfg.ProxyURL.URL), + MaxIdleConns: 20000, + MaxIdleConnsPerHost: 1000, // see https://github.com/golang/go/issues/13801 + DisableKeepAlives: false, + TLSClientConfig: tlsConfig, + DisableCompression: true, + // 5 minutes is typically above the maximum sane scrape interval. So we can + // use keepalive for all configurations. + IdleConnTimeout: 5 * time.Minute, + DialContext: conntrack.NewDialContextFunc( + conntrack.DialWithTracing(), + conntrack.DialWithName(name), + ), + } + + // If a bearer token is provided, create a round tripper that will set the + // Authorization header correctly on each request. + if len(cfg.BearerToken) > 0 { + rt = NewBearerAuthRoundTripper(cfg.BearerToken, rt) + } else if len(cfg.BearerTokenFile) > 0 { + rt = NewBearerAuthFileRoundTripper(cfg.BearerTokenFile, rt) + } + + if cfg.BasicAuth != nil { + rt = NewBasicAuthRoundTripper(cfg.BasicAuth.Username, cfg.BasicAuth.Password, cfg.BasicAuth.PasswordFile, rt) + } + + // Return a new configured RoundTripper. + return rt, nil +} + +type bearerAuthRoundTripper struct { + bearerToken Secret + rt http.RoundTripper +} + +// NewBearerAuthRoundTripper adds the provided bearer token to a request unless the authorization +// header has already been set. +func NewBearerAuthRoundTripper(token Secret, rt http.RoundTripper) http.RoundTripper { + return &bearerAuthRoundTripper{token, rt} +} + +func (rt *bearerAuthRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + if len(req.Header.Get("Authorization")) == 0 { + req = cloneRequest(req) + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(rt.bearerToken))) + } + return rt.rt.RoundTrip(req) +} + +type bearerAuthFileRoundTripper struct { + bearerFile string + rt http.RoundTripper +} + +// NewBearerAuthFileRoundTripper adds the bearer token read from the provided file to a request unless +// the authorization header has already been set. This file is read for every request. +func NewBearerAuthFileRoundTripper(bearerFile string, rt http.RoundTripper) http.RoundTripper { + return &bearerAuthFileRoundTripper{bearerFile, rt} +} + +func (rt *bearerAuthFileRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + if len(req.Header.Get("Authorization")) == 0 { + b, err := ioutil.ReadFile(rt.bearerFile) + if err != nil { + return nil, fmt.Errorf("unable to read bearer token file %s: %s", rt.bearerFile, err) + } + bearerToken := strings.TrimSpace(string(b)) + + req = cloneRequest(req) + req.Header.Set("Authorization", "Bearer "+bearerToken) + } + + return rt.rt.RoundTrip(req) +} + +type basicAuthRoundTripper struct { + username string + password Secret + passwordFile string + rt http.RoundTripper +} + +// NewBasicAuthRoundTripper will apply a BASIC auth authorization header to a request unless it has +// already been set. 
+func NewBasicAuthRoundTripper(username string, password Secret, passwordFile string, rt http.RoundTripper) http.RoundTripper { + return &basicAuthRoundTripper{username, password, passwordFile, rt} +} + +func (rt *basicAuthRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + if len(req.Header.Get("Authorization")) != 0 { + return rt.rt.RoundTrip(req) + } + req = cloneRequest(req) + if rt.passwordFile != "" { + bs, err := ioutil.ReadFile(rt.passwordFile) + if err != nil { + return nil, fmt.Errorf("unable to read basic auth password file %s: %s", rt.passwordFile, err) + } + req.SetBasicAuth(rt.username, strings.TrimSpace(string(bs))) + } else { + req.SetBasicAuth(rt.username, strings.TrimSpace(string(rt.password))) + } + return rt.rt.RoundTrip(req) +} + +// cloneRequest returns a clone of the provided *http.Request. +// The clone is a shallow copy of the struct and its Header map. +func cloneRequest(r *http.Request) *http.Request { + // Shallow copy of the struct. + r2 := new(http.Request) + *r2 = *r + // Deep copy of the Header. + r2.Header = make(http.Header) + for k, s := range r.Header { + r2.Header[k] = s + } + return r2 +} + +// NewTLSConfig creates a new tls.Config from the given TLSConfig. +func NewTLSConfig(cfg *TLSConfig) (*tls.Config, error) { + tlsConfig := &tls.Config{InsecureSkipVerify: cfg.InsecureSkipVerify} + + // If a CA cert is provided then let's read it in so we can validate the + // scrape target's certificate properly. + if len(cfg.CAFile) > 0 { + caCertPool := x509.NewCertPool() + // Load CA cert. + caCert, err := ioutil.ReadFile(cfg.CAFile) + if err != nil { + return nil, fmt.Errorf("unable to use specified CA cert %s: %s", cfg.CAFile, err) + } + caCertPool.AppendCertsFromPEM(caCert) + tlsConfig.RootCAs = caCertPool + } + + if len(cfg.ServerName) > 0 { + tlsConfig.ServerName = cfg.ServerName + } + // If a client cert & key is provided then configure TLS config accordingly. + if len(cfg.CertFile) > 0 && len(cfg.KeyFile) == 0 { + return nil, fmt.Errorf("client cert file %q specified without client key file", cfg.CertFile) + } else if len(cfg.KeyFile) > 0 && len(cfg.CertFile) == 0 { + return nil, fmt.Errorf("client key file %q specified without client cert file", cfg.KeyFile) + } else if len(cfg.CertFile) > 0 && len(cfg.KeyFile) > 0 { + cert, err := tls.LoadX509KeyPair(cfg.CertFile, cfg.KeyFile) + if err != nil { + return nil, fmt.Errorf("unable to use specified client cert (%s) & key (%s): %s", cfg.CertFile, cfg.KeyFile, err) + } + tlsConfig.Certificates = []tls.Certificate{cert} + } + tlsConfig.BuildNameToCertificate() + + return tlsConfig, nil +} + +// TLSConfig configures the options for TLS connections. +type TLSConfig struct { + // The CA cert to use for the targets. + CAFile string `yaml:"ca_file,omitempty"` + // The client cert file for the targets. + CertFile string `yaml:"cert_file,omitempty"` + // The client key file for the targets. + KeyFile string `yaml:"key_file,omitempty"` + // Used to verify the hostname for the targets. + ServerName string `yaml:"server_name,omitempty"` + // Disable target certificate validation. + InsecureSkipVerify bool `yaml:"insecure_skip_verify"` +} + +// UnmarshalYAML implements the yaml.Unmarshaler interface. 
+func (c *TLSConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + type plain TLSConfig + return unmarshal((*plain)(c)) +} + +func (c HTTPClientConfig) String() string { + b, err := yaml.Marshal(c) + if err != nil { + return fmt.Sprintf("", err) + } + return string(b) +} diff --git a/vendor/github.com/prometheus/common/config/http_config_test.go b/vendor/github.com/prometheus/common/config/http_config_test.go new file mode 100644 index 00000000..4639ae47 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/http_config_test.go @@ -0,0 +1,618 @@ +// Copyright 2015 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package config + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + "io/ioutil" + "net/http" + "net/http/httptest" + "reflect" + "strings" + "testing" + + "gopkg.in/yaml.v2" +) + +const ( + TLSCAChainPath = "testdata/tls-ca-chain.pem" + ServerCertificatePath = "testdata/server.crt" + ServerKeyPath = "testdata/server.key" + BarneyCertificatePath = "testdata/barney.crt" + BarneyKeyNoPassPath = "testdata/barney-no-pass.key" + MissingCA = "missing/ca.crt" + MissingCert = "missing/cert.crt" + MissingKey = "missing/secret.key" + + ExpectedMessage = "I'm here to serve you!!!" + BearerToken = "theanswertothegreatquestionoflifetheuniverseandeverythingisfortytwo" + BearerTokenFile = "testdata/bearer.token" + MissingBearerTokenFile = "missing/bearer.token" + ExpectedBearer = "Bearer " + BearerToken + ExpectedUsername = "arthurdent" + ExpectedPassword = "42" +) + +var invalidHTTPClientConfigs = []struct { + httpClientConfigFile string + errMsg string +}{ + { + httpClientConfigFile: "testdata/http.conf.bearer-token-and-file-set.bad.yml", + errMsg: "at most one of bearer_token & bearer_token_file must be configured", + }, + { + httpClientConfigFile: "testdata/http.conf.empty.bad.yml", + errMsg: "at most one of basic_auth, bearer_token & bearer_token_file must be configured", + }, + { + httpClientConfigFile: "testdata/http.conf.basic-auth.too-much.bad.yaml", + errMsg: "at most one of basic_auth password & password_file must be configured", + }, +} + +func newTestServer(handler func(w http.ResponseWriter, r *http.Request)) (*httptest.Server, error) { + testServer := httptest.NewUnstartedServer(http.HandlerFunc(handler)) + + tlsCAChain, err := ioutil.ReadFile(TLSCAChainPath) + if err != nil { + return nil, fmt.Errorf("Can't read %s", TLSCAChainPath) + } + serverCertificate, err := tls.LoadX509KeyPair(ServerCertificatePath, ServerKeyPath) + if err != nil { + return nil, fmt.Errorf("Can't load X509 key pair %s - %s", ServerCertificatePath, ServerKeyPath) + } + + rootCAs := x509.NewCertPool() + rootCAs.AppendCertsFromPEM(tlsCAChain) + + testServer.TLS = &tls.Config{ + Certificates: make([]tls.Certificate, 1), + RootCAs: rootCAs, + ClientAuth: tls.RequireAndVerifyClientCert, + ClientCAs: rootCAs} + testServer.TLS.Certificates[0] = serverCertificate + testServer.TLS.BuildNameToCertificate() + + testServer.StartTLS() + + return testServer, nil +} + +func 
TestNewClientFromConfig(t *testing.T) { + var newClientValidConfig = []struct { + clientConfig HTTPClientConfig + handler func(w http.ResponseWriter, r *http.Request) + }{ + { + clientConfig: HTTPClientConfig{ + TLSConfig: TLSConfig{ + CAFile: "", + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: true}, + }, + handler: func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, ExpectedMessage) + }, + }, { + clientConfig: HTTPClientConfig{ + TLSConfig: TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + }, + handler: func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, ExpectedMessage) + }, + }, { + clientConfig: HTTPClientConfig{ + BearerToken: BearerToken, + TLSConfig: TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + }, + handler: func(w http.ResponseWriter, r *http.Request) { + bearer := r.Header.Get("Authorization") + if bearer != ExpectedBearer { + fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)", + ExpectedBearer, bearer) + } else { + fmt.Fprint(w, ExpectedMessage) + } + }, + }, { + clientConfig: HTTPClientConfig{ + BearerTokenFile: BearerTokenFile, + TLSConfig: TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + }, + handler: func(w http.ResponseWriter, r *http.Request) { + bearer := r.Header.Get("Authorization") + if bearer != ExpectedBearer { + fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)", + ExpectedBearer, bearer) + } else { + fmt.Fprint(w, ExpectedMessage) + } + }, + }, { + clientConfig: HTTPClientConfig{ + BasicAuth: &BasicAuth{ + Username: ExpectedUsername, + Password: ExpectedPassword, + }, + TLSConfig: TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + }, + handler: func(w http.ResponseWriter, r *http.Request) { + username, password, ok := r.BasicAuth() + if !ok { + fmt.Fprintf(w, "The Authorization header wasn't set") + } else if ExpectedUsername != username { + fmt.Fprintf(w, "The expected username (%s) differs from the obtained username (%s).", ExpectedUsername, username) + } else if ExpectedPassword != password { + fmt.Fprintf(w, "The expected password (%s) differs from the obtained password (%s).", ExpectedPassword, password) + } else { + fmt.Fprint(w, ExpectedMessage) + } + }, + }, + } + + for _, validConfig := range newClientValidConfig { + testServer, err := newTestServer(validConfig.handler) + if err != nil { + t.Fatal(err.Error()) + } + defer testServer.Close() + + client, err := NewClientFromConfig(validConfig.clientConfig, "test") + if err != nil { + t.Errorf("Can't create a client from this config: %+v", validConfig.clientConfig) + continue + } + response, err := client.Get(testServer.URL) + if err != nil { + t.Errorf("Can't connect to the test server using this config: %+v", validConfig.clientConfig) + continue + } + + message, err := ioutil.ReadAll(response.Body) + response.Body.Close() + if err != nil { + t.Errorf("Can't read the server response body using this config: %+v", validConfig.clientConfig) + continue + } + + trimMessage := 
strings.TrimSpace(string(message)) + if ExpectedMessage != trimMessage { + t.Errorf("The expected message (%s) differs from the obtained message (%s) using this config: %+v", + ExpectedMessage, trimMessage, validConfig.clientConfig) + } + } +} + +func TestNewClientFromInvalidConfig(t *testing.T) { + var newClientInvalidConfig = []struct { + clientConfig HTTPClientConfig + errorMsg string + }{ + { + clientConfig: HTTPClientConfig{ + TLSConfig: TLSConfig{ + CAFile: MissingCA, + CertFile: "", + KeyFile: "", + ServerName: "", + InsecureSkipVerify: true}, + }, + errorMsg: fmt.Sprintf("unable to use specified CA cert %s:", MissingCA), + }, + } + + for _, invalidConfig := range newClientInvalidConfig { + client, err := NewClientFromConfig(invalidConfig.clientConfig, "test") + if client != nil { + t.Errorf("A client instance was returned instead of nil using this config: %+v", invalidConfig.clientConfig) + } + if err == nil { + t.Errorf("No error was returned using this config: %+v", invalidConfig.clientConfig) + } + if !strings.Contains(err.Error(), invalidConfig.errorMsg) { + t.Errorf("Expected error %s does not contain %s", err.Error(), invalidConfig.errorMsg) + } + } +} + +func TestMissingBearerAuthFile(t *testing.T) { + cfg := HTTPClientConfig{ + BearerTokenFile: MissingBearerTokenFile, + TLSConfig: TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + } + handler := func(w http.ResponseWriter, r *http.Request) { + bearer := r.Header.Get("Authorization") + if bearer != ExpectedBearer { + fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)", + ExpectedBearer, bearer) + } else { + fmt.Fprint(w, ExpectedMessage) + } + } + + testServer, err := newTestServer(handler) + if err != nil { + t.Fatal(err.Error()) + } + defer testServer.Close() + + client, err := NewClientFromConfig(cfg, "test") + if err != nil { + t.Fatal(err) + } + + _, err = client.Get(testServer.URL) + if err == nil { + t.Fatal("No error is returned here") + } + + if !strings.Contains(err.Error(), "unable to read bearer token file missing/bearer.token: open missing/bearer.token: no such file or directory") { + t.Fatal("wrong error message being returned") + } +} + +func TestBearerAuthRoundTripper(t *testing.T) { + const ( + newBearerToken = "goodbyeandthankyouforthefish" + ) + + fakeRoundTripper := NewRoundTripCheckRequest(func(req *http.Request) { + bearer := req.Header.Get("Authorization") + if bearer != ExpectedBearer { + t.Errorf("The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)", + ExpectedBearer, bearer) + } + }, nil, nil) + + // Normal flow. + bearerAuthRoundTripper := NewBearerAuthRoundTripper(BearerToken, fakeRoundTripper) + request, _ := http.NewRequest("GET", "/hitchhiker", nil) + request.Header.Set("User-Agent", "Douglas Adams mind") + bearerAuthRoundTripper.RoundTrip(request) + + // Should honor already Authorization header set. 
+ bearerAuthRoundTripperShouldNotModifyExistingAuthorization := NewBearerAuthRoundTripper(newBearerToken, fakeRoundTripper) + request, _ = http.NewRequest("GET", "/hitchhiker", nil) + request.Header.Set("Authorization", ExpectedBearer) + bearerAuthRoundTripperShouldNotModifyExistingAuthorization.RoundTrip(request) +} + +func TestBearerAuthFileRoundTripper(t *testing.T) { + const ( + newBearerToken = "goodbyeandthankyouforthefish" + ) + + fakeRoundTripper := NewRoundTripCheckRequest(func(req *http.Request) { + bearer := req.Header.Get("Authorization") + if bearer != ExpectedBearer { + t.Errorf("The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)", + ExpectedBearer, bearer) + } + }, nil, nil) + + // Normal flow. + bearerAuthRoundTripper := NewBearerAuthFileRoundTripper(BearerTokenFile, fakeRoundTripper) + request, _ := http.NewRequest("GET", "/hitchhiker", nil) + request.Header.Set("User-Agent", "Douglas Adams mind") + bearerAuthRoundTripper.RoundTrip(request) + + // Should honor already Authorization header set. + bearerAuthRoundTripperShouldNotModifyExistingAuthorization := NewBearerAuthFileRoundTripper(MissingBearerTokenFile, fakeRoundTripper) + request, _ = http.NewRequest("GET", "/hitchhiker", nil) + request.Header.Set("Authorization", ExpectedBearer) + bearerAuthRoundTripperShouldNotModifyExistingAuthorization.RoundTrip(request) +} + +func TestTLSConfig(t *testing.T) { + configTLSConfig := TLSConfig{ + CAFile: TLSCAChainPath, + CertFile: BarneyCertificatePath, + KeyFile: BarneyKeyNoPassPath, + ServerName: "localhost", + InsecureSkipVerify: false} + + tlsCAChain, err := ioutil.ReadFile(TLSCAChainPath) + if err != nil { + t.Fatalf("Can't read the CA certificate chain (%s)", + TLSCAChainPath) + } + rootCAs := x509.NewCertPool() + rootCAs.AppendCertsFromPEM(tlsCAChain) + + barneyCertificate, err := tls.LoadX509KeyPair(BarneyCertificatePath, BarneyKeyNoPassPath) + if err != nil { + t.Fatalf("Can't load the client key pair ('%s' and '%s'). 
Reason: %s", + BarneyCertificatePath, BarneyKeyNoPassPath, err) + } + + expectedTLSConfig := &tls.Config{ + RootCAs: rootCAs, + Certificates: []tls.Certificate{barneyCertificate}, + ServerName: configTLSConfig.ServerName, + InsecureSkipVerify: configTLSConfig.InsecureSkipVerify} + expectedTLSConfig.BuildNameToCertificate() + + tlsConfig, err := NewTLSConfig(&configTLSConfig) + if err != nil { + t.Fatalf("Can't create a new TLS Config from a configuration (%s).", err) + } + + if !reflect.DeepEqual(tlsConfig, expectedTLSConfig) { + t.Fatalf("Unexpected TLS Config result: \n\n%+v\n expected\n\n%+v", tlsConfig, expectedTLSConfig) + } +} + +func TestTLSConfigEmpty(t *testing.T) { + configTLSConfig := TLSConfig{ + CAFile: "", + CertFile: "", + KeyFile: "", + ServerName: "", + InsecureSkipVerify: true} + + expectedTLSConfig := &tls.Config{ + InsecureSkipVerify: configTLSConfig.InsecureSkipVerify} + expectedTLSConfig.BuildNameToCertificate() + + tlsConfig, err := NewTLSConfig(&configTLSConfig) + if err != nil { + t.Fatalf("Can't create a new TLS Config from a configuration (%s).", err) + } + + if !reflect.DeepEqual(tlsConfig, expectedTLSConfig) { + t.Fatalf("Unexpected TLS Config result: \n\n%+v\n expected\n\n%+v", tlsConfig, expectedTLSConfig) + } +} + +func TestTLSConfigInvalidCA(t *testing.T) { + var invalidTLSConfig = []struct { + configTLSConfig TLSConfig + errorMessage string + }{ + { + configTLSConfig: TLSConfig{ + CAFile: MissingCA, + CertFile: "", + KeyFile: "", + ServerName: "", + InsecureSkipVerify: false}, + errorMessage: fmt.Sprintf("unable to use specified CA cert %s:", MissingCA), + }, { + configTLSConfig: TLSConfig{ + CAFile: "", + CertFile: MissingCert, + KeyFile: BarneyKeyNoPassPath, + ServerName: "", + InsecureSkipVerify: false}, + errorMessage: fmt.Sprintf("unable to use specified client cert (%s) & key (%s):", MissingCert, BarneyKeyNoPassPath), + }, { + configTLSConfig: TLSConfig{ + CAFile: "", + CertFile: BarneyCertificatePath, + KeyFile: MissingKey, + ServerName: "", + InsecureSkipVerify: false}, + errorMessage: fmt.Sprintf("unable to use specified client cert (%s) & key (%s):", BarneyCertificatePath, MissingKey), + }, + } + + for _, anInvalididTLSConfig := range invalidTLSConfig { + tlsConfig, err := NewTLSConfig(&anInvalididTLSConfig.configTLSConfig) + if tlsConfig != nil && err == nil { + t.Errorf("The TLS Config could be created even with this %+v", anInvalididTLSConfig.configTLSConfig) + continue + } + if !strings.Contains(err.Error(), anInvalididTLSConfig.errorMessage) { + t.Errorf("The expected error should contain %s, but got %s", anInvalididTLSConfig.errorMessage, err) + } + } +} + +func TestBasicAuthNoPassword(t *testing.T) { + cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.basic-auth.no-password.yaml") + if err != nil { + t.Errorf("Error loading HTTP client config: %v", err) + } + client, err := NewClientFromConfig(*cfg, "test") + if err != nil { + t.Errorf("Error creating HTTP Client: %v", err) + } + + rt, ok := client.Transport.(*basicAuthRoundTripper) + if !ok { + t.Fatalf("Error casting to basic auth transport, %v", client.Transport) + } + + if rt.username != "user" { + t.Errorf("Bad HTTP client username: %s", rt.username) + } + if string(rt.password) != "" { + t.Errorf("Expected empty HTTP client password: %s", rt.password) + } + if string(rt.passwordFile) != "" { + t.Errorf("Expected empty HTTP client passwordFile: %s", rt.passwordFile) + } +} + +func TestBasicAuthNoUsername(t *testing.T) { + cfg, _, err := 
LoadHTTPConfigFile("testdata/http.conf.basic-auth.no-username.yaml") + if err != nil { + t.Errorf("Error loading HTTP client config: %v", err) + } + client, err := NewClientFromConfig(*cfg, "test") + if err != nil { + t.Errorf("Error creating HTTP Client: %v", err) + } + + rt, ok := client.Transport.(*basicAuthRoundTripper) + if !ok { + t.Fatalf("Error casting to basic auth transport, %v", client.Transport) + } + + if rt.username != "" { + t.Errorf("Got unexpected username: %s", rt.username) + } + if string(rt.password) != "secret" { + t.Errorf("Unexpected HTTP client password: %s", string(rt.password)) + } + if string(rt.passwordFile) != "" { + t.Errorf("Expected empty HTTP client passwordFile: %s", rt.passwordFile) + } +} + +func TestBasicAuthPasswordFile(t *testing.T) { + cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.basic-auth.good.yaml") + if err != nil { + t.Errorf("Error loading HTTP client config: %v", err) + } + client, err := NewClientFromConfig(*cfg, "test") + if err != nil { + t.Errorf("Error creating HTTP Client: %v", err) + } + + rt, ok := client.Transport.(*basicAuthRoundTripper) + if !ok { + t.Errorf("Error casting to basic auth transport, %v", client.Transport) + } + + if rt.username != "user" { + t.Errorf("Bad HTTP client username: %s", rt.username) + } + if string(rt.password) != "" { + t.Errorf("Bad HTTP client password: %s", rt.password) + } + if string(rt.passwordFile) != "testdata/basic-auth-password" { + t.Errorf("Bad HTTP client passwordFile: %s", rt.passwordFile) + } +} + +func TestHideHTTPClientConfigSecrets(t *testing.T) { + c, _, err := LoadHTTPConfigFile("testdata/http.conf.good.yml") + if err != nil { + t.Errorf("Error parsing %s: %s", "testdata/http.conf.good.yml", err) + } + + // String method must not reveal authentication credentials. + s := c.String() + if strings.Contains(s, "mysecret") { + t.Fatal("http client config's String method reveals authentication credentials.") + } +} + +func TestValidateHTTPConfig(t *testing.T) { + cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.good.yml") + if err != nil { + t.Errorf("Error loading HTTP client config: %v", err) + } + err = cfg.Validate() + if err != nil { + t.Fatalf("Error validating %s: %s", "testdata/http.conf.good.yml", err) + } +} + +func TestInvalidHTTPConfigs(t *testing.T) { + for _, ee := range invalidHTTPClientConfigs { + _, _, err := LoadHTTPConfigFile(ee.httpClientConfigFile) + if err == nil { + t.Error("Expected error with config but got none") + continue + } + if !strings.Contains(err.Error(), ee.errMsg) { + t.Errorf("Expected error for invalid HTTP client configuration to contain %q but got: %s", ee.errMsg, err) + } + } +} + +// LoadHTTPConfig parses the YAML input s into a HTTPClientConfig. +func LoadHTTPConfig(s string) (*HTTPClientConfig, error) { + cfg := &HTTPClientConfig{} + err := yaml.UnmarshalStrict([]byte(s), cfg) + if err != nil { + return nil, err + } + return cfg, nil +} + +// LoadHTTPConfigFile parses the given YAML file into a HTTPClientConfig. 
+func LoadHTTPConfigFile(filename string) (*HTTPClientConfig, []byte, error) { + content, err := ioutil.ReadFile(filename) + if err != nil { + return nil, nil, err + } + cfg, err := LoadHTTPConfig(string(content)) + if err != nil { + return nil, nil, err + } + return cfg, content, nil +} + +type roundTrip struct { + theResponse *http.Response + theError error +} + +func (rt *roundTrip) RoundTrip(r *http.Request) (*http.Response, error) { + return rt.theResponse, rt.theError +} + +type roundTripCheckRequest struct { + checkRequest func(*http.Request) + roundTrip +} + +func (rt *roundTripCheckRequest) RoundTrip(r *http.Request) (*http.Response, error) { + rt.checkRequest(r) + return rt.theResponse, rt.theError +} + +// NewRoundTripCheckRequest creates a new instance of a type that implements http.RoundTripper, +// which before returning theResponse and theError, executes checkRequest against a http.Request. +func NewRoundTripCheckRequest(checkRequest func(*http.Request), theResponse *http.Response, theError error) http.RoundTripper { + return &roundTripCheckRequest{ + checkRequest: checkRequest, + roundTrip: roundTrip{ + theResponse: theResponse, + theError: theError}} +} diff --git a/vendor/github.com/prometheus/common/config/testdata/barney-no-pass.key b/vendor/github.com/prometheus/common/config/testdata/barney-no-pass.key new file mode 100644 index 00000000..b8e44f55 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/barney-no-pass.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEAxmYjfBZhZbAup9uSULehoqPCv/U+77ETxUNyS2nviWEHDAb/ +pFS8Btx4oCQ1ECVSyxcUmXSlrvDjMY4sisOHvndNRlGi274M5a8Q5yD1BUqvxq3u +XB/+SYNVShBzaswrSjpzMe89AlOPxPjnE14OXh00j2hHunOG4jhlWgJnY0YyvUQQ +YWO6KrmKMiZ4MgmY0SWh/ZhlkDJPtkp3aUVM2sheCru/70E9viLGfdlhc2pIMshy +wNp4/5IkHBZwbqXFFGX4sRtSXI/auZNvcHOBse+3e3BonWvBWS2lIYbzpX3vLB7B +E9BGIxWn1fgNQr14yFPaccSszBvgtmEUONolnwIDAQABAoIBAQC7nBhQHgXKGBl2 +Z97rb0pstrjRtsLl/Cg68LWi9LEr0tHMIM4bgnkvb8qtfK+k7fZl0BSNrE2EqYvd +75jVO2MgzEYJieccLpKZm7u7JGIut9qSYSU2fpaCw6uiVv4dbqY9EhqejKG/km8w +j0JMATRK8Qkj1zOE7/wL7dKBlCZaK3u+OT17spuA/21PG/cLiPaSGSA3CU/eqbkU +BD6JeBxp33XNTytwWoOvarsigpL0dGqQ7+qhGq6t69qFfWoe9rimV7Ya+tB9zF/U +HzOIEspOYvzxe+C7VJjlVFr4haMYmsrO9qRUJ2ofp49OLVdfEANsdVISSvS63BEp +gBZN8Ko5AoGBAO1z8y8YCsI+2vBG6nxZ1eMba0KHi3bS8db1TaenJBV22w6WQATh +hEaU6VLMFcMvrOUjXN/7HJfnEMyvFT6gb9obPDVEMZw88s9lVN6njgGLZR/jodyN +7N7utLopN043Ra0WfEILAXPSz8esT1yn05OZV6AFHxJEWMrX3/4+spCLAoGBANXl +RomieVY4u3FF/uzhbzKNNb9ETxrQuexfbangKp5eLniwnr2SQWIbyPzeurwp15J8 +HvxB2vpNvs1khSwNx9dQfMdiUVPGLWj7MimAHTHsnQ9LVV9W28ghuSWbjQDGTUt1 +WCCu1MkKIOzupbi+zgsNlI33yilRQKAb9SRxdy29AoGBAOKpvyZiPcrkMxwPpb/k +BU7QGpgcSR25CQ+Xg3QZEVHH7h1DgYLnPtwdQ4g8tj1mohTsp7hKvSWndRrdulrY +zUyWmOeD3BN2/pTI9rW/nceNp49EPHsLo2O+2xelRlzMWB98ikqEtPM59gt1SSB6 +N3X6d3GR0fIe+d9PKEtK0Cs3AoGAZ9r8ReXSvm+ra5ON9Nx8znHMEAON2TpRnBi1 +uY7zgpO+QrGXUfqKrqVJEKbgym4SkribnuYm+fP32eid1McYKk6VV4ZAcMm/0MJv +F8Fx64S0ufFdEX6uFl1xdXYyn5apfyMJ2EyrWrYFSKWTZ8GVb753S/tteGRQWa1Z +eQly0Y0CgYEAnI6G9KFvXI+MLu5y2LPYAwsesDFzaWwyDl96ioQTA9hNSrjR33Vw +xwpiEe0T/WKF8NQ0QWnrQDbTvuCvZUK37TVxscYWuItL6vnBrYqr4Ck0j1BcGwV5 +jT581A/Vw8JJiR/vfcxgmrFYqoUmkMKDmCN1oImfz09GtQ4jQ1rlxz8= +-----END RSA PRIVATE KEY----- diff --git a/vendor/github.com/prometheus/common/config/testdata/barney.crt b/vendor/github.com/prometheus/common/config/testdata/barney.crt new file mode 100644 index 00000000..e2f95048 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/barney.crt @@ -0,0 +1,96 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 2 
(0x2) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA + Validity + Not Before: Jul 13 04:02:47 2017 GMT + Not After : Jul 13 04:02:47 2019 GMT + Subject: C=NO, O=Telenor AS, OU=Support, CN=Barney Rubble + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:c6:66:23:7c:16:61:65:b0:2e:a7:db:92:50:b7: + a1:a2:a3:c2:bf:f5:3e:ef:b1:13:c5:43:72:4b:69: + ef:89:61:07:0c:06:ff:a4:54:bc:06:dc:78:a0:24: + 35:10:25:52:cb:17:14:99:74:a5:ae:f0:e3:31:8e: + 2c:8a:c3:87:be:77:4d:46:51:a2:db:be:0c:e5:af: + 10:e7:20:f5:05:4a:af:c6:ad:ee:5c:1f:fe:49:83: + 55:4a:10:73:6a:cc:2b:4a:3a:73:31:ef:3d:02:53: + 8f:c4:f8:e7:13:5e:0e:5e:1d:34:8f:68:47:ba:73: + 86:e2:38:65:5a:02:67:63:46:32:bd:44:10:61:63: + ba:2a:b9:8a:32:26:78:32:09:98:d1:25:a1:fd:98: + 65:90:32:4f:b6:4a:77:69:45:4c:da:c8:5e:0a:bb: + bf:ef:41:3d:be:22:c6:7d:d9:61:73:6a:48:32:c8: + 72:c0:da:78:ff:92:24:1c:16:70:6e:a5:c5:14:65: + f8:b1:1b:52:5c:8f:da:b9:93:6f:70:73:81:b1:ef: + b7:7b:70:68:9d:6b:c1:59:2d:a5:21:86:f3:a5:7d: + ef:2c:1e:c1:13:d0:46:23:15:a7:d5:f8:0d:42:bd: + 78:c8:53:da:71:c4:ac:cc:1b:e0:b6:61:14:38:da: + 25:9f + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Key Usage: critical + Digital Signature + X509v3 Basic Constraints: + CA:FALSE + X509v3 Extended Key Usage: + TLS Web Client Authentication + X509v3 Subject Key Identifier: + F4:17:02:DD:1B:01:AB:C5:BC:17:A4:5C:4B:75:8E:EC:B1:E0:C8:F1 + X509v3 Authority Key Identifier: + keyid:AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71 + + Authority Information Access: + CA Issuers - URI:http://green.no/ca/tls-ca.cer + + X509v3 CRL Distribution Points: + + Full Name: + URI:http://green.no/ca/tls-ca.crl + + X509v3 Subject Alternative Name: + email:barney@telenor.no + Signature Algorithm: sha1WithRSAEncryption + 96:9a:c5:41:8a:2f:4a:c4:80:d9:2b:1a:cf:07:85:e9:b6:18: + 01:20:41:b9:c3:d4:ca:d3:2d:66:c3:1d:52:7f:25:d7:92:0c: + e9:a9:ae:e6:2e:fa:9d:0a:cf:84:b9:03:f2:63:e3:d3:c9:70: + 6a:ac:04:5e:a9:2d:a2:43:7a:34:60:f7:a9:32:e1:48:ec:c6: + 03:ac:b3:06:2e:48:6e:d0:35:11:31:3d:0c:04:66:41:e6:b2: + ec:8c:68:f8:e4:bc:47:85:39:60:69:a9:8a:ee:2f:56:88:8a: + 19:45:d0:84:8e:c2:27:2c:82:9c:07:6c:34:ae:41:61:63:f9: + 32:cb:8b:33:ea:2c:15:5f:f9:35:b0:3c:51:4d:5f:30:de:0b: + 88:28:94:79:f3:bd:69:37:ad:12:20:e1:6b:1d:b6:77:d9:83: + db:81:a4:53:6c:0f:6a:17:5e:2b:c1:94:c6:42:e3:73:cd:9e: + 79:1b:8c:89:cd:da:ce:b0:f4:21:c5:32:25:04:6e:68:9f:a7: + ca:f4:c5:86:e5:4e:d9:fd:69:73:e6:15:50:6e:76:0f:73:5e: + 7a:a3:f4:dc:15:4a:ab:bb:3c:9a:fa:9f:01:7a:5c:47:a9:a3: + 68:1c:49:e0:37:37:77:af:87:07:16:e4:e1:d7:98:39:15:a6: + 51:5d:4c:db +-----BEGIN CERTIFICATE----- +MIIEITCCAwmgAwIBAgIBAjANBgkqhkiG9w0BAQUFADBdMQswCQYDVQQGEwJOTzER +MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1 +dGhvcml0eTEVMBMGA1UEAwwMR3JlZW4gVExTIENBMB4XDTE3MDcxMzA0MDI0N1oX +DTE5MDcxMzA0MDI0N1owTDELMAkGA1UEBhMCTk8xEzARBgNVBAoMClRlbGVub3Ig +QVMxEDAOBgNVBAsMB1N1cHBvcnQxFjAUBgNVBAMMDUJhcm5leSBSdWJibGUwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGZiN8FmFlsC6n25JQt6Gio8K/ +9T7vsRPFQ3JLae+JYQcMBv+kVLwG3HigJDUQJVLLFxSZdKWu8OMxjiyKw4e+d01G +UaLbvgzlrxDnIPUFSq/Gre5cH/5Jg1VKEHNqzCtKOnMx7z0CU4/E+OcTXg5eHTSP +aEe6c4biOGVaAmdjRjK9RBBhY7oquYoyJngyCZjRJaH9mGWQMk+2SndpRUzayF4K +u7/vQT2+IsZ92WFzakgyyHLA2nj/kiQcFnBupcUUZfixG1Jcj9q5k29wc4Gx77d7 +cGida8FZLaUhhvOlfe8sHsET0EYjFafV+A1CvXjIU9pxxKzMG+C2YRQ42iWfAgMB +AAGjgfwwgfkwDgYDVR0PAQH/BAQDAgeAMAkGA1UdEwQCMAAwEwYDVR0lBAwwCgYI 
+KwYBBQUHAwIwHQYDVR0OBBYEFPQXAt0bAavFvBekXEt1juyx4MjxMB8GA1UdIwQY +MBaAFK5CiHXdBaaOSH9Qafm3NCNJuLRxMDkGCCsGAQUFBwEBBC0wKzApBggrBgEF +BQcwAoYdaHR0cDovL2dyZWVuLm5vL2NhL3Rscy1jYS5jZXIwLgYDVR0fBCcwJTAj +oCGgH4YdaHR0cDovL2dyZWVuLm5vL2NhL3Rscy1jYS5jcmwwHAYDVR0RBBUwE4ER +YmFybmV5QHRlbGVub3Iubm8wDQYJKoZIhvcNAQEFBQADggEBAJaaxUGKL0rEgNkr +Gs8Hhem2GAEgQbnD1MrTLWbDHVJ/JdeSDOmpruYu+p0Kz4S5A/Jj49PJcGqsBF6p +LaJDejRg96ky4UjsxgOsswYuSG7QNRExPQwEZkHmsuyMaPjkvEeFOWBpqYruL1aI +ihlF0ISOwicsgpwHbDSuQWFj+TLLizPqLBVf+TWwPFFNXzDeC4golHnzvWk3rRIg +4WsdtnfZg9uBpFNsD2oXXivBlMZC43PNnnkbjInN2s6w9CHFMiUEbmifp8r0xYbl +Ttn9aXPmFVBudg9zXnqj9NwVSqu7PJr6nwF6XEepo2gcSeA3N3evhwcW5OHXmDkV +plFdTNs= +-----END CERTIFICATE----- diff --git a/vendor/github.com/prometheus/common/config/testdata/basic-auth-password b/vendor/github.com/prometheus/common/config/testdata/basic-auth-password new file mode 100644 index 00000000..323fae03 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/basic-auth-password @@ -0,0 +1 @@ +foobar diff --git a/vendor/github.com/prometheus/common/config/testdata/bearer.token b/vendor/github.com/prometheus/common/config/testdata/bearer.token new file mode 100644 index 00000000..3367abff --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/bearer.token @@ -0,0 +1 @@ +theanswertothegreatquestionoflifetheuniverseandeverythingisfortytwo diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.good.yaml b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.good.yaml new file mode 100644 index 00000000..5e7aa41f --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.good.yaml @@ -0,0 +1,3 @@ +basic_auth: + username: user + password_file: testdata/basic-auth-password diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-password.yaml b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-password.yaml new file mode 100644 index 00000000..38faad35 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-password.yaml @@ -0,0 +1,2 @@ +basic_auth: + username: user diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-username.yaml b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-username.yaml new file mode 100644 index 00000000..86e29037 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-username.yaml @@ -0,0 +1,2 @@ +basic_auth: + password: secret diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.too-much.bad.yaml b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.too-much.bad.yaml new file mode 100644 index 00000000..7cfb9220 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.too-much.bad.yaml @@ -0,0 +1,4 @@ +basic_auth: + username: user + password: foo + password_file: testdata/basic-auth-password diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.bearer-token-and-file-set.bad.yml b/vendor/github.com/prometheus/common/config/testdata/http.conf.bearer-token-and-file-set.bad.yml new file mode 100644 index 00000000..c613bacb --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.bearer-token-and-file-set.bad.yml @@ -0,0 +1,5 @@ +basic_auth: + username: username + password: "mysecret" +bearer_token: mysecret +bearer_token_file: file diff 
--git a/vendor/github.com/prometheus/common/config/testdata/http.conf.empty.bad.yml b/vendor/github.com/prometheus/common/config/testdata/http.conf.empty.bad.yml new file mode 100644 index 00000000..ea2811f7 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.empty.bad.yml @@ -0,0 +1,4 @@ +basic_auth: + username: username + password: mysecret +bearer_token_file: file diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.good.yml b/vendor/github.com/prometheus/common/config/testdata/http.conf.good.yml new file mode 100644 index 00000000..46ca6390 --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.good.yml @@ -0,0 +1,4 @@ +basic_auth: + username: username + password: "mysecret" +proxy_url: "http://remote.host" diff --git a/vendor/github.com/prometheus/common/config/testdata/http.conf.invalid-bearer-token-file.bad.yml b/vendor/github.com/prometheus/common/config/testdata/http.conf.invalid-bearer-token-file.bad.yml new file mode 100644 index 00000000..4b1349bf --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/http.conf.invalid-bearer-token-file.bad.yml @@ -0,0 +1 @@ +bearer_token_file: file diff --git a/vendor/github.com/prometheus/common/config/testdata/server.crt b/vendor/github.com/prometheus/common/config/testdata/server.crt new file mode 100644 index 00000000..87ad202f --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/server.crt @@ -0,0 +1,96 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 4 (0x4) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA + Validity + Not Before: Jul 26 12:47:08 2017 GMT + Not After : Jul 26 12:47:08 2019 GMT + Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:97:43:c5:f6:24:b8:ce:30:12:70:ea:17:9c:c0: + ce:f2:ef:58:8b:12:7d:46:5e:01:f1:1a:93:b2:3e: + d8:cf:99:bc:10:32:f1:12:b0:ef:00:6c:d6:c4:45: + 85:a8:33:7b:cd:ec:8f:4a:92:d0:5a:4a:41:69:7f: + e3:dd:7e:71:d2:21:9c:df:43:b5:6c:60:bb:2a:12: + a8:08:cf:c5:ee:08:7d:48:ea:4b:54:e4:82:d9:88: + b0:b8:5e:02:12:cb:0e:09:99:b7:5f:42:b6:d7:26: + 34:0f:4a:e7:fc:ac:9c:59:cd:a1:50:4c:88:5f:f1: + d2:7e:5b:21:41:f0:37:50:80:48:71:50:26:61:26: + 79:64:4b:7e:91:8d:0e:f4:27:fe:19:80:bf:39:55: + b7:f3:d0:cd:61:6c:d8:c1:c7:d3:26:77:92:1a:14: + 42:56:cb:bc:fd:1a:4a:eb:17:d8:8d:af:d1:c0:46: + 9f:f0:40:5e:0e:34:2f:e7:db:be:66:fd:89:0b:6b: + 8c:71:c1:0b:0a:c5:c4:c4:eb:7f:44:c1:75:36:23: + fd:ed:b6:ee:87:d9:88:47:e1:4b:7c:60:53:e7:85: + 1c:2f:82:4b:2b:5e:63:1a:49:17:36:2c:fc:39:23: + 49:22:4d:43:b5:51:22:12:24:9e:31:44:d8:16:4e: + a8:eb + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Key Usage: critical + Digital Signature, Key Encipherment + X509v3 Basic Constraints: + CA:FALSE + X509v3 Extended Key Usage: + TLS Web Server Authentication, TLS Web Client Authentication + X509v3 Subject Key Identifier: + 70:A9:FB:44:66:3C:63:96:E6:05:B2:74:47:C8:18:7E:43:6D:EE:8B + X509v3 Authority Key Identifier: + keyid:AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71 + + Authority Information Access: + CA Issuers - URI:http://green.no/ca/tls-ca.cer + + X509v3 CRL Distribution Points: + + Full Name: + URI:http://green.no/ca/tls-ca.crl + + X509v3 Subject Alternative Name: + IP Address:127.0.0.1, IP Address:127.0.0.0, DNS:localhost + Signature Algorithm: sha1WithRSAEncryption + 
56:1e:b8:52:ba:f5:72:42:ad:15:71:c1:5e:00:63:c9:4d:56: + f2:8d:a3:a9:91:db:d0:b5:1b:88:80:93:80:28:48:b2:d0:a9: + d0:ea:de:40:78:cc:57:8c:00:b8:65:99:68:95:98:9b:fb:a2: + 43:21:ea:00:37:01:77:c7:3b:1a:ec:58:2d:25:9c:ad:23:41: + 5e:ae:fd:ac:2f:26:81:b8:a7:49:9b:5a:10:fe:ad:c3:86:ab: + 59:67:b0:c7:81:72:95:60:b5:cb:fc:9f:ad:27:16:50:85:76: + 33:16:20:2c:1f:c6:14:09:0c:48:9f:c0:19:16:c9:fa:b0:d8: + bf:b7:8d:a7:aa:eb:fe:f8:6f:dd:2b:83:ee:c7:8a:df:c8:59: + e6:2e:13:1f:57:cc:6f:31:db:f7:b7:5c:3f:78:ad:22:2c:48: + bb:6d:c4:ab:dc:c1:76:34:29:d9:1e:67:e0:ac:37:2b:90:f9: + 71:bd:cf:a1:01:b9:eb:0b:0b:79:2e:8b:52:3d:8e:13:97:c8: + 05:a3:ef:68:82:49:12:2a:25:1a:48:49:b8:7c:3c:66:0d:74: + f9:00:8c:5b:57:d7:76:b1:26:95:86:b2:2e:a3:b2:9c:e0:eb: + 2d:fc:77:03:8f:cd:56:46:3a:c9:6a:fa:72:e3:19:d8:ef:de: + 4b:36:95:79 +-----BEGIN CERTIFICATE----- +MIIEQjCCAyqgAwIBAgIBBDANBgkqhkiG9w0BAQUFADBdMQswCQYDVQQGEwJOTzER +MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1 +dGhvcml0eTEVMBMGA1UEAwwMR3JlZW4gVExTIENBMB4XDTE3MDcyNjEyNDcwOFoX +DTE5MDcyNjEyNDcwOFowXTELMAkGA1UEBhMCTk8xETAPBgNVBAoMCEdyZWVuIEFT +MSQwIgYDVQQLDBtHcmVlbiBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFTATBgNVBAMM +DEdyZWVuIFRMUyBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJdD +xfYkuM4wEnDqF5zAzvLvWIsSfUZeAfEak7I+2M+ZvBAy8RKw7wBs1sRFhagze83s +j0qS0FpKQWl/491+cdIhnN9DtWxguyoSqAjPxe4IfUjqS1TkgtmIsLheAhLLDgmZ +t19CttcmNA9K5/ysnFnNoVBMiF/x0n5bIUHwN1CASHFQJmEmeWRLfpGNDvQn/hmA +vzlVt/PQzWFs2MHH0yZ3khoUQlbLvP0aSusX2I2v0cBGn/BAXg40L+fbvmb9iQtr +jHHBCwrFxMTrf0TBdTYj/e227ofZiEfhS3xgU+eFHC+CSyteYxpJFzYs/DkjSSJN +Q7VRIhIknjFE2BZOqOsCAwEAAaOCAQswggEHMA4GA1UdDwEB/wQEAwIFoDAJBgNV +HRMEAjAAMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQU +cKn7RGY8Y5bmBbJ0R8gYfkNt7oswHwYDVR0jBBgwFoAUrkKIdd0Fpo5If1Bp+bc0 +I0m4tHEwOQYIKwYBBQUHAQEELTArMCkGCCsGAQUFBzAChh1odHRwOi8vZ3JlZW4u +bm8vY2EvdGxzLWNhLmNlcjAuBgNVHR8EJzAlMCOgIaAfhh1odHRwOi8vZ3JlZW4u +bm8vY2EvdGxzLWNhLmNybDAgBgNVHREEGTAXhwR/AAABhwR/AAAAgglsb2NhbGhv +c3QwDQYJKoZIhvcNAQEFBQADggEBAFYeuFK69XJCrRVxwV4AY8lNVvKNo6mR29C1 +G4iAk4AoSLLQqdDq3kB4zFeMALhlmWiVmJv7okMh6gA3AXfHOxrsWC0lnK0jQV6u +/awvJoG4p0mbWhD+rcOGq1lnsMeBcpVgtcv8n60nFlCFdjMWICwfxhQJDEifwBkW +yfqw2L+3jaeq6/74b90rg+7Hit/IWeYuEx9XzG8x2/e3XD94rSIsSLttxKvcwXY0 +KdkeZ+CsNyuQ+XG9z6EBuesLC3kui1I9jhOXyAWj72iCSRIqJRpISbh8PGYNdPkA +jFtX13axJpWGsi6jspzg6y38dwOPzVZGOslq+nLjGdjv3ks2lXk= +-----END CERTIFICATE----- diff --git a/vendor/github.com/prometheus/common/config/testdata/server.key b/vendor/github.com/prometheus/common/config/testdata/server.key new file mode 100644 index 00000000..126c1b5d --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/server.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCXQ8X2JLjOMBJw +6hecwM7y71iLEn1GXgHxGpOyPtjPmbwQMvESsO8AbNbERYWoM3vN7I9KktBaSkFp +f+PdfnHSIZzfQ7VsYLsqEqgIz8XuCH1I6ktU5ILZiLC4XgISyw4JmbdfQrbXJjQP +Suf8rJxZzaFQTIhf8dJ+WyFB8DdQgEhxUCZhJnlkS36RjQ70J/4ZgL85Vbfz0M1h +bNjBx9Mmd5IaFEJWy7z9GkrrF9iNr9HARp/wQF4ONC/n275m/YkLa4xxwQsKxcTE +639EwXU2I/3ttu6H2YhH4Ut8YFPnhRwvgksrXmMaSRc2LPw5I0kiTUO1USISJJ4x +RNgWTqjrAgMBAAECggEAVurwo4FyV7gzwIIi00XPJLT3ceJL7dUy1HHrEG8gchnq +gHxlHdJhYyMnPVydcosyxp75r2YxJtCoSZDdRHbVvGLoGzpy0zW6FnDl8TpCh4aF +RxKp+rvbnFf5A9ew5U+cX1PelHRnT7V6EJeAOiaNKOUJnnR7oHX59/UxZQw9HJnX +3H4xUdRDmSS3BGKXEswbd7beQjqJtEIkbConfaw32yEod0w2MC0LI4miZ87/6Hsk +pyvfpeYxXp4z3BTvFBbf/GEBFuozu63VWHayB9PDmEN/TlphoQpJQihdR2r1lz/H +I5QwVlFTDvUSFitNLu+FoaHOfgLprQndbojBXb+tcQKBgQDHCPyM4V7k97RvJgmB +ELgZiDYufDrjRLXvFzrrZ7ySU3N+nx3Gz/EhtgbHicDjnRVagHBIwi/QAfBJksCd 
+xcioY5k2OW+8PSTsfFZTAA6XwJp/LGfJik/JjvAVv5CnxBu9lYG4WiSBJFp59ojC +zTmfEuB4GPwrjQvzjlqaSpij9QKBgQDCjriwAB2UJIdlgK+DkryLqgim5I4cteB3 ++juVKz+S8ufFmVvmIXkyDcpyy/26VLC6esy8dV0JoWc4EeitoJvQD1JVZ5+CBTY+ +r9umx18oe2A/ZgcEf/A3Zd94jM1MwriF6YC+eIOhwhpi7T1xTLf3hc9B0OJ5B1mA +vob9rGDtXwKBgD4rkW+UCictNIAvenKFPWxEPuBgT6ij0sx/DhlwCtgOFxprK0rp +syFbkVyMq+KtM3lUez5O4c5wfJUOsPnXSOlISxhD8qHy23C/GdvNPcGrGNc2kKjE +ek20R0wTzWSJ/jxG0gE6rwJjz5sfJfLrVd9ZbyI0c7hK03vdcHGXcXxtAoGAeGHl +BwnbQ3niyTx53VijD2wTVGjhQgSLstEDowYSnTNtk8eTpG6b1gvQc32jLnMOsyQe +oJGiEr5q5re2GBDjuDZyxGOMv9/Hs7wOlkCQsbS9Vh0kRHWBRlXjk2zT7yYhFMLp +pXFeSW2X9BRFS2CkCCUkm93K9AZHLDE3x6ishNMCgYEAsDsUCzGhI49Aqe+CMP2l +WPZl7SEMYS5AtdC5sLtbLYBl8+rMXVGL2opKXqVFYBYkqMJiHGdX3Ub6XSVKLYkN +vm4PWmlQS24ZT+jlUl4jk6JU6SAlM/o6ixZl5KNR7yQm6zN2O/RHDeYm0urUQ9tF +9dux7LbIFeOoJmoDTWG2+fI= +-----END PRIVATE KEY----- diff --git a/vendor/github.com/prometheus/common/config/testdata/tls-ca-chain.pem b/vendor/github.com/prometheus/common/config/testdata/tls-ca-chain.pem new file mode 100644 index 00000000..03e4189e --- /dev/null +++ b/vendor/github.com/prometheus/common/config/testdata/tls-ca-chain.pem @@ -0,0 +1,172 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 2 (0x2) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA + Validity + Not Before: Jul 13 03:47:20 2017 GMT + Not After : Jul 13 03:47:20 2027 GMT + Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:5a:b3:7a:7f:6a:5b:e9:ee:62:ee:4f:61:42: + 79:93:06:bf:81:fc:9a:1f:b5:80:83:7c:b3:a6:94: + 54:58:8a:b1:74:cb:c3:b8:3c:23:a8:69:1f:ca:2b: + af:be:97:ba:31:73:b5:b8:ce:d9:bf:bf:9a:7a:cf: + 3a:64:51:83:c9:36:d2:f7:3b:3a:0e:4c:c7:66:2e: + bf:1a:df:ce:10:aa:3d:0f:19:74:03:7e:b5:10:bb: + e8:37:bd:62:f0:42:2d:df:3d:ca:70:50:10:17:ce: + a9:ec:55:8e:87:6f:ce:9a:04:36:14:96:cb:d1:a5: + 48:d5:d2:87:02:62:93:4e:21:4a:ff:be:44:f1:d2: + 7e:ed:74:da:c2:51:26:8e:03:a0:c2:bd:bd:5f:b0: + 50:11:78:fd:ab:1d:04:86:6c:c1:8d:20:bd:05:5f: + 51:67:c6:d3:07:95:92:2d:92:90:00:c6:9f:2d:dd: + 36:5c:dc:78:10:7c:f6:68:39:1d:2c:e0:e1:26:64: + 4f:36:34:66:a7:84:6a:90:15:3a:94:b7:79:b1:47: + f5:d2:51:95:54:bf:92:76:9a:b9:88:ee:63:f9:6c: + 0d:38:c6:b6:1c:06:43:ed:24:1d:bb:6c:72:48:cc: + 8c:f4:35:bc:43:fe:a6:96:4c:31:5f:82:0d:0d:20: + 2a:3d + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Key Usage: critical + Certificate Sign, CRL Sign + X509v3 Basic Constraints: critical + CA:TRUE, pathlen:0 + X509v3 Subject Key Identifier: + AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71 + X509v3 Authority Key Identifier: + keyid:60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5 + + Authority Information Access: + CA Issuers - URI:http://green.no/ca/root-ca.cer + + X509v3 CRL Distribution Points: + + Full Name: + URI:http://green.no/ca/root-ca.crl + + Signature Algorithm: sha1WithRSAEncryption + 15:a7:ac:d7:25:9e:2a:d4:d1:14:b4:99:38:3d:2f:73:61:2a: + d9:b6:8b:13:ea:fe:db:78:d9:0a:6c:df:26:6e:c1:d5:4a:97: + 42:19:dd:97:05:03:e4:2b:fc:1e:1f:38:3c:4e:b0:3b:8c:38: + ad:2b:65:fa:35:2d:81:8e:e0:f6:0a:89:4c:38:97:01:4b:9c: + ac:4e:e1:55:17:ef:0a:ad:a7:eb:1e:4b:86:23:12:f1:52:69: + cb:a3:8a:ce:fb:14:8b:86:d7:bb:81:5e:bd:2a:c7:a7:79:58: + 00:10:c0:db:ff:d4:a5:b9:19:74:b3:23:19:4a:1f:78:4b:a8: + b6:f6:20:26:c1:69:f9:89:7f:b8:1c:3b:a2:f9:37:31:80:2c: + b0:b6:2b:d2:84:44:d7:42:e4:e6:44:51:04:35:d9:1c:a4:48: + 
c6:b7:35:de:f2:ae:da:4b:ba:c8:09:42:8d:ed:7a:81:dc:ed: + 9d:f0:de:6e:21:b9:01:1c:ad:64:3d:25:4c:91:94:f1:13:18: + bb:89:e9:48:ac:05:73:07:c8:db:bd:69:8e:6f:02:9d:b0:18: + c0:b9:e1:a8:b1:17:50:3d:ac:05:6e:6f:63:4f:b1:73:33:60: + 9a:77:d2:81:8a:01:38:43:e9:4c:3c:90:63:a4:99:4b:d2:1b: + f9:1b:ec:ee +-----BEGIN CERTIFICATE----- +MIIECzCCAvOgAwIBAgIBAjANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJOTzER +MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1 +dGhvcml0eTEWMBQGA1UEAwwNR3JlZW4gUm9vdCBDQTAeFw0xNzA3MTMwMzQ3MjBa +Fw0yNzA3MTMwMzQ3MjBaMF0xCzAJBgNVBAYTAk5PMREwDwYDVQQKDAhHcmVlbiBB +UzEkMCIGA1UECwwbR3JlZW4gQ2VydGlmaWNhdGUgQXV0aG9yaXR5MRUwEwYDVQQD +DAxHcmVlbiBUTFMgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1 +WrN6f2pb6e5i7k9hQnmTBr+B/JoftYCDfLOmlFRYirF0y8O4PCOoaR/KK6++l7ox +c7W4ztm/v5p6zzpkUYPJNtL3OzoOTMdmLr8a384Qqj0PGXQDfrUQu+g3vWLwQi3f +PcpwUBAXzqnsVY6Hb86aBDYUlsvRpUjV0ocCYpNOIUr/vkTx0n7tdNrCUSaOA6DC +vb1fsFAReP2rHQSGbMGNIL0FX1FnxtMHlZItkpAAxp8t3TZc3HgQfPZoOR0s4OEm +ZE82NGanhGqQFTqUt3mxR/XSUZVUv5J2mrmI7mP5bA04xrYcBkPtJB27bHJIzIz0 +NbxD/qaWTDFfgg0NICo9AgMBAAGjgdQwgdEwDgYDVR0PAQH/BAQDAgEGMBIGA1Ud +EwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFK5CiHXdBaaOSH9Qafm3NCNJuLRxMB8G +A1UdIwQYMBaAFGCTUy/HzyrX8wko9jyunFDsk2PlMDoGCCsGAQUFBwEBBC4wLDAq +BggrBgEFBQcwAoYeaHR0cDovL2dyZWVuLm5vL2NhL3Jvb3QtY2EuY2VyMC8GA1Ud +HwQoMCYwJKAioCCGHmh0dHA6Ly9ncmVlbi5uby9jYS9yb290LWNhLmNybDANBgkq +hkiG9w0BAQUFAAOCAQEAFaes1yWeKtTRFLSZOD0vc2Eq2baLE+r+23jZCmzfJm7B +1UqXQhndlwUD5Cv8Hh84PE6wO4w4rStl+jUtgY7g9gqJTDiXAUucrE7hVRfvCq2n +6x5LhiMS8VJpy6OKzvsUi4bXu4FevSrHp3lYABDA2//UpbkZdLMjGUofeEuotvYg +JsFp+Yl/uBw7ovk3MYAssLYr0oRE10Lk5kRRBDXZHKRIxrc13vKu2ku6yAlCje16 +gdztnfDebiG5ARytZD0lTJGU8RMYu4npSKwFcwfI271pjm8CnbAYwLnhqLEXUD2s +BW5vY0+xczNgmnfSgYoBOEPpTDyQY6SZS9Ib+Rvs7g== +-----END CERTIFICATE----- +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 1 (0x1) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA + Validity + Not Before: Jul 13 03:44:39 2017 GMT + Not After : Dec 31 23:59:59 2030 GMT + Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:a7:e8:ed:de:d4:54:08:41:07:40:d5:c0:43:d6: + ab:d3:9e:21:87:c6:13:bf:a7:cf:3d:08:4f:c1:fe: + 8f:e5:6c:c5:89:97:e5:27:75:26:c3:2a:73:2d:34: + 7c:6f:35:8d:40:66:61:05:c0:eb:e9:b3:38:47:f8: + 8b:26:35:2c:df:dc:24:31:fe:72:e3:87:10:d1:f7: + a0:57:b7:f3:b1:1a:fe:c7:4b:f8:7b:14:6d:73:08: + 54:eb:63:3c:0c:ce:22:95:5f:3f:f2:6f:89:ae:63: + da:80:74:36:21:13:e8:91:01:58:77:cc:c2:f2:42: + bf:eb:b3:60:a7:21:ed:88:24:7f:eb:ff:07:41:9b: + 93:c8:5f:6a:8e:a6:1a:15:3c:bc:e7:0d:fd:05:fd: + 3c:c1:1c:1d:1f:57:2b:40:27:62:a1:7c:48:63:c1: + 45:e7:2f:20:ed:92:1c:42:94:e4:58:70:7a:b6:d2: + 85:c5:61:d8:cd:c6:37:6b:72:3b:7f:af:55:81:d6: + 9d:dc:10:c9:d8:0e:81:e4:5e:40:13:2f:20:e8:6b: + 46:81:ce:88:47:dd:38:71:3d:ef:21:cc:c0:67:cf: + 0a:f4:e9:3f:a8:9d:26:25:2e:23:1e:a3:11:18:cb: + d1:70:1c:9e:7d:09:b1:a4:20:dc:95:15:1d:49:cf: + 1b:ad + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Key Usage: critical + Certificate Sign, CRL Sign + X509v3 Basic Constraints: critical + CA:TRUE + X509v3 Subject Key Identifier: + 60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5 + X509v3 Authority Key Identifier: + keyid:60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5 + + Signature Algorithm: sha1WithRSAEncryption + a7:77:71:8b:1a:e5:5a:5b:87:54:08:bf:07:3e:cb:99:2f:dc: + 
0e:8d:63:94:95:83:19:c9:92:82:d5:cb:5b:8f:1f:86:55:bc: + 70:01:1d:33:46:ec:99:de:6b:1f:c3:c2:7a:dd:ef:69:ab:96: + 58:ec:6c:6f:6c:70:82:71:8a:7f:f0:3b:80:90:d5:64:fa:80: + 27:b8:7b:50:69:98:4b:37:99:ad:bf:a2:5b:93:22:5e:96:44: + 3c:5a:cf:0c:f4:62:63:4a:6f:72:a7:f6:89:1d:09:26:3d:8f: + a8:86:d4:b4:bc:dd:b3:38:ca:c0:59:16:8c:20:1f:89:35:12: + b4:2d:c0:e9:de:93:e0:39:76:32:fc:80:db:da:44:26:fd:01: + 32:74:97:f8:44:ae:fe:05:b1:34:96:13:34:56:73:b4:93:a5: + 55:56:d1:01:51:9d:9c:55:e7:38:53:28:12:4e:38:72:0c:8f: + bd:91:4c:45:48:3b:e1:0d:03:5f:58:40:c9:d3:a0:ac:b3:89: + ce:af:27:8a:0f:ab:ec:72:4d:40:77:30:6b:36:fd:32:46:9f: + ee:f9:c4:f5:17:06:0f:4b:d3:88:f5:a4:2f:3d:87:9e:f5:26: + 74:f0:c9:dc:cb:ad:d9:a7:8a:d3:71:15:00:d3:5d:9f:4c:59: + 3e:24:63:f5 +-----BEGIN CERTIFICATE----- +MIIDnDCCAoSgAwIBAgIBATANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJOTzER +MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1 +dGhvcml0eTEWMBQGA1UEAwwNR3JlZW4gUm9vdCBDQTAgFw0xNzA3MTMwMzQ0Mzla +GA8yMDMwMTIzMTIzNTk1OVowXjELMAkGA1UEBhMCTk8xETAPBgNVBAoMCEdyZWVu +IEFTMSQwIgYDVQQLDBtHcmVlbiBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFjAUBgNV +BAMMDUdyZWVuIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCn6O3e1FQIQQdA1cBD1qvTniGHxhO/p889CE/B/o/lbMWJl+UndSbDKnMtNHxv +NY1AZmEFwOvpszhH+IsmNSzf3CQx/nLjhxDR96BXt/OxGv7HS/h7FG1zCFTrYzwM +ziKVXz/yb4muY9qAdDYhE+iRAVh3zMLyQr/rs2CnIe2IJH/r/wdBm5PIX2qOphoV +PLznDf0F/TzBHB0fVytAJ2KhfEhjwUXnLyDtkhxClORYcHq20oXFYdjNxjdrcjt/ +r1WB1p3cEMnYDoHkXkATLyDoa0aBzohH3ThxPe8hzMBnzwr06T+onSYlLiMeoxEY +y9FwHJ59CbGkINyVFR1JzxutAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRgk1Mvx88q1/MJKPY8rpxQ7JNj5TAfBgNV +HSMEGDAWgBRgk1Mvx88q1/MJKPY8rpxQ7JNj5TANBgkqhkiG9w0BAQUFAAOCAQEA +p3dxixrlWluHVAi/Bz7LmS/cDo1jlJWDGcmSgtXLW48fhlW8cAEdM0bsmd5rH8PC +et3vaauWWOxsb2xwgnGKf/A7gJDVZPqAJ7h7UGmYSzeZrb+iW5MiXpZEPFrPDPRi +Y0pvcqf2iR0JJj2PqIbUtLzdszjKwFkWjCAfiTUStC3A6d6T4Dl2MvyA29pEJv0B +MnSX+ESu/gWxNJYTNFZztJOlVVbRAVGdnFXnOFMoEk44cgyPvZFMRUg74Q0DX1hA +ydOgrLOJzq8nig+r7HJNQHcwazb9Mkaf7vnE9RcGD0vTiPWkLz2HnvUmdPDJ3Mut +2aeK03EVANNdn0xZPiRj9Q== +-----END CERTIFICATE----- diff --git a/vendor/github.com/prometheus/common/config/tls_config.go b/vendor/github.com/prometheus/common/config/tls_config.go deleted file mode 100644 index 7c7e7cb0..00000000 --- a/vendor/github.com/prometheus/common/config/tls_config.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2016 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package config - -import ( - "crypto/tls" - "crypto/x509" - "fmt" - "io/ioutil" -) - -// TLSConfig configures the options for TLS connections. -type TLSConfig struct { - // The CA cert to use for the targets. - CAFile string `yaml:"ca_file,omitempty"` - // The client cert file for the targets. - CertFile string `yaml:"cert_file,omitempty"` - // The client key file for the targets. - KeyFile string `yaml:"key_file,omitempty"` - // Disable target certificate validation. 
- InsecureSkipVerify bool `yaml:"insecure_skip_verify"` - - // Catches all undefined fields and must be empty after parsing. - XXX map[string]interface{} `yaml:",inline"` -} - -// UnmarshalYAML implements the yaml.Unmarshaler interface. -func (c *TLSConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - type plain TLSConfig - if err := unmarshal((*plain)(c)); err != nil { - return err - } - return checkOverflow(c.XXX, "TLS config") -} - -// GenerateConfig produces a tls.Config based on TLS connection options. -// It loads certificate files from disk if they are defined. -func (c *TLSConfig) GenerateConfig() (*tls.Config, error) { - tlsConfig := &tls.Config{InsecureSkipVerify: c.InsecureSkipVerify} - - // If a CA cert is provided then let's read it in so we can validate the - // scrape target's certificate properly. - if len(c.CAFile) > 0 { - caCertPool := x509.NewCertPool() - // Load CA cert. - caCert, err := ioutil.ReadFile(c.CAFile) - if err != nil { - return nil, fmt.Errorf("unable to use specified CA cert %s: %s", c.CAFile, err) - } - caCertPool.AppendCertsFromPEM(caCert) - tlsConfig.RootCAs = caCertPool - } - - if len(c.CertFile) > 0 && len(c.KeyFile) == 0 { - return nil, fmt.Errorf("client cert file %q specified without client key file", c.CertFile) - } else if len(c.KeyFile) > 0 && len(c.CertFile) == 0 { - return nil, fmt.Errorf("client key file %q specified without client cert file", c.KeyFile) - } else if len(c.CertFile) > 0 && len(c.KeyFile) > 0 { - cert, err := tls.LoadX509KeyPair(c.CertFile, c.KeyFile) - if err != nil { - return nil, fmt.Errorf("unable to use specified client cert (%s) & key (%s): %s", c.CertFile, c.KeyFile, err) - } - tlsConfig.Certificates = []tls.Certificate{cert} - } - tlsConfig.BuildNameToCertificate() - - return tlsConfig, nil -} diff --git a/vendor/github.com/prometheus/common/config/tls_config_test.go b/vendor/github.com/prometheus/common/config/tls_config_test.go index 44430353..31ddb6e9 100644 --- a/vendor/github.com/prometheus/common/config/tls_config_test.go +++ b/vendor/github.com/prometheus/common/config/tls_config_test.go @@ -17,7 +17,6 @@ import ( "crypto/tls" "io/ioutil" "reflect" - "strings" "testing" "gopkg.in/yaml.v2" @@ -29,11 +28,11 @@ func LoadTLSConfig(filename string) (*tls.Config, error) { if err != nil { return nil, err } - cfg := &TLSConfig{} - if err = yaml.Unmarshal(content, cfg); err != nil { + cfg := TLSConfig{} + if err = yaml.UnmarshalStrict(content, &cfg); err != nil { return nil, err } - return cfg.GenerateConfig() + return NewTLSConfig(&cfg) } var expectedTLSConfigs = []struct { @@ -57,36 +56,7 @@ func TestValidTLSConfig(t *testing.T) { t.Errorf("Error parsing %s: %s", cfg.filename, err) } if !reflect.DeepEqual(*got, *cfg.config) { - t.Fatalf("%s: unexpected config result: \n\n%s\n expected\n\n%s", cfg.filename, got, cfg.config) - } - } -} - -var expectedTLSConfigErrors = []struct { - filename string - errMsg string -}{ - { - filename: "tls_config.invalid_field.bad.yml", - errMsg: "unknown fields in", - }, { - filename: "tls_config.cert_no_key.bad.yml", - errMsg: "specified without client key file", - }, { - filename: "tls_config.key_no_cert.bad.yml", - errMsg: "specified without client cert file", - }, -} - -func TestBadTLSConfigs(t *testing.T) { - for _, ee := range expectedTLSConfigErrors { - _, err := LoadTLSConfig("testdata/" + ee.filename) - if err == nil { - t.Errorf("Expected error parsing %s but got none", ee.filename) - continue - } - if !strings.Contains(err.Error(), ee.errMsg) { - t.Errorf("Expected 
error for %s to contain %q but got: %s", ee.filename, ee.errMsg, err) + t.Fatalf("%v: unexpected config result: \n\n%v\n expected\n\n%v", cfg.filename, got, cfg.config) } } } diff --git a/vendor/github.com/prometheus/common/expfmt/decode.go b/vendor/github.com/prometheus/common/expfmt/decode.go index a7a42d5e..c092723e 100644 --- a/vendor/github.com/prometheus/common/expfmt/decode.go +++ b/vendor/github.com/prometheus/common/expfmt/decode.go @@ -164,9 +164,9 @@ func (sd *SampleDecoder) Decode(s *model.Vector) error { } // ExtractSamples builds a slice of samples from the provided metric -// families. If an error occurs during sample extraction, it continues to +// families. If an error occurrs during sample extraction, it continues to // extract from the remaining metric families. The returned error is the last -// error that has occured. +// error that has occurred. func ExtractSamples(o *DecodeOptions, fams ...*dto.MetricFamily) (model.Vector, error) { var ( all model.Vector diff --git a/vendor/github.com/prometheus/common/expfmt/expfmt.go b/vendor/github.com/prometheus/common/expfmt/expfmt.go index 371ac750..c71bcb98 100644 --- a/vendor/github.com/prometheus/common/expfmt/expfmt.go +++ b/vendor/github.com/prometheus/common/expfmt/expfmt.go @@ -26,7 +26,7 @@ const ( // The Content-Type values for the different wire protocols. FmtUnknown Format = `` - FmtText Format = `text/plain; version=` + TextVersion + FmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` FmtProtoDelim Format = ProtoFmt + ` encoding=delimited` FmtProtoText Format = ProtoFmt + ` encoding=text` FmtProtoCompact Format = ProtoFmt + ` encoding=compact-text` diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index ef9a1507..b86290af 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -315,6 +315,10 @@ func (p *TextParser) startLabelValue() stateFn { if p.readTokenAsLabelValue(); p.err != nil { return nil } + if !model.LabelValue(p.currentToken.String()).IsValid() { + p.parseError(fmt.Sprintf("invalid label value %q", p.currentToken.String())) + return nil + } p.currentLabelPair.Value = proto.String(p.currentToken.String()) // Special treatment of summaries: // - Quantile labels are special, will result in dto.Quantile later. @@ -552,8 +556,8 @@ func (p *TextParser) readTokenUntilWhitespace() { // byte considered is the byte already read (now in p.currentByte). The first // newline byte encountered is still copied into p.currentByte, but not into // p.currentToken. If recognizeEscapeSequence is true, two escape sequences are -// recognized: '\\' tranlates into '\', and '\n' into a line-feed character. All -// other escape sequences are invalid and cause an error. +// recognized: '\\' translates into '\', and '\n' into a line-feed character. +// All other escape sequences are invalid and cause an error. 
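The text_parse.go hunk just above makes the exposition-format parser reject label values that are not valid UTF-8. A minimal illustrative sketch of how that stricter validation surfaces to a caller of the vendored parser (not part of this patch; the metric and label names are made up):

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/expfmt"
)

func main() {
	var parser expfmt.TextParser

	// A well-formed exposition line still parses as before.
	ok := "http_requests_total{path=\"/metrics\"} 3\n"
	if _, err := parser.TextToMetricFamilies(strings.NewReader(ok)); err != nil {
		fmt.Println("unexpected error:", err)
	}

	// After this change a label value that is not valid UTF-8 is rejected
	// by the parser instead of being handed on to consumers.
	bad := "http_requests_total{path=\"\xbd\"} 3\n"
	if _, err := parser.TextToMetricFamilies(strings.NewReader(bad)); err != nil {
		fmt.Println("parse error:", err) // e.g. invalid label value "\xbd"
	}
}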
func (p *TextParser) readTokenUntilNewline(recognizeEscapeSequence bool) { p.currentToken.Reset() escaped := false diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse_test.go b/vendor/github.com/prometheus/common/expfmt/text_parse_test.go index 7e7388ce..76c95118 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse_test.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse_test.go @@ -559,6 +559,11 @@ metric_bucket{le="bla"} 3.14 `, err: "text format parsing error in line 3: expected float as value for 'le' label", }, + // 19: Invalid UTF-8 in label value. + { + in: "metric{l=\"\xbd\"} 3.14\n", + err: "text format parsing error in line 1: invalid label value \"\\xbd\"", + }, } for i, scenario := range scenarios { diff --git a/vendor/github.com/prometheus/common/log/eventlog_formatter.go b/vendor/github.com/prometheus/common/log/eventlog_formatter.go index 6d41284c..bcf68e6f 100644 --- a/vendor/github.com/prometheus/common/log/eventlog_formatter.go +++ b/vendor/github.com/prometheus/common/log/eventlog_formatter.go @@ -21,22 +21,22 @@ import ( "golang.org/x/sys/windows/svc/eventlog" - "github.com/Sirupsen/logrus" + "github.com/sirupsen/logrus" ) func init() { - setEventlogFormatter = func(name string, debugAsInfo bool) error { + setEventlogFormatter = func(l logger, name string, debugAsInfo bool) error { if name == "" { return fmt.Errorf("missing name parameter") } - fmter, err := newEventlogger(name, debugAsInfo, origLogger.Formatter) + fmter, err := newEventlogger(name, debugAsInfo, l.entry.Logger.Formatter) if err != nil { fmt.Fprintf(os.Stderr, "error creating eventlog formatter: %v\n", err) - origLogger.Errorf("can't connect logger to eventlog: %v", err) + l.Errorf("can't connect logger to eventlog: %v", err) return err } - origLogger.Formatter = fmter + l.entry.Logger.Formatter = fmter return nil } } diff --git a/vendor/github.com/prometheus/common/log/log.go b/vendor/github.com/prometheus/common/log/log.go index 0a74a7f9..10883025 100644 --- a/vendor/github.com/prometheus/common/log/log.go +++ b/vendor/github.com/prometheus/common/log/log.go @@ -14,7 +14,6 @@ package log import ( - "flag" "fmt" "io" "io/ioutil" @@ -25,106 +24,46 @@ import ( "strconv" "strings" - "github.com/Sirupsen/logrus" + "github.com/sirupsen/logrus" + "gopkg.in/alecthomas/kingpin.v2" ) -type levelFlag string - -// String implements flag.Value. -func (f levelFlag) String() string { - return fmt.Sprintf("%q", origLogger.Level.String()) -} - -// Set implements flag.Value. -func (f levelFlag) Set(level string) error { - l, err := logrus.ParseLevel(level) - if err != nil { - return err - } - origLogger.Level = l - return nil -} - // setSyslogFormatter is nil if the target architecture does not support syslog. -var setSyslogFormatter func(string, string) error +var setSyslogFormatter func(logger, string, string) error // setEventlogFormatter is nil if the target OS does not support Eventlog (i.e., is not Windows). -var setEventlogFormatter func(string, bool) error +var setEventlogFormatter func(logger, string, bool) error func setJSONFormatter() { origLogger.Formatter = &logrus.JSONFormatter{} } -type logFormatFlag url.URL - -// String implements flag.Value. -func (f logFormatFlag) String() string { - u := url.URL(f) - return fmt.Sprintf("%q", u.String()) +type loggerSettings struct { + level string + format string } -// Set implements flag.Value. 
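The common/log migration from the standard flag package to kingpin begins here; the replacement AddFlags(a *kingpin.Application) and the loggerSettings.apply hook appear in the hunks that follow. A minimal sketch, assuming a plain main package, of how a consumer wires the flags after this change (illustrative only, not part of the patch):

package main

import (
	"github.com/prometheus/common/log"
	"gopkg.in/alecthomas/kingpin.v2"
)

func main() {
	// AddFlags now takes a *kingpin.Application instead of a *flag.FlagSet,
	// registering --log.level and --log.format on it.
	log.AddFlags(kingpin.CommandLine)
	kingpin.Parse()

	log.Infoln("logger configured through kingpin flags")
}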
-func (f logFormatFlag) Set(format string) error { - u, err := url.Parse(format) +func (s *loggerSettings) apply(ctx *kingpin.ParseContext) error { + err := baseLogger.SetLevel(s.level) if err != nil { return err } - if u.Scheme != "logger" { - return fmt.Errorf("invalid scheme %s", u.Scheme) - } - jsonq := u.Query().Get("json") - if jsonq == "true" { - setJSONFormatter() - } - - switch u.Opaque { - case "syslog": - if setSyslogFormatter == nil { - return fmt.Errorf("system does not support syslog") - } - appname := u.Query().Get("appname") - facility := u.Query().Get("local") - return setSyslogFormatter(appname, facility) - case "eventlog": - if setEventlogFormatter == nil { - return fmt.Errorf("system does not support eventlog") - } - name := u.Query().Get("name") - debugAsInfo := false - debugAsInfoRaw := u.Query().Get("debugAsInfo") - if parsedDebugAsInfo, err := strconv.ParseBool(debugAsInfoRaw); err == nil { - debugAsInfo = parsedDebugAsInfo - } - return setEventlogFormatter(name, debugAsInfo) - case "stdout": - origLogger.Out = os.Stdout - case "stderr": - origLogger.Out = os.Stderr - default: - return fmt.Errorf("unsupported logger %q", u.Opaque) - } - return nil + err = baseLogger.SetFormat(s.format) + return err } -func init() { - AddFlags(flag.CommandLine) -} - -// AddFlags adds the flags used by this package to the given FlagSet. That's -// useful if working with a custom FlagSet. The init function of this package -// adds the flags to flag.CommandLine anyway. Thus, it's usually enough to call -// flag.Parse() to make the logging flags take effect. -func AddFlags(fs *flag.FlagSet) { - fs.Var( - levelFlag(origLogger.Level.String()), - "log.level", - "Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]", - ) - fs.Var( - logFormatFlag(url.URL{Scheme: "logger", Opaque: "stderr"}), - "log.format", - `Set the log target and format. Example: "logger:syslog?appname=bob&local=7" or "logger:stdout?json=true"`, - ) +// AddFlags adds the flags used by this package to the Kingpin application. +// To use the default Kingpin application, call AddFlags(kingpin.CommandLine) +func AddFlags(a *kingpin.Application) { + s := loggerSettings{} + a.Flag("log.level", "Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]"). + Default(origLogger.Level.String()). + StringVar(&s.level) + defaultFormat := url.URL{Scheme: "logger", Opaque: "stderr"} + a.Flag("log.format", `Set the log target and format. Example: "logger:syslog?appname=bob&local=7" or "logger:stdout?json=true"`). + Default(defaultFormat.String()). + StringVar(&s.format) + a.Action(s.apply) } // Logger is the interface for loggers used in the Prometheus components. @@ -150,6 +89,9 @@ type Logger interface { Fatalf(string, ...interface{}) With(key string, value interface{}) Logger + + SetFormat(string) error + SetLevel(string) error } type logger struct { @@ -235,6 +177,58 @@ func (l logger) Fatalf(format string, args ...interface{}) { l.sourced().Fatalf(format, args...) 
} +func (l logger) SetLevel(level string) error { + lvl, err := logrus.ParseLevel(level) + if err != nil { + return err + } + + l.entry.Logger.Level = lvl + return nil +} + +func (l logger) SetFormat(format string) error { + u, err := url.Parse(format) + if err != nil { + return err + } + if u.Scheme != "logger" { + return fmt.Errorf("invalid scheme %s", u.Scheme) + } + jsonq := u.Query().Get("json") + if jsonq == "true" { + setJSONFormatter() + } + + switch u.Opaque { + case "syslog": + if setSyslogFormatter == nil { + return fmt.Errorf("system does not support syslog") + } + appname := u.Query().Get("appname") + facility := u.Query().Get("local") + return setSyslogFormatter(l, appname, facility) + case "eventlog": + if setEventlogFormatter == nil { + return fmt.Errorf("system does not support eventlog") + } + name := u.Query().Get("name") + debugAsInfo := false + debugAsInfoRaw := u.Query().Get("debugAsInfo") + if parsedDebugAsInfo, err := strconv.ParseBool(debugAsInfoRaw); err == nil { + debugAsInfo = parsedDebugAsInfo + } + return setEventlogFormatter(l, name, debugAsInfo) + case "stdout": + l.entry.Logger.Out = os.Stdout + case "stderr": + l.entry.Logger.Out = os.Stderr + default: + return fmt.Errorf("unsupported logger %q", u.Opaque) + } + return nil +} + // sourced adds a source field to the logger that contains // the file name and line where the logging happened. func (l logger) sourced() *logrus.Entry { @@ -351,6 +345,11 @@ func Fatalf(format string, args ...interface{}) { baseLogger.sourced().Fatalf(format, args...) } +// AddHook adds hook to Prometheus' original logger. +func AddHook(hook logrus.Hook) { + origLogger.Hooks.Add(hook) +} + type errorLogWriter struct{} func (errorLogWriter) Write(b []byte) (int, error) { diff --git a/vendor/github.com/prometheus/common/log/log_test.go b/vendor/github.com/prometheus/common/log/log_test.go index 953adb79..f63b4417 100644 --- a/vendor/github.com/prometheus/common/log/log_test.go +++ b/vendor/github.com/prometheus/common/log/log_test.go @@ -18,7 +18,7 @@ import ( "regexp" "testing" - "github.com/Sirupsen/logrus" + "github.com/sirupsen/logrus" ) func TestFileLineLogging(t *testing.T) { @@ -32,7 +32,7 @@ func TestFileLineLogging(t *testing.T) { Debug("This debug-level line should not show up in the output.") Infof("This %s-level line should show up in the output.", "info") - re := `^time=".*" level=info msg="This info-level line should show up in the output." source="log_test.go:33" \n$` + re := `^time=".*" level=info msg="This info-level line should show up in the output." 
source="log_test.go:33"\n$` if !regexp.MustCompile(re).Match(buf.Bytes()) { t.Fatalf("%q did not match expected regex %q", buf.String(), re) } diff --git a/vendor/github.com/prometheus/common/log/syslog_formatter.go b/vendor/github.com/prometheus/common/log/syslog_formatter.go index 64f5fdac..f882f2f8 100644 --- a/vendor/github.com/prometheus/common/log/syslog_formatter.go +++ b/vendor/github.com/prometheus/common/log/syslog_formatter.go @@ -20,13 +20,13 @@ import ( "log/syslog" "os" - "github.com/Sirupsen/logrus" + "github.com/sirupsen/logrus" ) var _ logrus.Formatter = (*syslogger)(nil) func init() { - setSyslogFormatter = func(appname, local string) error { + setSyslogFormatter = func(l logger, appname, local string) error { if appname == "" { return fmt.Errorf("missing appname parameter") } @@ -34,13 +34,13 @@ func init() { return fmt.Errorf("missing local parameter") } - fmter, err := newSyslogger(appname, local, origLogger.Formatter) + fmter, err := newSyslogger(appname, local, l.entry.Logger.Formatter) if err != nil { fmt.Fprintf(os.Stderr, "error creating syslog formatter: %v\n", err) - origLogger.Errorf("can't connect logger to syslog: %v", err) + l.entry.Errorf("can't connect logger to syslog: %v", err) return err } - origLogger.Formatter = fmter + l.entry.Logger.Formatter = fmter return nil } } diff --git a/vendor/github.com/prometheus/common/model/silence.go b/vendor/github.com/prometheus/common/model/silence.go index 7538e299..bb99889d 100644 --- a/vendor/github.com/prometheus/common/model/silence.go +++ b/vendor/github.com/prometheus/common/model/silence.go @@ -59,8 +59,8 @@ func (m *Matcher) Validate() error { return nil } -// Silence defines the representation of a silence definiton -// in the Prometheus eco-system. +// Silence defines the representation of a silence definition in the Prometheus +// eco-system. type Silence struct { ID uint64 `json:"id,omitempty"` diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go index 548968ae..74ed5a9f 100644 --- a/vendor/github.com/prometheus/common/model/time.go +++ b/vendor/github.com/prometheus/common/model/time.go @@ -163,9 +163,21 @@ func (t *Time) UnmarshalJSON(b []byte) error { // This type should not propagate beyond the scope of input/output processing. type Duration time.Duration +// Set implements pflag/flag.Value +func (d *Duration) Set(s string) error { + var err error + *d, err = ParseDuration(s) + return err +} + +// Type implements pflag.Value +func (d *Duration) Type() string { + return "duration" +} + var durationRE = regexp.MustCompile("^([0-9]+)(y|w|d|h|m|s|ms)$") -// StringToDuration parses a string into a time.Duration, assuming that a year +// ParseDuration parses a string into a time.Duration, assuming that a year // always has 365d, a week always has 7d, and a day always has 24h. 
func ParseDuration(durationStr string) (Duration, error) { matches := durationRE.FindStringSubmatch(durationStr) @@ -202,6 +214,9 @@ func (d Duration) String() string { ms = int64(time.Duration(d) / time.Millisecond) unit = "ms" ) + if ms == 0 { + return "0s" + } factors := map[string]int64{ "y": 1000 * 60 * 60 * 24 * 365, "w": 1000 * 60 * 60 * 24 * 7, diff --git a/vendor/github.com/prometheus/common/model/time_test.go b/vendor/github.com/prometheus/common/model/time_test.go index 45ffd872..3efdd65f 100644 --- a/vendor/github.com/prometheus/common/model/time_test.go +++ b/vendor/github.com/prometheus/common/model/time_test.go @@ -91,6 +91,9 @@ func TestParseDuration(t *testing.T) { out time.Duration }{ { + in: "0s", + out: 0, + }, { in: "324ms", out: 324 * time.Millisecond, }, { diff --git a/vendor/github.com/prometheus/common/model/value.go b/vendor/github.com/prometheus/common/model/value.go index c9ed3ffd..c9d8fb1a 100644 --- a/vendor/github.com/prometheus/common/model/value.go +++ b/vendor/github.com/prometheus/common/model/value.go @@ -100,7 +100,7 @@ func (s *SamplePair) UnmarshalJSON(b []byte) error { } // Equal returns true if this SamplePair and o have equal Values and equal -// Timestamps. The sematics of Value equality is defined by SampleValue.Equal. +// Timestamps. The semantics of Value equality is defined by SampleValue.Equal. func (s *SamplePair) Equal(o *SamplePair) bool { return s == o || (s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)) } @@ -117,7 +117,7 @@ type Sample struct { } // Equal compares first the metrics, then the timestamp, then the value. The -// sematics of value equality is defined by SampleValue.Equal. +// semantics of value equality is defined by SampleValue.Equal. func (s *Sample) Equal(o *Sample) bool { if s == o { return true diff --git a/vendor/github.com/prometheus/common/promlog/flag/flag.go b/vendor/github.com/prometheus/common/promlog/flag/flag.go new file mode 100644 index 00000000..b9d361e4 --- /dev/null +++ b/vendor/github.com/prometheus/common/promlog/flag/flag.go @@ -0,0 +1,33 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package flag + +import ( + "github.com/prometheus/common/promlog" + kingpin "gopkg.in/alecthomas/kingpin.v2" +) + +// LevelFlagName is the canonical flag name to configure the allowed log level +// within Prometheus projects. +const LevelFlagName = "log.level" + +// LevelFlagHelp is the help description for the log.level flag. +const LevelFlagHelp = "Only log messages with the given severity or above. One of: [debug, info, warn, error]" + +// AddFlags adds the flags used by this package to the Kingpin application. +// To use the default Kingpin application, call AddFlags(kingpin.CommandLine) +func AddFlags(a *kingpin.Application, logLevel *promlog.AllowedLevel) { + a.Flag(LevelFlagName, LevelFlagHelp). 
+ Default("info").SetValue(logLevel) +} diff --git a/vendor/github.com/prometheus/common/promlog/log.go b/vendor/github.com/prometheus/common/promlog/log.go new file mode 100644 index 00000000..cf8307ad --- /dev/null +++ b/vendor/github.com/prometheus/common/promlog/log.go @@ -0,0 +1,63 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package promlog defines standardised ways to initialize Go kit loggers +// across Prometheus components. +// It should typically only ever be imported by main packages. +package promlog + +import ( + "os" + + "github.com/go-kit/kit/log" + "github.com/go-kit/kit/log/level" + "github.com/pkg/errors" +) + +// AllowedLevel is a settable identifier for the minimum level a log entry +// must be have. +type AllowedLevel struct { + s string + o level.Option +} + +func (l *AllowedLevel) String() string { + return l.s +} + +// Set updates the value of the allowed level. +func (l *AllowedLevel) Set(s string) error { + switch s { + case "debug": + l.o = level.AllowDebug() + case "info": + l.o = level.AllowInfo() + case "warn": + l.o = level.AllowWarn() + case "error": + l.o = level.AllowError() + default: + return errors.Errorf("unrecognized log level %q", s) + } + l.s = s + return nil +} + +// New returns a new leveled oklog logger in the logfmt format. Each logged line will be annotated +// with a timestamp. The output always goes to stderr. +func New(al AllowedLevel) log.Logger { + l := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr)) + l = level.NewFilter(l, al.o) + l = log.With(l, "ts", log.DefaultTimestampUTC, "caller", log.DefaultCaller) + return l +} diff --git a/vendor/github.com/prometheus/common/route/route.go b/vendor/github.com/prometheus/common/route/route.go index bb468817..742e5754 100644 --- a/vendor/github.com/prometheus/common/route/route.go +++ b/vendor/github.com/prometheus/common/route/route.go @@ -19,11 +19,12 @@ func WithParam(ctx context.Context, p, v string) context.Context { return context.WithValue(ctx, param(p), v) } -// Router wraps httprouter.Router and adds support for prefixed sub-routers -// and per-request context injections. +// Router wraps httprouter.Router and adds support for prefixed sub-routers, +// per-request context injections and instrumentation. type Router struct { rtr *httprouter.Router prefix string + instrh func(handlerName string, handler http.HandlerFunc) http.HandlerFunc } // New returns a new Router. @@ -33,13 +34,22 @@ func New() *Router { } } +// WithInstrumentation returns a router with instrumentation support. +func (r *Router) WithInstrumentation(instrh func(handlerName string, handler http.HandlerFunc) http.HandlerFunc) *Router { + return &Router{rtr: r.rtr, prefix: r.prefix, instrh: instrh} +} + // WithPrefix returns a router that prefixes all registered routes with prefix. 
func (r *Router) WithPrefix(prefix string) *Router { - return &Router{rtr: r.rtr, prefix: r.prefix + prefix} + return &Router{rtr: r.rtr, prefix: r.prefix + prefix, instrh: r.instrh} } // handle turns a HandlerFunc into an httprouter.Handle. -func (r *Router) handle(h http.HandlerFunc) httprouter.Handle { +func (r *Router) handle(handlerName string, h http.HandlerFunc) httprouter.Handle { + if r.instrh != nil { + // This needs to be outside the closure to avoid data race when reading and writing to 'h'. + h = r.instrh(handlerName, h) + } return func(w http.ResponseWriter, req *http.Request, params httprouter.Params) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -53,27 +63,27 @@ func (r *Router) handle(h http.HandlerFunc) httprouter.Handle { // Get registers a new GET route. func (r *Router) Get(path string, h http.HandlerFunc) { - r.rtr.GET(r.prefix+path, r.handle(h)) + r.rtr.GET(r.prefix+path, r.handle(path, h)) } // Options registers a new OPTIONS route. func (r *Router) Options(path string, h http.HandlerFunc) { - r.rtr.OPTIONS(r.prefix+path, r.handle(h)) + r.rtr.OPTIONS(r.prefix+path, r.handle(path, h)) } // Del registers a new DELETE route. func (r *Router) Del(path string, h http.HandlerFunc) { - r.rtr.DELETE(r.prefix+path, r.handle(h)) + r.rtr.DELETE(r.prefix+path, r.handle(path, h)) } // Put registers a new PUT route. func (r *Router) Put(path string, h http.HandlerFunc) { - r.rtr.PUT(r.prefix+path, r.handle(h)) + r.rtr.PUT(r.prefix+path, r.handle(path, h)) } // Post registers a new POST route. func (r *Router) Post(path string, h http.HandlerFunc) { - r.rtr.POST(r.prefix+path, r.handle(h)) + r.rtr.POST(r.prefix+path, r.handle(path, h)) } // Redirect takes an absolute path and sends an internal HTTP redirect for it, diff --git a/vendor/github.com/prometheus/common/route/route_test.go b/vendor/github.com/prometheus/common/route/route_test.go index a9bb2099..d491cad6 100644 --- a/vendor/github.com/prometheus/common/route/route_test.go +++ b/vendor/github.com/prometheus/common/route/route_test.go @@ -42,3 +42,35 @@ func TestContext(t *testing.T) { } router.ServeHTTP(nil, r) } + +func TestInstrumentation(t *testing.T) { + var got string + cases := []struct { + router *Router + want string + }{ + { + router: New(), + want: "", + }, { + router: New().WithInstrumentation(func(handlerName string, handler http.HandlerFunc) http.HandlerFunc { + got = handlerName + return handler + }), + want: "/foo", + }, + } + + for _, c := range cases { + c.router.Get("/foo", func(w http.ResponseWriter, r *http.Request) {}) + + r, err := http.NewRequest("GET", "http://localhost:9090/foo", nil) + if err != nil { + t.Fatalf("Error building test request: %s", err) + } + c.router.ServeHTTP(nil, r) + if c.want != got { + t.Fatalf("Unexpected value: want %q, got %q", c.want, got) + } + } +} diff --git a/vendor/github.com/prometheus/procfs/.gitignore b/vendor/github.com/prometheus/procfs/.gitignore new file mode 100644 index 00000000..25e3659a --- /dev/null +++ b/vendor/github.com/prometheus/procfs/.gitignore @@ -0,0 +1 @@ +/fixtures/ diff --git a/vendor/github.com/prometheus/procfs/.travis.yml b/vendor/github.com/prometheus/procfs/.travis.yml index a9e28bf5..5416cf8a 100644 --- a/vendor/github.com/prometheus/procfs/.travis.yml +++ b/vendor/github.com/prometheus/procfs/.travis.yml @@ -1,5 +1,15 @@ sudo: false + language: go + go: - - 1.6.4 - - 1.7.4 +- 1.7.x +- 1.8.x +- 1.9.x +- 1.10.x +- 1.x + +go_import_path: github.com/prometheus/procfs + +script: +- make style check_license vet test 
staticcheck diff --git a/vendor/github.com/prometheus/procfs/Makefile b/vendor/github.com/prometheus/procfs/Makefile index c264a49d..5c8f7262 100644 --- a/vendor/github.com/prometheus/procfs/Makefile +++ b/vendor/github.com/prometheus/procfs/Makefile @@ -1,6 +1,71 @@ -ci: - ! gofmt -l *.go | read nothing - go vet - go test -v ./... - go get github.com/golang/lint/golint - golint *.go +# Copyright 2018 The Prometheus Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Ensure GOBIN is not set during build so that promu is installed to the correct path +unexport GOBIN + +GO ?= go +GOFMT ?= $(GO)fmt +FIRST_GOPATH := $(firstword $(subst :, ,$(shell $(GO) env GOPATH))) +STATICCHECK := $(FIRST_GOPATH)/bin/staticcheck +pkgs = $(shell $(GO) list ./... | grep -v /vendor/) + +PREFIX ?= $(shell pwd) +BIN_DIR ?= $(shell pwd) + +ifdef DEBUG + bindata_flags = -debug +endif + +STATICCHECK_IGNORE = + +all: format staticcheck build test + +style: + @echo ">> checking code style" + @! $(GOFMT) -d $(shell find . -path ./vendor -prune -o -name '*.go' -print) | grep '^' + +check_license: + @echo ">> checking license header" + @./scripts/check_license.sh + +test: fixtures/.unpacked sysfs/fixtures/.unpacked + @echo ">> running all tests" + @$(GO) test -race $(shell $(GO) list ./... | grep -v /vendor/ | grep -v examples) + +format: + @echo ">> formatting code" + @$(GO) fmt $(pkgs) + +vet: + @echo ">> vetting code" + @$(GO) vet $(pkgs) + +staticcheck: $(STATICCHECK) + @echo ">> running staticcheck" + @$(STATICCHECK) -ignore "$(STATICCHECK_IGNORE)" $(pkgs) + +%/.unpacked: %.ttar + ./ttar -C $(dir $*) -x -f $*.ttar + touch $@ + +$(FIRST_GOPATH)/bin/staticcheck: + @GOOS= GOARCH= $(GO) get -u honnef.co/go/tools/cmd/staticcheck + +.PHONY: all style check_license format test vet staticcheck + +# Declaring the binaries at their default locations as PHONY targets is a hack +# to ensure the latest version is downloaded on every make execution. +# If this is not desired, copy/symlink these binaries to a different path and +# set the respective environment variables. +.PHONY: $(GOPATH)/bin/staticcheck diff --git a/vendor/github.com/prometheus/procfs/bcache/bcache.go b/vendor/github.com/prometheus/procfs/bcache/bcache.go new file mode 100644 index 00000000..1db178ce --- /dev/null +++ b/vendor/github.com/prometheus/procfs/bcache/bcache.go @@ -0,0 +1,84 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package bcache provides access to statistics exposed by the bcache (Linux +// block cache). +package bcache + +// Stats contains bcache runtime statistics, parsed from /sys/fs/bcache/. +// +// The names and meanings of each statistic were taken from bcache.txt and +// files in drivers/md/bcache in the Linux kernel source. Counters are uint64 +// (in-kernel counters are mostly unsigned long). +type Stats struct { + // The name of the bcache used to source these statistics. + Name string + Bcache BcacheStats + Bdevs []BdevStats + Caches []CacheStats +} + +// BcacheStats contains statistics tied to a bcache ID. +type BcacheStats struct { + AverageKeySize uint64 + BtreeCacheSize uint64 + CacheAvailablePercent uint64 + Congested uint64 + RootUsagePercent uint64 + TreeDepth uint64 + Internal InternalStats + FiveMin PeriodStats + Total PeriodStats +} + +// BdevStats contains statistics for one backing device. +type BdevStats struct { + Name string + DirtyData uint64 + FiveMin PeriodStats + Total PeriodStats +} + +// CacheStats contains statistics for one cache device. +type CacheStats struct { + Name string + IOErrors uint64 + MetadataWritten uint64 + Written uint64 + Priority PriorityStats +} + +// PriorityStats contains statistics from the priority_stats file. +type PriorityStats struct { + UnusedPercent uint64 + MetadataPercent uint64 +} + +// InternalStats contains internal bcache statistics. +type InternalStats struct { + ActiveJournalEntries uint64 + BtreeNodes uint64 + BtreeReadAverageDurationNanoSeconds uint64 + CacheReadRaces uint64 +} + +// PeriodStats contains statistics for a time period (5 min or total). +type PeriodStats struct { + Bypassed uint64 + CacheBypassHits uint64 + CacheBypassMisses uint64 + CacheHits uint64 + CacheMissCollisions uint64 + CacheMisses uint64 + CacheReadaheads uint64 +} diff --git a/vendor/github.com/prometheus/procfs/bcache/get.go b/vendor/github.com/prometheus/procfs/bcache/get.go new file mode 100644 index 00000000..b6d97de1 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/bcache/get.go @@ -0,0 +1,330 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bcache + +import ( + "bufio" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "strconv" + "strings" +) + +// ParsePseudoFloat parses the peculiar format produced by bcache's bch_hprint. +func parsePseudoFloat(str string) (float64, error) { + ss := strings.Split(str, ".") + + intPart, err := strconv.ParseFloat(ss[0], 64) + if err != nil { + return 0, err + } + + if len(ss) == 1 { + // Pure integers are fine. + return intPart, nil + } + fracPart, err := strconv.ParseFloat(ss[1], 64) + if err != nil { + return 0, err + } + // fracPart is a number between 0 and 1023 divided by 100; it is off + // by a small amount. Unexpected bumps in time lines may occur because + // for bch_hprint .1 != .10 and .10 > .9 (at least up to Linux + // v4.12-rc3). 
+ + // Restore the proper order: + fracPart = fracPart / 10.24 + return intPart + fracPart, nil +} + +// Dehumanize converts a human-readable byte slice into a uint64. +func dehumanize(hbytes []byte) (uint64, error) { + ll := len(hbytes) + if ll == 0 { + return 0, fmt.Errorf("zero-length reply") + } + lastByte := hbytes[ll-1] + mul := float64(1) + var ( + mant float64 + err error + ) + // If lastByte is beyond the range of ASCII digits, it must be a + // multiplier. + if lastByte > 57 { + // Remove multiplier from slice. + hbytes = hbytes[:len(hbytes)-1] + + const ( + _ = 1 << (10 * iota) + KiB + MiB + GiB + TiB + PiB + EiB + ZiB + YiB + ) + + multipliers := map[rune]float64{ + // Source for conversion rules: + // linux-kernel/drivers/md/bcache/util.c:bch_hprint() + 'k': KiB, + 'M': MiB, + 'G': GiB, + 'T': TiB, + 'P': PiB, + 'E': EiB, + 'Z': ZiB, + 'Y': YiB, + } + mul = multipliers[rune(lastByte)] + mant, err = parsePseudoFloat(string(hbytes)) + if err != nil { + return 0, err + } + } else { + // Not humanized by bch_hprint + mant, err = strconv.ParseFloat(string(hbytes), 64) + if err != nil { + return 0, err + } + } + res := uint64(mant * mul) + return res, nil +} + +type parser struct { + uuidPath string + subDir string + currentDir string + err error +} + +func (p *parser) setSubDir(pathElements ...string) { + p.subDir = path.Join(pathElements...) + p.currentDir = path.Join(p.uuidPath, p.subDir) +} + +func (p *parser) readValue(fileName string) uint64 { + if p.err != nil { + return 0 + } + path := path.Join(p.currentDir, fileName) + byt, err := ioutil.ReadFile(path) + if err != nil { + p.err = fmt.Errorf("failed to read: %s", path) + return 0 + } + // Remove trailing newline. + byt = byt[:len(byt)-1] + res, err := dehumanize(byt) + p.err = err + return res +} + +// ParsePriorityStats parses lines from the priority_stats file. +func parsePriorityStats(line string, ps *PriorityStats) error { + var ( + value uint64 + err error + ) + switch { + case strings.HasPrefix(line, "Unused:"): + fields := strings.Fields(line) + rawValue := fields[len(fields)-1] + valueStr := strings.TrimSuffix(rawValue, "%") + value, err = strconv.ParseUint(valueStr, 10, 64) + if err != nil { + return err + } + ps.UnusedPercent = value + case strings.HasPrefix(line, "Metadata:"): + fields := strings.Fields(line) + rawValue := fields[len(fields)-1] + valueStr := strings.TrimSuffix(rawValue, "%") + value, err = strconv.ParseUint(valueStr, 10, 64) + if err != nil { + return err + } + ps.MetadataPercent = value + } + return nil +} + +func (p *parser) getPriorityStats() PriorityStats { + var res PriorityStats + + if p.err != nil { + return res + } + + path := path.Join(p.currentDir, "priority_stats") + + file, err := os.Open(path) + if err != nil { + p.err = fmt.Errorf("failed to read: %s", path) + return res + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + err = parsePriorityStats(scanner.Text(), &res) + if err != nil { + p.err = fmt.Errorf("failed to parse: %s (%s)", path, err) + return res + } + } + if err := scanner.Err(); err != nil { + p.err = fmt.Errorf("failed to parse: %s (%s)", path, err) + return res + } + return res +} + +// GetStats collects from sysfs files data tied to one bcache ID. 
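GetStats, implemented in the hunk that follows, walks one /sys/fs/bcache/&lt;uuid&gt; directory tree and returns a populated Stats value. A short illustrative sketch of how a collector might call it (not part of this patch; the glob pattern for locating bcache UUID directories is an assumption):

package main

import (
	"fmt"
	"log"
	"path/filepath"

	"github.com/prometheus/procfs/bcache"
)

func main() {
	// Each UUID-named directory under /sys/fs/bcache is one bcache instance.
	uuidPaths, err := filepath.Glob("/sys/fs/bcache/*-*-*-*-*")
	if err != nil {
		log.Fatal(err)
	}

	for _, uuidPath := range uuidPaths {
		stats, err := bcache.GetStats(uuidPath)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s: total cache hits = %d, backing devices = %d\n",
			filepath.Base(uuidPath), stats.Bcache.Total.CacheHits, len(stats.Bdevs))
	}
}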
+func GetStats(uuidPath string) (*Stats, error) { + var bs Stats + + par := parser{uuidPath: uuidPath} + + // bcache stats + + // dir + par.setSubDir("") + bs.Bcache.AverageKeySize = par.readValue("average_key_size") + bs.Bcache.BtreeCacheSize = par.readValue("btree_cache_size") + bs.Bcache.CacheAvailablePercent = par.readValue("cache_available_percent") + bs.Bcache.Congested = par.readValue("congested") + bs.Bcache.RootUsagePercent = par.readValue("root_usage_percent") + bs.Bcache.TreeDepth = par.readValue("tree_depth") + + // bcache stats (internal) + + // dir /internal + par.setSubDir("internal") + bs.Bcache.Internal.ActiveJournalEntries = par.readValue("active_journal_entries") + bs.Bcache.Internal.BtreeNodes = par.readValue("btree_nodes") + bs.Bcache.Internal.BtreeReadAverageDurationNanoSeconds = par.readValue("btree_read_average_duration_us") + bs.Bcache.Internal.CacheReadRaces = par.readValue("cache_read_races") + + // bcache stats (period) + + // dir /stats_five_minute + par.setSubDir("stats_five_minute") + bs.Bcache.FiveMin.Bypassed = par.readValue("bypassed") + bs.Bcache.FiveMin.CacheHits = par.readValue("cache_hits") + + bs.Bcache.FiveMin.Bypassed = par.readValue("bypassed") + bs.Bcache.FiveMin.CacheBypassHits = par.readValue("cache_bypass_hits") + bs.Bcache.FiveMin.CacheBypassMisses = par.readValue("cache_bypass_misses") + bs.Bcache.FiveMin.CacheHits = par.readValue("cache_hits") + bs.Bcache.FiveMin.CacheMissCollisions = par.readValue("cache_miss_collisions") + bs.Bcache.FiveMin.CacheMisses = par.readValue("cache_misses") + bs.Bcache.FiveMin.CacheReadaheads = par.readValue("cache_readaheads") + + // dir /stats_total + par.setSubDir("stats_total") + bs.Bcache.Total.Bypassed = par.readValue("bypassed") + bs.Bcache.Total.CacheHits = par.readValue("cache_hits") + + bs.Bcache.Total.Bypassed = par.readValue("bypassed") + bs.Bcache.Total.CacheBypassHits = par.readValue("cache_bypass_hits") + bs.Bcache.Total.CacheBypassMisses = par.readValue("cache_bypass_misses") + bs.Bcache.Total.CacheHits = par.readValue("cache_hits") + bs.Bcache.Total.CacheMissCollisions = par.readValue("cache_miss_collisions") + bs.Bcache.Total.CacheMisses = par.readValue("cache_misses") + bs.Bcache.Total.CacheReadaheads = par.readValue("cache_readaheads") + + if par.err != nil { + return nil, par.err + } + + // bdev stats + + reg := path.Join(uuidPath, "bdev[0-9]*") + bdevDirs, err := filepath.Glob(reg) + if err != nil { + return nil, err + } + + bs.Bdevs = make([]BdevStats, len(bdevDirs)) + + for ii, bdevDir := range bdevDirs { + var bds = &bs.Bdevs[ii] + + bds.Name = filepath.Base(bdevDir) + + par.setSubDir(bds.Name) + bds.DirtyData = par.readValue("dirty_data") + + // dir //stats_five_minute + par.setSubDir(bds.Name, "stats_five_minute") + bds.FiveMin.Bypassed = par.readValue("bypassed") + bds.FiveMin.CacheBypassHits = par.readValue("cache_bypass_hits") + bds.FiveMin.CacheBypassMisses = par.readValue("cache_bypass_misses") + bds.FiveMin.CacheHits = par.readValue("cache_hits") + bds.FiveMin.CacheMissCollisions = par.readValue("cache_miss_collisions") + bds.FiveMin.CacheMisses = par.readValue("cache_misses") + bds.FiveMin.CacheReadaheads = par.readValue("cache_readaheads") + + // dir //stats_total + par.setSubDir("stats_total") + bds.Total.Bypassed = par.readValue("bypassed") + bds.Total.CacheBypassHits = par.readValue("cache_bypass_hits") + bds.Total.CacheBypassMisses = par.readValue("cache_bypass_misses") + bds.Total.CacheHits = par.readValue("cache_hits") + bds.Total.CacheMissCollisions = 
par.readValue("cache_miss_collisions") + bds.Total.CacheMisses = par.readValue("cache_misses") + bds.Total.CacheReadaheads = par.readValue("cache_readaheads") + } + + if par.err != nil { + return nil, par.err + } + + // cache stats + + reg = path.Join(uuidPath, "cache[0-9]*") + cacheDirs, err := filepath.Glob(reg) + if err != nil { + return nil, err + } + bs.Caches = make([]CacheStats, len(cacheDirs)) + + for ii, cacheDir := range cacheDirs { + var cs = &bs.Caches[ii] + cs.Name = filepath.Base(cacheDir) + + // dir is / + par.setSubDir(cs.Name) + cs.IOErrors = par.readValue("io_errors") + cs.MetadataWritten = par.readValue("metadata_written") + cs.Written = par.readValue("written") + + ps := par.getPriorityStats() + cs.Priority = ps + } + + if par.err != nil { + return nil, par.err + } + + return &bs, nil +} diff --git a/vendor/github.com/prometheus/procfs/bcache/get_test.go b/vendor/github.com/prometheus/procfs/bcache/get_test.go new file mode 100644 index 00000000..1d41a5ad --- /dev/null +++ b/vendor/github.com/prometheus/procfs/bcache/get_test.go @@ -0,0 +1,114 @@ +// Copyright 2017 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bcache + +import ( + "math" + "testing" +) + +func TestDehumanizeTests(t *testing.T) { + dehumanizeTests := []struct { + in []byte + out uint64 + invalid bool + }{ + { + in: []byte("542k"), + out: 555008, + }, + { + in: []byte("322M"), + out: 337641472, + }, + { + in: []byte("1.1k"), + out: 1124, + }, + { + in: []byte("1.9k"), + out: 1924, + }, + { + in: []byte("1.10k"), + out: 2024, + }, + { + in: []byte(""), + out: 0, + invalid: true, + }, + } + for _, tst := range dehumanizeTests { + got, err := dehumanize(tst.in) + if tst.invalid && err == nil { + t.Error("expected an error, but none occurred") + } + if !tst.invalid && err != nil { + t.Errorf("unexpected error: %v", err) + } + if got != tst.out { + t.Errorf("dehumanize: '%s', want %d, got %d", tst.in, tst.out, got) + } + } +} + +func TestParsePseudoFloatTests(t *testing.T) { + parsePseudoFloatTests := []struct { + in string + out float64 + }{ + { + in: "1.1", + out: float64(1.097656), + }, + { + in: "1.9", + out: float64(1.878906), + }, + { + in: "1.10", + out: float64(1.976562), + }, + } + for _, tst := range parsePseudoFloatTests { + got, err := parsePseudoFloat(tst.in) + if err != nil || math.Abs(got-tst.out) > 0.0001 { + t.Errorf("parsePseudoFloat: %s, want %f, got %f", tst.in, tst.out, got) + } + } +} + +func TestPriorityStats(t *testing.T) { + var want = PriorityStats{ + UnusedPercent: 99, + MetadataPercent: 5, + } + var ( + in string + gotErr error + got PriorityStats + ) + in = "Metadata: 5%" + gotErr = parsePriorityStats(in, &got) + if gotErr != nil || got.MetadataPercent != want.MetadataPercent { + t.Errorf("parsePriorityStats: '%s', want %d, got %d", in, want.MetadataPercent, got.MetadataPercent) + } + + in = "Unused: 99%" + gotErr = parsePriorityStats(in, &got) + if gotErr != nil || got.UnusedPercent != want.UnusedPercent { + t.Errorf("parsePriorityStats: 
'%s', want %d, got %d", in, want.UnusedPercent, got.UnusedPercent) + } +} diff --git a/vendor/github.com/prometheus/procfs/buddyinfo.go b/vendor/github.com/prometheus/procfs/buddyinfo.go index 680a9842..d3a82680 100644 --- a/vendor/github.com/prometheus/procfs/buddyinfo.go +++ b/vendor/github.com/prometheus/procfs/buddyinfo.go @@ -62,7 +62,7 @@ func parseBuddyInfo(r io.Reader) ([]BuddyInfo, error) { for scanner.Scan() { var err error line := scanner.Text() - parts := strings.Fields(string(line)) + parts := strings.Fields(line) if len(parts) < 4 { return nil, fmt.Errorf("invalid number of fields when parsing buddyinfo") diff --git a/vendor/github.com/prometheus/procfs/fixtures.ttar b/vendor/github.com/prometheus/procfs/fixtures.ttar new file mode 100644 index 00000000..3ee8291e --- /dev/null +++ b/vendor/github.com/prometheus/procfs/fixtures.ttar @@ -0,0 +1,446 @@ +# Archive created by ttar -c -f fixtures.ttar fixtures/ +Directory: fixtures +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26231 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/cmdline +Lines: 1 +vimNULLBYTEtest.goNULLBYTE+10NULLBYTEEOF +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/comm +Lines: 1 +vim +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/exe +SymlinkTo: /usr/bin/vim +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26231/fd +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/fd/0 +SymlinkTo: ../../symlinktargets/abc +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/fd/1 +SymlinkTo: ../../symlinktargets/def +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/fd/10 +SymlinkTo: ../../symlinktargets/xyz +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/fd/2 +SymlinkTo: ../../symlinktargets/ghi +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/fd/3 +SymlinkTo: ../../symlinktargets/uvw +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/io +Lines: 7 +rchar: 750339 +wchar: 818609 +syscr: 7405 +syscw: 5245 +read_bytes: 1024 +write_bytes: 2048 +cancelled_write_bytes: -1024 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/limits +Lines: 17 +Limit Soft Limit Hard Limit Units +Max cpu time unlimited unlimited seconds +Max file size unlimited unlimited bytes +Max data size unlimited unlimited bytes +Max stack size 8388608 unlimited bytes +Max core file size 0 unlimited bytes +Max resident set unlimited unlimited bytes +Max processes 62898 62898 processes +Max open files 2048 4096 files +Max locked memory 65536 65536 bytes +Max address space 8589934592 unlimited bytes +Max file locks unlimited unlimited locks +Max pending signals 62898 62898 signals +Max msgqueue size 819200 819200 bytes +Max nice priority 0 0 +Max realtime priority 0 0 +Max realtime timeout unlimited unlimited us +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/mountstats +Lines: 19 +device rootfs mounted on / with 
fstype rootfs +device sysfs mounted on /sys with fstype sysfs +device proc mounted on /proc with fstype proc +device /dev/sda1 mounted on / with fstype ext4 +device 192.168.1.1:/srv/test mounted on /mnt/nfs/test with fstype nfs4 statvers=1.1 + opts: rw,vers=4.0,rsize=1048576,wsize=1048576,namlen=255,acregmin=3,acregmax=60,acdirmin=30,acdirmax=60,hard,proto=tcp,port=0,timeo=600,retrans=2,sec=sys,clientaddr=192.168.1.5,local_lock=none + age: 13968 + caps: caps=0xfff7,wtmult=512,dtsize=32768,bsize=0,namlen=255 + nfsv4: bm0=0xfdffafff,bm1=0xf9be3e,bm2=0x0,acl=0x0,pnfs=not configured + sec: flavor=1,pseudoflavor=1 + events: 52 226 0 0 1 13 398 0 0 331 0 47 0 0 77 0 0 77 0 0 0 0 0 0 0 0 0 + bytes: 1207640230 0 0 0 1210214218 0 295483 0 + RPC iostats version: 1.0 p/v: 100003/4 (nfs) + xprt: tcp 832 0 1 0 11 6428 6428 0 12154 0 24 26 5726 + per-op statistics + NULL: 0 0 0 0 0 0 0 0 + READ: 1298 1298 0 207680 1210292152 6 79386 79407 + WRITE: 0 0 0 0 0 0 0 0 + +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26231/net +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/net/dev +Lines: 4 +Inter-| Receive | Transmit + face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed + lo: 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + eth0: 438 5 0 0 0 0 0 0 648 8 0 0 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26231/ns +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/ns/mnt +SymlinkTo: mnt:[4026531840] +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/ns/net +SymlinkTo: net:[4026531993] +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26231/stat +Lines: 1 +26231 (vim) R 5392 7446 5392 34835 7446 4218880 32533 309516 26 82 1677 44 158 99 20 0 1 0 82375 56274944 1981 18446744073709551615 4194304 6294284 140736914091744 140736914087944 139965136429984 0 0 12288 1870679807 0 0 0 17 0 0 0 31 0 0 8391624 8481048 16420864 140736914093252 140736914093279 140736914093279 140736914096107 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26232 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/cmdline +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/comm +Lines: 1 +ata_sff +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26232/fd +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/fd/0 +SymlinkTo: ../../symlinktargets/abc +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/fd/1 +SymlinkTo: ../../symlinktargets/def +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/fd/2 +SymlinkTo: ../../symlinktargets/ghi +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/fd/3 +SymlinkTo: ../../symlinktargets/uvw +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/fd/4 +SymlinkTo: ../../symlinktargets/xyz +# ttar - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/limits +Lines: 17 +Limit Soft Limit Hard Limit Units +Max cpu time unlimited unlimited seconds +Max file size unlimited unlimited bytes +Max data size unlimited unlimited bytes +Max stack size 8388608 unlimited bytes +Max core file size 0 unlimited bytes +Max resident set unlimited unlimited bytes +Max processes 29436 29436 processes +Max open files 1024 4096 files +Max locked memory 65536 65536 bytes +Max address space unlimited unlimited bytes +Max file locks unlimited unlimited locks +Max pending signals 29436 29436 signals +Max msgqueue size 819200 819200 bytes +Max nice priority 0 0 +Max realtime priority 0 0 +Max realtime timeout unlimited unlimited us +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26232/stat +Lines: 1 +33 (ata_sff) S 2 0 0 0 -1 69238880 0 0 0 0 0 0 0 0 0 -20 1 0 5 0 0 18446744073709551615 0 0 0 0 0 0 0 2147483647 0 18446744073709551615 0 0 17 1 0 0 0 0 0 0 0 0 0 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/26233 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/26233/cmdline +Lines: 1 +com.github.uiautomatorNULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTENULLBYTEEOF +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/584 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/584/stat +Lines: 2 +1020 ((a b ) ( c d) ) R 28378 1020 28378 34842 1020 4218880 286 0 0 0 0 0 0 0 20 0 1 0 10839175 10395648 155 18446744073709551615 4194304 4238788 140736466511168 140736466511168 140609271124624 0 0 0 0 0 0 0 17 5 0 0 0 0 0 6336016 6337300 25579520 140736466515030 140736466515061 140736466515061 140736466518002 0 +#!/bin/cat /proc/self/stat +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/buddyinfo +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/buddyinfo/short +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/buddyinfo/short/buddyinfo +Lines: 3 +Node 0, zone +Node 0, zone +Node 0, zone +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/buddyinfo/sizemismatch +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/buddyinfo/sizemismatch/buddyinfo +Lines: 3 +Node 0, zone DMA 1 0 1 0 2 1 1 0 1 1 3 +Node 0, zone DMA32 759 572 791 475 194 45 12 0 0 0 0 0 +Node 0, zone Normal 4381 1093 185 1530 567 102 4 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/buddyinfo/valid +Mode: 755 +# ttar - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/buddyinfo/valid/buddyinfo +Lines: 3 +Node 0, zone DMA 1 0 1 0 2 1 1 0 1 1 3 +Node 0, zone DMA32 759 572 791 475 194 45 12 0 0 0 0 +Node 0, zone Normal 4381 1093 185 1530 567 102 4 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/xfs/stat +Lines: 23 +extent_alloc 92447 97589 92448 93751 +abt 0 0 0 0 +blk_map 1767055 188820 184891 92447 92448 2140766 0 +bmbt 0 0 0 0 +dir 185039 92447 92444 136422 +trans 706 944304 0 +ig 185045 58807 0 126238 0 33637 22 +log 2883 113448 9 17360 739 +push_ail 945014 0 134260 15483 0 3940 464 159985 0 40 +xstrat 92447 0 +rw 107739 94045 +attr 4 0 0 0 +icluster 8677 7849 135802 +vnodes 92601 0 0 0 92444 92444 92444 0 +buf 2666287 7122 2659202 3599 2 7085 0 10297 7085 +abtb2 184941 1277345 13257 13278 0 0 0 0 0 0 0 0 0 0 2746147 +abtc2 345295 2416764 172637 172658 0 0 0 0 0 0 0 0 0 0 21406023 +bmbt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +ibt2 343004 1358467 0 0 0 0 0 0 0 0 0 0 0 0 0 +fibt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +qm 0 0 0 0 0 0 0 0 +xpc 399724544 92823103 86219234 +debug 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/mdstat +Lines: 26 +Personalities : [linear] [multipath] [raid0] [raid1] [raid6] [raid5] [raid4] [raid10] +md3 : active raid6 sda1[8] sdh1[7] sdg1[6] sdf1[5] sde1[11] sdd1[3] sdc1[10] sdb1[9] + 5853468288 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU] + +md127 : active raid1 sdi2[0] sdj2[1] + 312319552 blocks [2/2] [UU] + +md0 : active raid1 sdk[2](S) sdi1[0] sdj1[1] + 248896 blocks [2/2] [UU] + +md4 : inactive raid1 sda3[0] sdb3[1] + 4883648 blocks [2/2] [UU] + +md6 : active raid1 sdb2[2] sda2[0] + 195310144 blocks [2/1] [U_] + [=>...................] recovery = 8.5% (16775552/195310144) finish=17.0min speed=259783K/sec + +md8 : active raid1 sdb1[1] sda1[0] + 195310144 blocks [2/2] [UU] + [=>...................] 
resync = 8.5% (16775552/195310144) finish=17.0min speed=259783K/sec + +md7 : active raid6 sdb1[0] sde1[3] sdd1[2] sdc1[1] + 7813735424 blocks super 1.2 level 6, 512k chunk, algorithm 2 [4/3] [U_UU] + bitmap: 0/30 pages [0KB], 65536KB chunk + +unused devices: +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/net +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/dev +Lines: 6 +Inter-| Receive | Transmit + face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed +vethf345468: 648 8 0 0 0 0 0 0 438 5 0 0 0 0 0 0 + lo: 1664039048 1566805 0 0 0 0 0 0 1664039048 1566805 0 0 0 0 0 0 +docker0: 2568 38 0 0 0 0 0 0 438 5 0 0 0 0 0 0 + eth0: 874354587 1036395 0 0 0 0 0 0 563352563 732147 0 0 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/ip_vs +Lines: 21 +IP Virtual Server version 1.2.1 (size=4096) +Prot LocalAddress:Port Scheduler Flags + -> RemoteAddress:Port Forward Weight ActiveConn InActConn +TCP C0A80016:0CEA wlc + -> C0A85216:0CEA Tunnel 100 248 2 + -> C0A85318:0CEA Tunnel 100 248 2 + -> C0A85315:0CEA Tunnel 100 248 1 +TCP C0A80039:0CEA wlc + -> C0A85416:0CEA Tunnel 0 0 0 + -> C0A85215:0CEA Tunnel 100 1499 0 + -> C0A83215:0CEA Tunnel 100 1498 0 +TCP C0A80037:0CEA wlc + -> C0A8321A:0CEA Tunnel 0 0 0 + -> C0A83120:0CEA Tunnel 100 0 0 +TCP [2620:0000:0000:0000:0000:0000:0000:0001]:0050 sh + -> [2620:0000:0000:0000:0000:0000:0000:0002]:0050 Route 1 0 0 + -> [2620:0000:0000:0000:0000:0000:0000:0003]:0050 Route 1 0 0 + -> [2620:0000:0000:0000:0000:0000:0000:0004]:0050 Route 1 1 1 +FWM 10001000 wlc + -> C0A8321A:0CEA Route 0 0 1 + -> C0A83215:0CEA Route 0 0 2 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/ip_vs_stats +Lines: 6 + Total Incoming Outgoing Incoming Outgoing + Conns Packets Packets Bytes Bytes + 16AA370 E33656E5 0 51D8C8883AB3 0 + + Conns/s Pkts/s Pkts/s Bytes/s Bytes/s + 4 1FB3C 0 1282A8F 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/net/rpc +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/rpc/nfs +Lines: 5 +net 18628 0 18628 6 +rpc 4329785 0 4338291 +proc2 18 2 69 0 0 4410 0 0 0 0 0 0 0 0 0 0 0 99 2 +proc3 22 1 4084749 29200 94754 32580 186 47747 7981 8639 0 6356 0 6962 0 7958 0 0 241 4 4 2 39 +proc4 61 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/rpc/nfsd +Lines: 11 +rc 0 6 18622 +fh 0 0 0 0 0 +io 157286400 0 +th 8 0 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 +ra 32 0 0 0 0 0 0 0 0 0 0 0 +net 18628 0 18628 6 +rpc 18628 0 0 0 0 +proc2 18 2 69 0 0 4410 0 0 0 0 0 0 0 0 0 0 0 99 2 +proc3 22 2 112 0 2719 111 0 0 0 0 0 0 0 0 0 0 0 27 216 0 2 1 0 +proc4 2 2 10853 +proc4ops 72 0 0 0 1098 2 0 0 0 0 8179 5896 0 0 0 0 5900 0 0 2 0 2 0 9609 0 2 150 1272 0 0 0 1236 0 0 0 0 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/net/xfrm_stat +Lines: 28 +XfrmInError 1 +XfrmInBufferError 2 +XfrmInHdrError 4 +XfrmInNoStates 3 
+XfrmInStateProtoError 40 +XfrmInStateModeError 100 +XfrmInStateSeqError 6000 +XfrmInStateExpired 4 +XfrmInStateMismatch 23451 +XfrmInStateInvalid 55555 +XfrmInTmplMismatch 51 +XfrmInNoPols 65432 +XfrmInPolBlock 100 +XfrmInPolError 10000 +XfrmOutError 1000000 +XfrmOutBundleGenError 43321 +XfrmOutBundleCheckError 555 +XfrmOutNoStates 869 +XfrmOutStateProtoError 4542 +XfrmOutStateModeError 4 +XfrmOutStateSeqError 543 +XfrmOutStateExpired 565 +XfrmOutPolBlock 43456 +XfrmOutPolDead 7656 +XfrmOutPolError 1454 +XfrmFwdHdrError 6654 +XfrmOutStateInvalid 28765 +XfrmAcquireError 24532 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/self +SymlinkTo: 26231 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/stat +Lines: 16 +cpu 301854 612 111922 8979004 3552 2 3944 0 0 0 +cpu0 44490 19 21045 1087069 220 1 3410 0 0 0 +cpu1 47869 23 16474 1110787 591 0 46 0 0 0 +cpu2 46504 36 15916 1112321 441 0 326 0 0 0 +cpu3 47054 102 15683 1113230 533 0 60 0 0 0 +cpu4 28413 25 10776 1140321 217 0 8 0 0 0 +cpu5 29271 101 11586 1136270 672 0 30 0 0 0 +cpu6 29152 36 10276 1139721 319 0 29 0 0 0 +cpu7 29098 268 10164 1139282 555 0 31 0 0 0 +intr 8885917 17 0 0 0 0 0 0 0 1 79281 0 0 0 0 0 0 0 231237 0 0 0 0 250586 103 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 223424 190745 13 906 1283803 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +ctxt 38014093 +btime 1418183276 +processes 26442 +procs_running 2 +procs_blocked 1 +softirq 5057579 250191 1481983 1647 211099 186066 0 1783454 622196 12499 508444 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/symlinktargets +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/README +Lines: 2 +This directory contains some empty files that are the symlinks the files in the "fd" directory point to. 
+They are otherwise ignored by the tests +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/abc +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/def +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/ghi +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/uvw +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/symlinktargets/xyz +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/cmdline b/vendor/github.com/prometheus/procfs/fixtures/26231/cmdline deleted file mode 100644 index d2d8ef88..00000000 Binary files a/vendor/github.com/prometheus/procfs/fixtures/26231/cmdline and /dev/null differ diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/comm b/vendor/github.com/prometheus/procfs/fixtures/26231/comm deleted file mode 100644 index f027e0d4..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/comm +++ /dev/null @@ -1 +0,0 @@ -vim diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/exe b/vendor/github.com/prometheus/procfs/fixtures/26231/exe deleted file mode 120000 index a91bec4d..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/exe +++ /dev/null @@ -1 +0,0 @@ -/usr/bin/vim \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/0 b/vendor/github.com/prometheus/procfs/fixtures/26231/fd/0 deleted file mode 120000 index da9c5dff..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/0 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/abc \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/1 b/vendor/github.com/prometheus/procfs/fixtures/26231/fd/1 deleted file mode 120000 index ca47b50c..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/1 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/def \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/10 b/vendor/github.com/prometheus/procfs/fixtures/26231/fd/10 deleted file mode 120000 index c0868316..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/10 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/xyz \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/2 b/vendor/github.com/prometheus/procfs/fixtures/26231/fd/2 deleted file mode 120000 index 66731c06..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/2 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/ghi \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/3 b/vendor/github.com/prometheus/procfs/fixtures/26231/fd/3 deleted file mode 120000 index 0135dce3..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/fd/3 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/uvw \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/io b/vendor/github.com/prometheus/procfs/fixtures/26231/io deleted file mode 100644 index b6210a7a..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/io +++ /dev/null @@ -1,7 +0,0 @@ -rchar: 750339 
-wchar: 818609 -syscr: 7405 -syscw: 5245 -read_bytes: 1024 -write_bytes: 2048 -cancelled_write_bytes: -1024 diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/limits b/vendor/github.com/prometheus/procfs/fixtures/26231/limits deleted file mode 100644 index 23c6b689..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/limits +++ /dev/null @@ -1,17 +0,0 @@ -Limit Soft Limit Hard Limit Units -Max cpu time unlimited unlimited seconds -Max file size unlimited unlimited bytes -Max data size unlimited unlimited bytes -Max stack size 8388608 unlimited bytes -Max core file size 0 unlimited bytes -Max resident set unlimited unlimited bytes -Max processes 62898 62898 processes -Max open files 2048 4096 files -Max locked memory 65536 65536 bytes -Max address space unlimited unlimited bytes -Max file locks unlimited unlimited locks -Max pending signals 62898 62898 signals -Max msgqueue size 819200 819200 bytes -Max nice priority 0 0 -Max realtime priority 0 0 -Max realtime timeout unlimited unlimited us diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/mountstats b/vendor/github.com/prometheus/procfs/fixtures/26231/mountstats deleted file mode 100644 index a665c33d..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/mountstats +++ /dev/null @@ -1,19 +0,0 @@ -device rootfs mounted on / with fstype rootfs -device sysfs mounted on /sys with fstype sysfs -device proc mounted on /proc with fstype proc -device /dev/sda1 mounted on / with fstype ext4 -device 192.168.1.1:/srv/test mounted on /mnt/nfs/test with fstype nfs4 statvers=1.1 - opts: rw,vers=4.0,rsize=1048576,wsize=1048576,namlen=255,acregmin=3,acregmax=60,acdirmin=30,acdirmax=60,hard,proto=tcp,port=0,timeo=600,retrans=2,sec=sys,clientaddr=192.168.1.5,local_lock=none - age: 13968 - caps: caps=0xfff7,wtmult=512,dtsize=32768,bsize=0,namlen=255 - nfsv4: bm0=0xfdffafff,bm1=0xf9be3e,bm2=0x0,acl=0x0,pnfs=not configured - sec: flavor=1,pseudoflavor=1 - events: 52 226 0 0 1 13 398 0 0 331 0 47 0 0 77 0 0 77 0 0 0 0 0 0 0 0 0 - bytes: 1207640230 0 0 0 1210214218 0 295483 0 - RPC iostats version: 1.0 p/v: 100003/4 (nfs) - xprt: tcp 832 0 1 0 11 6428 6428 0 12154 0 24 26 5726 - per-op statistics - NULL: 0 0 0 0 0 0 0 0 - READ: 1298 1298 0 207680 1210292152 6 79386 79407 - WRITE: 0 0 0 0 0 0 0 0 - diff --git a/vendor/github.com/prometheus/procfs/fixtures/26231/stat b/vendor/github.com/prometheus/procfs/fixtures/26231/stat deleted file mode 100644 index 438aaa9d..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26231/stat +++ /dev/null @@ -1 +0,0 @@ -26231 (vim) R 5392 7446 5392 34835 7446 4218880 32533 309516 26 82 1677 44 158 99 20 0 1 0 82375 56274944 1981 18446744073709551615 4194304 6294284 140736914091744 140736914087944 139965136429984 0 0 12288 1870679807 0 0 0 17 0 0 0 31 0 0 8391624 8481048 16420864 140736914093252 140736914093279 140736914093279 140736914096107 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/cmdline b/vendor/github.com/prometheus/procfs/fixtures/26232/cmdline deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/comm b/vendor/github.com/prometheus/procfs/fixtures/26232/comm deleted file mode 100644 index 62361ca7..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/comm +++ /dev/null @@ -1 +0,0 @@ -ata_sff diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/0 b/vendor/github.com/prometheus/procfs/fixtures/26232/fd/0 deleted file mode 120000 index da9c5dff..00000000 
--- a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/0 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/abc \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/1 b/vendor/github.com/prometheus/procfs/fixtures/26232/fd/1 deleted file mode 120000 index ca47b50c..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/1 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/def \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/2 b/vendor/github.com/prometheus/procfs/fixtures/26232/fd/2 deleted file mode 120000 index 66731c06..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/2 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/ghi \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/3 b/vendor/github.com/prometheus/procfs/fixtures/26232/fd/3 deleted file mode 120000 index 0135dce3..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/3 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/uvw \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/4 b/vendor/github.com/prometheus/procfs/fixtures/26232/fd/4 deleted file mode 120000 index c0868316..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/fd/4 +++ /dev/null @@ -1 +0,0 @@ -../../symlinktargets/xyz \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/limits b/vendor/github.com/prometheus/procfs/fixtures/26232/limits deleted file mode 100644 index 3f9bf16a..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/limits +++ /dev/null @@ -1,17 +0,0 @@ -Limit Soft Limit Hard Limit Units -Max cpu time unlimited unlimited seconds -Max file size unlimited unlimited bytes -Max data size unlimited unlimited bytes -Max stack size 8388608 unlimited bytes -Max core file size 0 unlimited bytes -Max resident set unlimited unlimited bytes -Max processes 29436 29436 processes -Max open files 1024 4096 files -Max locked memory 65536 65536 bytes -Max address space unlimited unlimited bytes -Max file locks unlimited unlimited locks -Max pending signals 29436 29436 signals -Max msgqueue size 819200 819200 bytes -Max nice priority 0 0 -Max realtime priority 0 0 -Max realtime timeout unlimited unlimited us diff --git a/vendor/github.com/prometheus/procfs/fixtures/26232/stat b/vendor/github.com/prometheus/procfs/fixtures/26232/stat deleted file mode 100644 index 321b1607..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/26232/stat +++ /dev/null @@ -1 +0,0 @@ -33 (ata_sff) S 2 0 0 0 -1 69238880 0 0 0 0 0 0 0 0 0 -20 1 0 5 0 0 18446744073709551615 0 0 0 0 0 0 0 2147483647 0 18446744073709551615 0 0 17 1 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/584/stat b/vendor/github.com/prometheus/procfs/fixtures/584/stat deleted file mode 100644 index 65b9369d..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/584/stat +++ /dev/null @@ -1,2 +0,0 @@ -1020 ((a b ) ( c d) ) R 28378 1020 28378 34842 1020 4218880 286 0 0 0 0 0 0 0 20 0 1 0 10839175 10395648 155 18446744073709551615 4194304 4238788 140736466511168 140736466511168 140609271124624 0 0 0 0 0 0 0 17 5 0 0 0 0 0 6336016 6337300 25579520 140736466515030 140736466515061 140736466515061 140736466518002 0 -#!/bin/cat /proc/self/stat diff --git a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/short/buddyinfo b/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/short/buddyinfo 
deleted file mode 100644 index 40e71ca3..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/short/buddyinfo +++ /dev/null @@ -1,3 +0,0 @@ -Node 0, zone -Node 0, zone -Node 0, zone diff --git a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/sizemismatch/buddyinfo b/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/sizemismatch/buddyinfo deleted file mode 100644 index 94563618..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/sizemismatch/buddyinfo +++ /dev/null @@ -1,3 +0,0 @@ -Node 0, zone DMA 1 0 1 0 2 1 1 0 1 1 3 -Node 0, zone DMA32 759 572 791 475 194 45 12 0 0 0 0 0 -Node 0, zone Normal 4381 1093 185 1530 567 102 4 0 0 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/valid/buddyinfo b/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/valid/buddyinfo deleted file mode 100644 index f90594a8..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/buddyinfo/valid/buddyinfo +++ /dev/null @@ -1,3 +0,0 @@ -Node 0, zone DMA 1 0 1 0 2 1 1 0 1 1 3 -Node 0, zone DMA32 759 572 791 475 194 45 12 0 0 0 0 -Node 0, zone Normal 4381 1093 185 1530 567 102 4 0 0 0 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/fs/xfs/stat b/vendor/github.com/prometheus/procfs/fixtures/fs/xfs/stat deleted file mode 100644 index f7ca7f94..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/fs/xfs/stat +++ /dev/null @@ -1,23 +0,0 @@ -extent_alloc 92447 97589 92448 93751 -abt 0 0 0 0 -blk_map 1767055 188820 184891 92447 92448 2140766 0 -bmbt 0 0 0 0 -dir 185039 92447 92444 136422 -trans 706 944304 0 -ig 185045 58807 0 126238 0 33637 22 -log 2883 113448 9 17360 739 -push_ail 945014 0 134260 15483 0 3940 464 159985 0 40 -xstrat 92447 0 -rw 107739 94045 -attr 4 0 0 0 -icluster 8677 7849 135802 -vnodes 92601 0 0 0 92444 92444 92444 0 -buf 2666287 7122 2659202 3599 2 7085 0 10297 7085 -abtb2 184941 1277345 13257 13278 0 0 0 0 0 0 0 0 0 0 2746147 -abtc2 345295 2416764 172637 172658 0 0 0 0 0 0 0 0 0 0 21406023 -bmbt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -ibt2 343004 1358467 0 0 0 0 0 0 0 0 0 0 0 0 0 -fibt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -qm 0 0 0 0 0 0 0 0 -xpc 399724544 92823103 86219234 -debug 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/mdstat b/vendor/github.com/prometheus/procfs/fixtures/mdstat deleted file mode 100644 index 4430bdee..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/mdstat +++ /dev/null @@ -1,26 +0,0 @@ -Personalities : [linear] [multipath] [raid0] [raid1] [raid6] [raid5] [raid4] [raid10] -md3 : active raid6 sda1[8] sdh1[7] sdg1[6] sdf1[5] sde1[11] sdd1[3] sdc1[10] sdb1[9] - 5853468288 blocks super 1.2 level 6, 64k chunk, algorithm 2 [8/8] [UUUUUUUU] - -md127 : active raid1 sdi2[0] sdj2[1] - 312319552 blocks [2/2] [UU] - -md0 : active raid1 sdk[2](S) sdi1[0] sdj1[1] - 248896 blocks [2/2] [UU] - -md4 : inactive raid1 sda3[0] sdb3[1] - 4883648 blocks [2/2] [UU] - -md6 : active raid1 sdb2[2] sda2[0] - 195310144 blocks [2/1] [U_] - [=>...................] recovery = 8.5% (16775552/195310144) finish=17.0min speed=259783K/sec - -md8 : active raid1 sdb1[1] sda1[0] - 195310144 blocks [2/2] [UU] - [=>...................] 
resync = 8.5% (16775552/195310144) finish=17.0min speed=259783K/sec - -md7 : active raid6 sdb1[0] sde1[3] sdd1[2] sdc1[1] - 7813735424 blocks super 1.2 level 6, 512k chunk, algorithm 2 [4/3] [U_UU] - bitmap: 0/30 pages [0KB], 65536KB chunk - -unused devices: diff --git a/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs b/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs deleted file mode 100644 index 5ee4bd2b..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs +++ /dev/null @@ -1,21 +0,0 @@ -IP Virtual Server version 1.2.1 (size=4096) -Prot LocalAddress:Port Scheduler Flags - -> RemoteAddress:Port Forward Weight ActiveConn InActConn -TCP C0A80016:0CEA wlc - -> C0A85216:0CEA Tunnel 100 248 2 - -> C0A85318:0CEA Tunnel 100 248 2 - -> C0A85315:0CEA Tunnel 100 248 1 -TCP C0A80039:0CEA wlc - -> C0A85416:0CEA Tunnel 0 0 0 - -> C0A85215:0CEA Tunnel 100 1499 0 - -> C0A83215:0CEA Tunnel 100 1498 0 -TCP C0A80037:0CEA wlc - -> C0A8321A:0CEA Tunnel 0 0 0 - -> C0A83120:0CEA Tunnel 100 0 0 -TCP [2620:0000:0000:0000:0000:0000:0000:0001]:0050 sh - -> [2620:0000:0000:0000:0000:0000:0000:0002]:0050 Route 1 0 0 - -> [2620:0000:0000:0000:0000:0000:0000:0003]:0050 Route 1 0 0 - -> [2620:0000:0000:0000:0000:0000:0000:0004]:0050 Route 1 1 1 -FWM 10001000 wlc - -> C0A8321A:0CEA Route 0 0 1 - -> C0A83215:0CEA Route 0 0 2 diff --git a/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs_stats b/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs_stats deleted file mode 100644 index c00724e0..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/net/ip_vs_stats +++ /dev/null @@ -1,6 +0,0 @@ - Total Incoming Outgoing Incoming Outgoing - Conns Packets Packets Bytes Bytes - 16AA370 E33656E5 0 51D8C8883AB3 0 - - Conns/s Pkts/s Pkts/s Bytes/s Bytes/s - 4 1FB3C 0 1282A8F 0 diff --git a/vendor/github.com/prometheus/procfs/fixtures/net/xfrm_stat b/vendor/github.com/prometheus/procfs/fixtures/net/xfrm_stat deleted file mode 100644 index d278ace9..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/net/xfrm_stat +++ /dev/null @@ -1,28 +0,0 @@ -XfrmInError 1 -XfrmInBufferError 2 -XfrmInHdrError 4 -XfrmInNoStates 3 -XfrmInStateProtoError 40 -XfrmInStateModeError 100 -XfrmInStateSeqError 6000 -XfrmInStateExpired 4 -XfrmInStateMismatch 23451 -XfrmInStateInvalid 55555 -XfrmInTmplMismatch 51 -XfrmInNoPols 65432 -XfrmInPolBlock 100 -XfrmInPolError 10000 -XfrmOutError 1000000 -XfrmOutBundleGenError 43321 -XfrmOutBundleCheckError 555 -XfrmOutNoStates 869 -XfrmOutStateProtoError 4542 -XfrmOutStateModeError 4 -XfrmOutStateSeqError 543 -XfrmOutStateExpired 565 -XfrmOutPolBlock 43456 -XfrmOutPolDead 7656 -XfrmOutPolError 1454 -XfrmFwdHdrError 6654 -XfrmOutStateInvalid 28765 -XfrmAcquireError 24532 diff --git a/vendor/github.com/prometheus/procfs/fixtures/self b/vendor/github.com/prometheus/procfs/fixtures/self deleted file mode 120000 index 1eeedea3..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/self +++ /dev/null @@ -1 +0,0 @@ -26231 \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/fixtures/stat b/vendor/github.com/prometheus/procfs/fixtures/stat deleted file mode 100644 index dabb96f7..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/stat +++ /dev/null @@ -1,16 +0,0 @@ -cpu 301854 612 111922 8979004 3552 2 3944 0 0 0 -cpu0 44490 19 21045 1087069 220 1 3410 0 0 0 -cpu1 47869 23 16474 1110787 591 0 46 0 0 0 -cpu2 46504 36 15916 1112321 441 0 326 0 0 0 -cpu3 47054 102 15683 1113230 533 0 60 0 0 0 -cpu4 28413 25 10776 1140321 217 0 8 0 0 0 
-cpu5 29271 101 11586 1136270 672 0 30 0 0 0 -cpu6 29152 36 10276 1139721 319 0 29 0 0 0 -cpu7 29098 268 10164 1139282 555 0 31 0 0 0 -intr 8885917 17 0 0 0 0 0 0 0 1 79281 0 0 0 0 0 0 0 231237 0 0 0 0 250586 103 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 223424 190745 13 906 1283803 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -ctxt 38014093 -btime 1418183276 -processes 26442 -procs_running 2 -procs_blocked 0 -softirq 5057579 250191 1481983 1647 211099 186066 0 1783454 622196 12499 508444 diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/README b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/README deleted file mode 100644 index 5cf184ea..00000000 --- a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/README +++ /dev/null @@ -1,2 +0,0 @@ -This directory contains some empty files that are the symlinks the files in the "fd" directory point to. 
-They are otherwise ignored by the tests diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/abc b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/abc deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/def b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/def deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/ghi b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/ghi deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/uvw b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/uvw deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/xyz b/vendor/github.com/prometheus/procfs/fixtures/symlinktargets/xyz deleted file mode 100644 index e69de29b..00000000 diff --git a/vendor/github.com/prometheus/procfs/fs.go b/vendor/github.com/prometheus/procfs/fs.go index 17546756..b6c6b2ce 100644 --- a/vendor/github.com/prometheus/procfs/fs.go +++ b/vendor/github.com/prometheus/procfs/fs.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( @@ -5,6 +18,7 @@ import ( "os" "path" + "github.com/prometheus/procfs/nfs" "github.com/prometheus/procfs/xfs" ) @@ -44,3 +58,25 @@ func (fs FS) XFSStats() (*xfs.Stats, error) { return xfs.ParseStats(f) } + +// NFSClientRPCStats retrieves NFS client RPC statistics. +func (fs FS) NFSClientRPCStats() (*nfs.ClientRPCStats, error) { + f, err := os.Open(fs.Path("net/rpc/nfs")) + if err != nil { + return nil, err + } + defer f.Close() + + return nfs.ParseClientRPCStats(f) +} + +// NFSdServerRPCStats retrieves NFS daemon RPC statistics. +func (fs FS) NFSdServerRPCStats() (*nfs.ServerRPCStats, error) { + f, err := os.Open(fs.Path("net/rpc/nfsd")) + if err != nil { + return nil, err + } + defer f.Close() + + return nfs.ParseServerRPCStats(f) +} diff --git a/vendor/github.com/prometheus/procfs/fs_test.go b/vendor/github.com/prometheus/procfs/fs_test.go index e492cde5..a4e07f5c 100644 --- a/vendor/github.com/prometheus/procfs/fs_test.go +++ b/vendor/github.com/prometheus/procfs/fs_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package procfs import "testing" diff --git a/vendor/github.com/prometheus/procfs/internal/util/parse.go b/vendor/github.com/prometheus/procfs/internal/util/parse.go new file mode 100644 index 00000000..1ad21c91 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/internal/util/parse.go @@ -0,0 +1,46 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import "strconv" + +// ParseUint32s parses a slice of strings into a slice of uint32s. +func ParseUint32s(ss []string) ([]uint32, error) { + us := make([]uint32, 0, len(ss)) + for _, s := range ss { + u, err := strconv.ParseUint(s, 10, 32) + if err != nil { + return nil, err + } + + us = append(us, uint32(u)) + } + + return us, nil +} + +// ParseUint64s parses a slice of strings into a slice of uint64s. +func ParseUint64s(ss []string) ([]uint64, error) { + us := make([]uint64, 0, len(ss)) + for _, s := range ss { + u, err := strconv.ParseUint(s, 10, 64) + if err != nil { + return nil, err + } + + us = append(us, u) + } + + return us, nil +} diff --git a/vendor/github.com/prometheus/procfs/ipvs.go b/vendor/github.com/prometheus/procfs/ipvs.go index 696d114e..e36d4a3b 100644 --- a/vendor/github.com/prometheus/procfs/ipvs.go +++ b/vendor/github.com/prometheus/procfs/ipvs.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( @@ -31,16 +44,16 @@ type IPVSStats struct { type IPVSBackendStatus struct { // The local (virtual) IP address. LocalAddress net.IP + // The remote (real) IP address. + RemoteAddress net.IP // The local (virtual) port. LocalPort uint16 + // The remote (real) port. + RemotePort uint16 // The local firewall mark LocalMark string // The transport protocol (TCP, UDP). Proto string - // The remote (real) IP address. - RemoteAddress net.IP - // The remote (real) port. - RemotePort uint16 // The current number of active connections for this virtual/real address pair. ActiveConn uint64 // The current number of inactive connections for this virtual/real address pair. 
@@ -151,7 +164,7 @@ func parseIPVSBackendStatus(file io.Reader) ([]IPVSBackendStatus, error) { ) for scanner.Scan() { - fields := strings.Fields(string(scanner.Text())) + fields := strings.Fields(scanner.Text()) if len(fields) == 0 { continue } diff --git a/vendor/github.com/prometheus/procfs/ipvs_test.go b/vendor/github.com/prometheus/procfs/ipvs_test.go index 13ceab80..9c34e6d0 100644 --- a/vendor/github.com/prometheus/procfs/ipvs_test.go +++ b/vendor/github.com/prometheus/procfs/ipvs_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/mdstat.go b/vendor/github.com/prometheus/procfs/mdstat.go index d7a248c0..9dc19583 100644 --- a/vendor/github.com/prometheus/procfs/mdstat.go +++ b/vendor/github.com/prometheus/procfs/mdstat.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/mdstat_test.go b/vendor/github.com/prometheus/procfs/mdstat_test.go index fa463c2f..8819228f 100644 --- a/vendor/github.com/prometheus/procfs/mdstat_test.go +++ b/vendor/github.com/prometheus/procfs/mdstat_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go index 6b2b0ba9..e95ddbc6 100644 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ b/vendor/github.com/prometheus/procfs/mountstats.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs // While implementing parsing of /proc/[pid]/mountstats, this blog was used diff --git a/vendor/github.com/prometheus/procfs/mountstats_test.go b/vendor/github.com/prometheus/procfs/mountstats_test.go index 8f04f535..7df1d15f 100644 --- a/vendor/github.com/prometheus/procfs/mountstats_test.go +++ b/vendor/github.com/prometheus/procfs/mountstats_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/net_dev.go b/vendor/github.com/prometheus/procfs/net_dev.go new file mode 100644 index 00000000..3f252337 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/net_dev.go @@ -0,0 +1,216 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "bufio" + "errors" + "os" + "sort" + "strconv" + "strings" +) + +// NetDevLine is single line parsed from /proc/net/dev or /proc/[pid]/net/dev. +type NetDevLine struct { + Name string `json:"name"` // The name of the interface. + RxBytes uint64 `json:"rx_bytes"` // Cumulative count of bytes received. + RxPackets uint64 `json:"rx_packets"` // Cumulative count of packets received. + RxErrors uint64 `json:"rx_errors"` // Cumulative count of receive errors encountered. + RxDropped uint64 `json:"rx_dropped"` // Cumulative count of packets dropped while receiving. + RxFIFO uint64 `json:"rx_fifo"` // Cumulative count of FIFO buffer errors. + RxFrame uint64 `json:"rx_frame"` // Cumulative count of packet framing errors. + RxCompressed uint64 `json:"rx_compressed"` // Cumulative count of compressed packets received by the device driver. + RxMulticast uint64 `json:"rx_multicast"` // Cumulative count of multicast frames received by the device driver. + TxBytes uint64 `json:"tx_bytes"` // Cumulative count of bytes transmitted. + TxPackets uint64 `json:"tx_packets"` // Cumulative count of packets transmitted. + TxErrors uint64 `json:"tx_errors"` // Cumulative count of transmit errors encountered. 
+ TxDropped uint64 `json:"tx_dropped"` // Cumulative count of packets dropped while transmitting. + TxFIFO uint64 `json:"tx_fifo"` // Cumulative count of FIFO buffer errors. + TxCollisions uint64 `json:"tx_collisions"` // Cumulative count of collisions detected on the interface. + TxCarrier uint64 `json:"tx_carrier"` // Cumulative count of carrier losses detected by the device driver. + TxCompressed uint64 `json:"tx_compressed"` // Cumulative count of compressed packets transmitted by the device driver. +} + +// NetDev is parsed from /proc/net/dev or /proc/[pid]/net/dev. The map keys +// are interface names. +type NetDev map[string]NetDevLine + +// NewNetDev returns kernel/system statistics read from /proc/net/dev. +func NewNetDev() (NetDev, error) { + fs, err := NewFS(DefaultMountPoint) + if err != nil { + return nil, err + } + + return fs.NewNetDev() +} + +// NewNetDev returns kernel/system statistics read from /proc/net/dev. +func (fs FS) NewNetDev() (NetDev, error) { + return newNetDev(fs.Path("net/dev")) +} + +// NewNetDev returns kernel/system statistics read from /proc/[pid]/net/dev. +func (p Proc) NewNetDev() (NetDev, error) { + return newNetDev(p.path("net/dev")) +} + +// newNetDev creates a new NetDev from the contents of the given file. +func newNetDev(file string) (NetDev, error) { + f, err := os.Open(file) + if err != nil { + return NetDev{}, err + } + defer f.Close() + + nd := NetDev{} + s := bufio.NewScanner(f) + for n := 0; s.Scan(); n++ { + // Skip the 2 header lines. + if n < 2 { + continue + } + + line, err := nd.parseLine(s.Text()) + if err != nil { + return nd, err + } + + nd[line.Name] = *line + } + + return nd, s.Err() +} + +// parseLine parses a single line from the /proc/net/dev file. Header lines +// must be filtered prior to calling this method. 
+func (nd NetDev) parseLine(rawLine string) (*NetDevLine, error) { + parts := strings.SplitN(rawLine, ":", 2) + if len(parts) != 2 { + return nil, errors.New("invalid net/dev line, missing colon") + } + fields := strings.Fields(strings.TrimSpace(parts[1])) + + var err error + line := &NetDevLine{} + + // Interface Name + line.Name = strings.TrimSpace(parts[0]) + if line.Name == "" { + return nil, errors.New("invalid net/dev line, empty interface name") + } + + // RX + line.RxBytes, err = strconv.ParseUint(fields[0], 10, 64) + if err != nil { + return nil, err + } + line.RxPackets, err = strconv.ParseUint(fields[1], 10, 64) + if err != nil { + return nil, err + } + line.RxErrors, err = strconv.ParseUint(fields[2], 10, 64) + if err != nil { + return nil, err + } + line.RxDropped, err = strconv.ParseUint(fields[3], 10, 64) + if err != nil { + return nil, err + } + line.RxFIFO, err = strconv.ParseUint(fields[4], 10, 64) + if err != nil { + return nil, err + } + line.RxFrame, err = strconv.ParseUint(fields[5], 10, 64) + if err != nil { + return nil, err + } + line.RxCompressed, err = strconv.ParseUint(fields[6], 10, 64) + if err != nil { + return nil, err + } + line.RxMulticast, err = strconv.ParseUint(fields[7], 10, 64) + if err != nil { + return nil, err + } + + // TX + line.TxBytes, err = strconv.ParseUint(fields[8], 10, 64) + if err != nil { + return nil, err + } + line.TxPackets, err = strconv.ParseUint(fields[9], 10, 64) + if err != nil { + return nil, err + } + line.TxErrors, err = strconv.ParseUint(fields[10], 10, 64) + if err != nil { + return nil, err + } + line.TxDropped, err = strconv.ParseUint(fields[11], 10, 64) + if err != nil { + return nil, err + } + line.TxFIFO, err = strconv.ParseUint(fields[12], 10, 64) + if err != nil { + return nil, err + } + line.TxCollisions, err = strconv.ParseUint(fields[13], 10, 64) + if err != nil { + return nil, err + } + line.TxCarrier, err = strconv.ParseUint(fields[14], 10, 64) + if err != nil { + return nil, err + } + line.TxCompressed, err = strconv.ParseUint(fields[15], 10, 64) + if err != nil { + return nil, err + } + + return line, nil +} + +// Total aggregates the values across interfaces and returns a new NetDevLine. +// The Name field will be a sorted comma separated list of interface names. 
+func (nd NetDev) Total() NetDevLine {
+ total := NetDevLine{}
+
+ names := make([]string, 0, len(nd))
+ for _, ifc := range nd {
+ names = append(names, ifc.Name)
+ total.RxBytes += ifc.RxBytes
+ total.RxPackets += ifc.RxPackets
+ total.RxErrors += ifc.RxErrors
+ total.RxDropped += ifc.RxDropped
+ total.RxFIFO += ifc.RxFIFO
+ total.RxFrame += ifc.RxFrame
+ total.RxCompressed += ifc.RxCompressed
+ total.RxMulticast += ifc.RxMulticast
+ total.TxBytes += ifc.TxBytes
+ total.TxPackets += ifc.TxPackets
+ total.TxErrors += ifc.TxErrors
+ total.TxDropped += ifc.TxDropped
+ total.TxFIFO += ifc.TxFIFO
+ total.TxCollisions += ifc.TxCollisions
+ total.TxCarrier += ifc.TxCarrier
+ total.TxCompressed += ifc.TxCompressed
+ }
+ sort.Strings(names)
+ total.Name = strings.Join(names, ", ")
+
+ return total
+}
diff --git a/vendor/github.com/prometheus/procfs/net_dev_test.go b/vendor/github.com/prometheus/procfs/net_dev_test.go
new file mode 100644
index 00000000..b162e9c9
--- /dev/null
+++ b/vendor/github.com/prometheus/procfs/net_dev_test.go
@@ -0,0 +1,86 @@
+// Copyright 2018 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package procfs
+
+import (
+ "testing"
+)
+
+func TestNetDevParseLine(t *testing.T) {
+ const rawLine = ` eth0: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16`
+
+ have, err := NetDev{}.parseLine(rawLine)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ want := NetDevLine{"eth0", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
+ if want != *have {
+ t.Errorf("want %v, have %v", want, have)
+ }
+}
+
+func TestNewNetDev(t *testing.T) {
+ fs, err := NewFS("fixtures")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ nd, err := fs.NewNetDev()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ lines := map[string]NetDevLine{
+ "vethf345468": {Name: "vethf345468", RxBytes: 648, RxPackets: 8, TxBytes: 438, TxPackets: 5},
+ "lo": {Name: "lo", RxBytes: 1664039048, RxPackets: 1566805, TxBytes: 1664039048, TxPackets: 1566805},
+ "docker0": {Name: "docker0", RxBytes: 2568, RxPackets: 38, TxBytes: 438, TxPackets: 5},
+ "eth0": {Name: "eth0", RxBytes: 874354587, RxPackets: 1036395, TxBytes: 563352563, TxPackets: 732147},
+ }
+
+ if want, have := len(lines), len(nd); want != have {
+ t.Errorf("want %d parsed net/dev lines, have %d", want, have)
+ }
+ for _, line := range nd {
+ if want, have := lines[line.Name], line; want != have {
+ t.Errorf("%s: want %v, have %v", line.Name, want, have)
+ }
+ }
+}
+
+func TestProcNewNetDev(t *testing.T) {
+ p, err := FS("fixtures").NewProc(26231)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ nd, err := p.NewNetDev()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ lines := map[string]NetDevLine{
+ "lo": {Name: "lo"},
+ "eth0": {Name: "eth0", RxBytes: 438, RxPackets: 5, TxBytes: 648, TxPackets: 8},
+ }
+
+ if want, have := len(lines), len(nd); want != have {
+ t.Errorf("want %d parsed net/dev lines, have %d", want, have)
+ }
+ for _, line := range nd {
+ if want, have := lines[line.Name], line; want != have {
+ t.Errorf("%s: want %v, have 
%v", line.Name, want, have) + } + } +} diff --git a/vendor/github.com/prometheus/procfs/nfs/nfs.go b/vendor/github.com/prometheus/procfs/nfs/nfs.go new file mode 100644 index 00000000..651bf681 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/nfs.go @@ -0,0 +1,263 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package nfs implements parsing of /proc/net/rpc/nfsd. +// Fields are documented in https://www.svennd.be/nfsd-stats-explained-procnetrpcnfsd/ +package nfs + +// ReplyCache models the "rc" line. +type ReplyCache struct { + Hits uint64 + Misses uint64 + NoCache uint64 +} + +// FileHandles models the "fh" line. +type FileHandles struct { + Stale uint64 + TotalLookups uint64 + AnonLookups uint64 + DirNoCache uint64 + NoDirNoCache uint64 +} + +// InputOutput models the "io" line. +type InputOutput struct { + Read uint64 + Write uint64 +} + +// Threads models the "th" line. +type Threads struct { + Threads uint64 + FullCnt uint64 +} + +// ReadAheadCache models the "ra" line. +type ReadAheadCache struct { + CacheSize uint64 + CacheHistogram []uint64 + NotFound uint64 +} + +// Network models the "net" line. +type Network struct { + NetCount uint64 + UDPCount uint64 + TCPCount uint64 + TCPConnect uint64 +} + +// ClientRPC models the nfs "rpc" line. +type ClientRPC struct { + RPCCount uint64 + Retransmissions uint64 + AuthRefreshes uint64 +} + +// ServerRPC models the nfsd "rpc" line. +type ServerRPC struct { + RPCCount uint64 + BadCnt uint64 + BadFmt uint64 + BadAuth uint64 + BadcInt uint64 +} + +// V2Stats models the "proc2" line. +type V2Stats struct { + Null uint64 + GetAttr uint64 + SetAttr uint64 + Root uint64 + Lookup uint64 + ReadLink uint64 + Read uint64 + WrCache uint64 + Write uint64 + Create uint64 + Remove uint64 + Rename uint64 + Link uint64 + SymLink uint64 + MkDir uint64 + RmDir uint64 + ReadDir uint64 + FsStat uint64 +} + +// V3Stats models the "proc3" line. +type V3Stats struct { + Null uint64 + GetAttr uint64 + SetAttr uint64 + Lookup uint64 + Access uint64 + ReadLink uint64 + Read uint64 + Write uint64 + Create uint64 + MkDir uint64 + SymLink uint64 + MkNod uint64 + Remove uint64 + RmDir uint64 + Rename uint64 + Link uint64 + ReadDir uint64 + ReadDirPlus uint64 + FsStat uint64 + FsInfo uint64 + PathConf uint64 + Commit uint64 +} + +// ClientV4Stats models the nfs "proc4" line. 
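+// In /proc/net/rpc/nfs the "proc4" line starts with the number of counters
+// that follow; the remaining values map onto the fields of this struct in
+// order, beginning with Null. For illustration (abbreviated from the test
+// fixtures below):
+//
+//	proc4 48 98 51 54 83 ...
+//
+// i.e. 48 counters, Null=98, Read=51, Write=54, Commit=83, and so on.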
+type ClientV4Stats struct { + Null uint64 + Read uint64 + Write uint64 + Commit uint64 + Open uint64 + OpenConfirm uint64 + OpenNoattr uint64 + OpenDowngrade uint64 + Close uint64 + Setattr uint64 + FsInfo uint64 + Renew uint64 + SetClientID uint64 + SetClientIDConfirm uint64 + Lock uint64 + Lockt uint64 + Locku uint64 + Access uint64 + Getattr uint64 + Lookup uint64 + LookupRoot uint64 + Remove uint64 + Rename uint64 + Link uint64 + Symlink uint64 + Create uint64 + Pathconf uint64 + StatFs uint64 + ReadLink uint64 + ReadDir uint64 + ServerCaps uint64 + DelegReturn uint64 + GetACL uint64 + SetACL uint64 + FsLocations uint64 + ReleaseLockowner uint64 + Secinfo uint64 + FsidPresent uint64 + ExchangeID uint64 + CreateSession uint64 + DestroySession uint64 + Sequence uint64 + GetLeaseTime uint64 + ReclaimComplete uint64 + LayoutGet uint64 + GetDeviceInfo uint64 + LayoutCommit uint64 + LayoutReturn uint64 + SecinfoNoName uint64 + TestStateID uint64 + FreeStateID uint64 + GetDeviceList uint64 + BindConnToSession uint64 + DestroyClientID uint64 + Seek uint64 + Allocate uint64 + DeAllocate uint64 + LayoutStats uint64 + Clone uint64 +} + +// ServerV4Stats models the nfsd "proc4" line. +type ServerV4Stats struct { + Null uint64 + Compound uint64 +} + +// V4Ops models the "proc4ops" line: NFSv4 operations +// Variable list, see: +// v4.0 https://tools.ietf.org/html/rfc3010 (38 operations) +// v4.1 https://tools.ietf.org/html/rfc5661 (58 operations) +// v4.2 https://tools.ietf.org/html/draft-ietf-nfsv4-minorversion2-41 (71 operations) +type V4Ops struct { + //Values uint64 // Variable depending on v4.x sub-version. TODO: Will this always at least include the fields in this struct? + Op0Unused uint64 + Op1Unused uint64 + Op2Future uint64 + Access uint64 + Close uint64 + Commit uint64 + Create uint64 + DelegPurge uint64 + DelegReturn uint64 + GetAttr uint64 + GetFH uint64 + Link uint64 + Lock uint64 + Lockt uint64 + Locku uint64 + Lookup uint64 + LookupRoot uint64 + Nverify uint64 + Open uint64 + OpenAttr uint64 + OpenConfirm uint64 + OpenDgrd uint64 + PutFH uint64 + PutPubFH uint64 + PutRootFH uint64 + Read uint64 + ReadDir uint64 + ReadLink uint64 + Remove uint64 + Rename uint64 + Renew uint64 + RestoreFH uint64 + SaveFH uint64 + SecInfo uint64 + SetAttr uint64 + Verify uint64 + Write uint64 + RelLockOwner uint64 +} + +// ClientRPCStats models all stats from /proc/net/rpc/nfs. +type ClientRPCStats struct { + Network Network + ClientRPC ClientRPC + V2Stats V2Stats + V3Stats V3Stats + ClientV4Stats ClientV4Stats +} + +// ServerRPCStats models all stats from /proc/net/rpc/nfsd. +type ServerRPCStats struct { + ReplyCache ReplyCache + FileHandles FileHandles + InputOutput InputOutput + Threads Threads + ReadAheadCache ReadAheadCache + Network Network + ServerRPC ServerRPC + V2Stats V2Stats + V3Stats V3Stats + ServerV4Stats ServerV4Stats + V4Ops V4Ops +} diff --git a/vendor/github.com/prometheus/procfs/nfs/parse.go b/vendor/github.com/prometheus/procfs/nfs/parse.go new file mode 100644 index 00000000..95a83cc5 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/parse.go @@ -0,0 +1,317 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package nfs + +import ( + "fmt" +) + +func parseReplyCache(v []uint64) (ReplyCache, error) { + if len(v) != 3 { + return ReplyCache{}, fmt.Errorf("invalid ReplyCache line %q", v) + } + + return ReplyCache{ + Hits: v[0], + Misses: v[1], + NoCache: v[2], + }, nil +} + +func parseFileHandles(v []uint64) (FileHandles, error) { + if len(v) != 5 { + return FileHandles{}, fmt.Errorf("invalid FileHandles, line %q", v) + } + + return FileHandles{ + Stale: v[0], + TotalLookups: v[1], + AnonLookups: v[2], + DirNoCache: v[3], + NoDirNoCache: v[4], + }, nil +} + +func parseInputOutput(v []uint64) (InputOutput, error) { + if len(v) != 2 { + return InputOutput{}, fmt.Errorf("invalid InputOutput line %q", v) + } + + return InputOutput{ + Read: v[0], + Write: v[1], + }, nil +} + +func parseThreads(v []uint64) (Threads, error) { + if len(v) != 2 { + return Threads{}, fmt.Errorf("invalid Threads line %q", v) + } + + return Threads{ + Threads: v[0], + FullCnt: v[1], + }, nil +} + +func parseReadAheadCache(v []uint64) (ReadAheadCache, error) { + if len(v) != 12 { + return ReadAheadCache{}, fmt.Errorf("invalid ReadAheadCache line %q", v) + } + + return ReadAheadCache{ + CacheSize: v[0], + CacheHistogram: v[1:11], + NotFound: v[11], + }, nil +} + +func parseNetwork(v []uint64) (Network, error) { + if len(v) != 4 { + return Network{}, fmt.Errorf("invalid Network line %q", v) + } + + return Network{ + NetCount: v[0], + UDPCount: v[1], + TCPCount: v[2], + TCPConnect: v[3], + }, nil +} + +func parseServerRPC(v []uint64) (ServerRPC, error) { + if len(v) != 5 { + return ServerRPC{}, fmt.Errorf("invalid RPC line %q", v) + } + + return ServerRPC{ + RPCCount: v[0], + BadCnt: v[1], + BadFmt: v[2], + BadAuth: v[3], + BadcInt: v[4], + }, nil +} + +func parseClientRPC(v []uint64) (ClientRPC, error) { + if len(v) != 3 { + return ClientRPC{}, fmt.Errorf("invalid RPC line %q", v) + } + + return ClientRPC{ + RPCCount: v[0], + Retransmissions: v[1], + AuthRefreshes: v[2], + }, nil +} + +func parseV2Stats(v []uint64) (V2Stats, error) { + values := int(v[0]) + if len(v[1:]) != values || values != 18 { + return V2Stats{}, fmt.Errorf("invalid V2Stats line %q", v) + } + + return V2Stats{ + Null: v[1], + GetAttr: v[2], + SetAttr: v[3], + Root: v[4], + Lookup: v[5], + ReadLink: v[6], + Read: v[7], + WrCache: v[8], + Write: v[9], + Create: v[10], + Remove: v[11], + Rename: v[12], + Link: v[13], + SymLink: v[14], + MkDir: v[15], + RmDir: v[16], + ReadDir: v[17], + FsStat: v[18], + }, nil +} + +func parseV3Stats(v []uint64) (V3Stats, error) { + values := int(v[0]) + if len(v[1:]) != values || values != 22 { + return V3Stats{}, fmt.Errorf("invalid V3Stats line %q", v) + } + + return V3Stats{ + Null: v[1], + GetAttr: v[2], + SetAttr: v[3], + Lookup: v[4], + Access: v[5], + ReadLink: v[6], + Read: v[7], + Write: v[8], + Create: v[9], + MkDir: v[10], + SymLink: v[11], + MkNod: v[12], + Remove: v[13], + RmDir: v[14], + Rename: v[15], + Link: v[16], + ReadDir: v[17], + ReadDirPlus: v[18], + FsStat: v[19], + FsInfo: v[20], + PathConf: v[21], + Commit: v[22], + }, nil +} + +func parseClientV4Stats(v []uint64) (ClientV4Stats, 
error) { + values := int(v[0]) + if len(v[1:]) != values { + return ClientV4Stats{}, fmt.Errorf("invalid ClientV4Stats line %q", v) + } + + // This function currently supports mapping 59 NFS v4 client stats. Older + // kernels may emit fewer stats, so we must detect this and pad out the + // values to match the expected slice size. + if values < 59 { + newValues := make([]uint64, 60) + copy(newValues, v) + v = newValues + } + + return ClientV4Stats{ + Null: v[1], + Read: v[2], + Write: v[3], + Commit: v[4], + Open: v[5], + OpenConfirm: v[6], + OpenNoattr: v[7], + OpenDowngrade: v[8], + Close: v[9], + Setattr: v[10], + FsInfo: v[11], + Renew: v[12], + SetClientID: v[13], + SetClientIDConfirm: v[14], + Lock: v[15], + Lockt: v[16], + Locku: v[17], + Access: v[18], + Getattr: v[19], + Lookup: v[20], + LookupRoot: v[21], + Remove: v[22], + Rename: v[23], + Link: v[24], + Symlink: v[25], + Create: v[26], + Pathconf: v[27], + StatFs: v[28], + ReadLink: v[29], + ReadDir: v[30], + ServerCaps: v[31], + DelegReturn: v[32], + GetACL: v[33], + SetACL: v[34], + FsLocations: v[35], + ReleaseLockowner: v[36], + Secinfo: v[37], + FsidPresent: v[38], + ExchangeID: v[39], + CreateSession: v[40], + DestroySession: v[41], + Sequence: v[42], + GetLeaseTime: v[43], + ReclaimComplete: v[44], + LayoutGet: v[45], + GetDeviceInfo: v[46], + LayoutCommit: v[47], + LayoutReturn: v[48], + SecinfoNoName: v[49], + TestStateID: v[50], + FreeStateID: v[51], + GetDeviceList: v[52], + BindConnToSession: v[53], + DestroyClientID: v[54], + Seek: v[55], + Allocate: v[56], + DeAllocate: v[57], + LayoutStats: v[58], + Clone: v[59], + }, nil +} + +func parseServerV4Stats(v []uint64) (ServerV4Stats, error) { + values := int(v[0]) + if len(v[1:]) != values || values != 2 { + return ServerV4Stats{}, fmt.Errorf("invalid V4Stats line %q", v) + } + + return ServerV4Stats{ + Null: v[1], + Compound: v[2], + }, nil +} + +func parseV4Ops(v []uint64) (V4Ops, error) { + values := int(v[0]) + if len(v[1:]) != values || values < 39 { + return V4Ops{}, fmt.Errorf("invalid V4Ops line %q", v) + } + + stats := V4Ops{ + Op0Unused: v[1], + Op1Unused: v[2], + Op2Future: v[3], + Access: v[4], + Close: v[5], + Commit: v[6], + Create: v[7], + DelegPurge: v[8], + DelegReturn: v[9], + GetAttr: v[10], + GetFH: v[11], + Link: v[12], + Lock: v[13], + Lockt: v[14], + Locku: v[15], + Lookup: v[16], + LookupRoot: v[17], + Nverify: v[18], + Open: v[19], + OpenAttr: v[20], + OpenConfirm: v[21], + OpenDgrd: v[22], + PutFH: v[23], + PutPubFH: v[24], + PutRootFH: v[25], + Read: v[26], + ReadDir: v[27], + ReadLink: v[28], + Remove: v[29], + Rename: v[30], + Renew: v[31], + RestoreFH: v[32], + SaveFH: v[33], + SecInfo: v[34], + SetAttr: v[35], + Verify: v[36], + Write: v[37], + RelLockOwner: v[38], + } + + return stats, nil +} diff --git a/vendor/github.com/prometheus/procfs/nfs/parse_nfs.go b/vendor/github.com/prometheus/procfs/nfs/parse_nfs.go new file mode 100644 index 00000000..c0d3a5ad --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/parse_nfs.go @@ -0,0 +1,67 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package nfs + +import ( + "bufio" + "fmt" + "io" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// ParseClientRPCStats returns stats read from /proc/net/rpc/nfs +func ParseClientRPCStats(r io.Reader) (*ClientRPCStats, error) { + stats := &ClientRPCStats{} + + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := scanner.Text() + parts := strings.Fields(scanner.Text()) + // require at least + if len(parts) < 2 { + return nil, fmt.Errorf("invalid NFS metric line %q", line) + } + + values, err := util.ParseUint64s(parts[1:]) + if err != nil { + return nil, fmt.Errorf("error parsing NFS metric line: %s", err) + } + + switch metricLine := parts[0]; metricLine { + case "net": + stats.Network, err = parseNetwork(values) + case "rpc": + stats.ClientRPC, err = parseClientRPC(values) + case "proc2": + stats.V2Stats, err = parseV2Stats(values) + case "proc3": + stats.V3Stats, err = parseV3Stats(values) + case "proc4": + stats.ClientV4Stats, err = parseClientV4Stats(values) + default: + return nil, fmt.Errorf("unknown NFS metric line %q", metricLine) + } + if err != nil { + return nil, fmt.Errorf("errors parsing NFS metric line: %s", err) + } + } + + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("error scanning NFS file: %s", err) + } + + return stats, nil +} diff --git a/vendor/github.com/prometheus/procfs/nfs/parse_nfs_test.go b/vendor/github.com/prometheus/procfs/nfs/parse_nfs_test.go new file mode 100644 index 00000000..8ebcfd16 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/parse_nfs_test.go @@ -0,0 +1,305 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package nfs_test + +import ( + "reflect" + "strings" + "testing" + + "github.com/prometheus/procfs/nfs" +) + +func TestNewNFSClientRPCStats(t *testing.T) { + tests := []struct { + name string + content string + stats *nfs.ClientRPCStats + invalid bool + }{ + { + name: "invalid file", + content: "invalid", + invalid: true, + }, { + name: "good old kernel version file", + content: `net 70 70 69 45 +rpc 1218785755 374636 1218815394 +proc2 18 16 57 74 52 71 73 45 86 0 52 83 61 17 53 50 23 70 82 +proc3 22 0 1061909262 48906 4077635 117661341 5 29391916 2570425 2993289 590 0 0 7815 15 1130 0 3983 92385 13332 2 1 23729 +proc4 48 98 51 54 83 85 23 24 1 28 73 68 83 12 84 39 68 59 58 88 29 74 69 96 21 84 15 53 86 54 66 56 97 36 49 32 85 81 11 58 32 67 13 28 35 90 1 26 1337 +`, + stats: &nfs.ClientRPCStats{ + Network: nfs.Network{ + NetCount: 70, + UDPCount: 70, + TCPCount: 69, + TCPConnect: 45, + }, + ClientRPC: nfs.ClientRPC{ + RPCCount: 1218785755, + Retransmissions: 374636, + AuthRefreshes: 1218815394, + }, + V2Stats: nfs.V2Stats{ + Null: 16, + GetAttr: 57, + SetAttr: 74, + Root: 52, + Lookup: 71, + ReadLink: 73, + Read: 45, + WrCache: 86, + Write: 0, + Create: 52, + Remove: 83, + Rename: 61, + Link: 17, + SymLink: 53, + MkDir: 50, + RmDir: 23, + ReadDir: 70, + FsStat: 82, + }, + V3Stats: nfs.V3Stats{ + Null: 0, + GetAttr: 1061909262, + SetAttr: 48906, + Lookup: 4077635, + Access: 117661341, + ReadLink: 5, + Read: 29391916, + Write: 2570425, + Create: 2993289, + MkDir: 590, + SymLink: 0, + MkNod: 0, + Remove: 7815, + RmDir: 15, + Rename: 1130, + Link: 0, + ReadDir: 3983, + ReadDirPlus: 92385, + FsStat: 13332, + FsInfo: 2, + PathConf: 1, + Commit: 23729}, + ClientV4Stats: nfs.ClientV4Stats{ + Null: 98, + Read: 51, + Write: 54, + Commit: 83, + Open: 85, + OpenConfirm: 23, + OpenNoattr: 24, + OpenDowngrade: 1, + Close: 28, + Setattr: 73, + FsInfo: 68, + Renew: 83, + SetClientID: 12, + SetClientIDConfirm: 84, + Lock: 39, + Lockt: 68, + Locku: 59, + Access: 58, + Getattr: 88, + Lookup: 29, + LookupRoot: 74, + Remove: 69, + Rename: 96, + Link: 21, + Symlink: 84, + Create: 15, + Pathconf: 53, + StatFs: 86, + ReadLink: 54, + ReadDir: 66, + ServerCaps: 56, + DelegReturn: 97, + GetACL: 36, + SetACL: 49, + FsLocations: 32, + ReleaseLockowner: 85, + Secinfo: 81, + FsidPresent: 11, + ExchangeID: 58, + CreateSession: 32, + DestroySession: 67, + Sequence: 13, + GetLeaseTime: 28, + ReclaimComplete: 35, + LayoutGet: 90, + GetDeviceInfo: 1, + LayoutCommit: 26, + LayoutReturn: 1337, + SecinfoNoName: 0, + TestStateID: 0, + FreeStateID: 0, + GetDeviceList: 0, + BindConnToSession: 0, + DestroyClientID: 0, + Seek: 0, + Allocate: 0, + DeAllocate: 0, + LayoutStats: 0, + Clone: 0, + }, + }, + }, { + name: "good file", + content: `net 18628 0 18628 6 +rpc 4329785 0 4338291 +proc2 18 2 69 0 0 4410 0 0 0 0 0 0 0 0 0 0 0 99 2 +proc3 22 1 4084749 29200 94754 32580 186 47747 7981 8639 0 6356 0 6962 0 7958 0 0 241 4 4 2 39 +proc4 61 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +`, + stats: &nfs.ClientRPCStats{ + Network: nfs.Network{ + NetCount: 18628, + UDPCount: 0, + TCPCount: 18628, + TCPConnect: 6, + }, + ClientRPC: nfs.ClientRPC{ + RPCCount: 4329785, + Retransmissions: 0, + AuthRefreshes: 4338291, + }, + V2Stats: nfs.V2Stats{ + Null: 2, + GetAttr: 69, + SetAttr: 0, + Root: 0, + Lookup: 4410, + ReadLink: 0, + Read: 0, + WrCache: 0, + Write: 0, + Create: 0, + Remove: 0, + Rename: 0, + Link: 0, + SymLink: 0, + MkDir: 0, + RmDir: 0, + ReadDir: 99, + 
FsStat: 2, + }, + V3Stats: nfs.V3Stats{ + Null: 1, + GetAttr: 4084749, + SetAttr: 29200, + Lookup: 94754, + Access: 32580, + ReadLink: 186, + Read: 47747, + Write: 7981, + Create: 8639, + MkDir: 0, + SymLink: 6356, + MkNod: 0, + Remove: 6962, + RmDir: 0, + Rename: 7958, + Link: 0, + ReadDir: 0, + ReadDirPlus: 241, + FsStat: 4, + FsInfo: 4, + PathConf: 2, + Commit: 39, + }, + ClientV4Stats: nfs.ClientV4Stats{ + Null: 1, + Read: 0, + Write: 0, + Commit: 0, + Open: 0, + OpenConfirm: 0, + OpenNoattr: 0, + OpenDowngrade: 0, + Close: 0, + Setattr: 0, + FsInfo: 0, + Renew: 0, + SetClientID: 1, + SetClientIDConfirm: 1, + Lock: 0, + Lockt: 0, + Locku: 0, + Access: 0, + Getattr: 0, + Lookup: 0, + LookupRoot: 0, + Remove: 2, + Rename: 0, + Link: 0, + Symlink: 0, + Create: 0, + Pathconf: 0, + StatFs: 0, + ReadLink: 0, + ReadDir: 0, + ServerCaps: 0, + DelegReturn: 0, + GetACL: 0, + SetACL: 0, + FsLocations: 0, + ReleaseLockowner: 0, + Secinfo: 0, + FsidPresent: 0, + ExchangeID: 0, + CreateSession: 0, + DestroySession: 0, + Sequence: 0, + GetLeaseTime: 0, + ReclaimComplete: 0, + LayoutGet: 0, + GetDeviceInfo: 0, + LayoutCommit: 0, + LayoutReturn: 0, + SecinfoNoName: 0, + TestStateID: 0, + FreeStateID: 0, + GetDeviceList: 0, + BindConnToSession: 0, + DestroyClientID: 0, + Seek: 0, + Allocate: 0, + DeAllocate: 0, + LayoutStats: 0, + Clone: 0, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + stats, err := nfs.ParseClientRPCStats(strings.NewReader(tt.content)) + + if tt.invalid && err == nil { + t.Fatal("expected an error, but none occurred") + } + if !tt.invalid && err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if want, have := tt.stats, stats; !reflect.DeepEqual(want, have) { + t.Fatalf("unexpected NFS stats:\nwant:\n%v\nhave:\n%v", want, have) + } + }) + } +} diff --git a/vendor/github.com/prometheus/procfs/nfs/parse_nfsd.go b/vendor/github.com/prometheus/procfs/nfs/parse_nfsd.go new file mode 100644 index 00000000..57bb4a35 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/parse_nfsd.go @@ -0,0 +1,89 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package nfs + +import ( + "bufio" + "fmt" + "io" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// ParseServerRPCStats returns stats read from /proc/net/rpc/nfsd +func ParseServerRPCStats(r io.Reader) (*ServerRPCStats, error) { + stats := &ServerRPCStats{} + + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := scanner.Text() + parts := strings.Fields(scanner.Text()) + // require at least + if len(parts) < 2 { + return nil, fmt.Errorf("invalid NFSd metric line %q", line) + } + label := parts[0] + + var values []uint64 + var err error + if label == "th" { + if len(parts) < 3 { + return nil, fmt.Errorf("invalid NFSd th metric line %q", line) + } + values, err = util.ParseUint64s(parts[1:3]) + } else { + values, err = util.ParseUint64s(parts[1:]) + } + if err != nil { + return nil, fmt.Errorf("error parsing NFSd metric line: %s", err) + } + + switch metricLine := parts[0]; metricLine { + case "rc": + stats.ReplyCache, err = parseReplyCache(values) + case "fh": + stats.FileHandles, err = parseFileHandles(values) + case "io": + stats.InputOutput, err = parseInputOutput(values) + case "th": + stats.Threads, err = parseThreads(values) + case "ra": + stats.ReadAheadCache, err = parseReadAheadCache(values) + case "net": + stats.Network, err = parseNetwork(values) + case "rpc": + stats.ServerRPC, err = parseServerRPC(values) + case "proc2": + stats.V2Stats, err = parseV2Stats(values) + case "proc3": + stats.V3Stats, err = parseV3Stats(values) + case "proc4": + stats.ServerV4Stats, err = parseServerV4Stats(values) + case "proc4ops": + stats.V4Ops, err = parseV4Ops(values) + default: + return nil, fmt.Errorf("unknown NFSd metric line %q", metricLine) + } + if err != nil { + return nil, fmt.Errorf("errors parsing NFSd metric line: %s", err) + } + } + + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("error scanning NFSd file: %s", err) + } + + return stats, nil +} diff --git a/vendor/github.com/prometheus/procfs/nfs/parse_nfsd_test.go b/vendor/github.com/prometheus/procfs/nfs/parse_nfsd_test.go new file mode 100644 index 00000000..b09b3b58 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/nfs/parse_nfsd_test.go @@ -0,0 +1,196 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package nfs_test + +import ( + "reflect" + "strings" + "testing" + + "github.com/prometheus/procfs/nfs" +) + +func TestNewNFSdServerRPCStats(t *testing.T) { + tests := []struct { + name string + content string + stats *nfs.ServerRPCStats + invalid bool + }{ + { + name: "invalid file", + content: "invalid", + invalid: true, + }, { + name: "good file", + content: `rc 0 6 18622 +fh 0 0 0 0 0 +io 157286400 0 +th 8 0 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 +ra 32 0 0 0 0 0 0 0 0 0 0 0 +net 18628 0 18628 6 +rpc 18628 0 0 0 0 +proc2 18 2 69 0 0 4410 0 0 0 0 0 0 0 0 0 0 0 99 2 +proc3 22 2 112 0 2719 111 0 0 0 0 0 0 0 0 0 0 0 27 216 0 2 1 0 +proc4 2 2 10853 +proc4ops 72 0 0 0 1098 2 0 0 0 0 8179 5896 0 0 0 0 5900 0 0 2 0 2 0 9609 0 2 150 1272 0 0 0 1236 0 0 0 0 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +`, + stats: &nfs.ServerRPCStats{ + ReplyCache: nfs.ReplyCache{ + Hits: 0, + Misses: 6, + NoCache: 18622, + }, + FileHandles: nfs.FileHandles{ + Stale: 0, + TotalLookups: 0, + AnonLookups: 0, + DirNoCache: 0, + NoDirNoCache: 0, + }, + InputOutput: nfs.InputOutput{ + Read: 157286400, + Write: 0, + }, + Threads: nfs.Threads{ + Threads: 8, + FullCnt: 0, + }, + ReadAheadCache: nfs.ReadAheadCache{ + CacheSize: 32, + CacheHistogram: []uint64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + NotFound: 0, + }, + Network: nfs.Network{ + NetCount: 18628, + UDPCount: 0, + TCPCount: 18628, + TCPConnect: 6, + }, + ServerRPC: nfs.ServerRPC{ + RPCCount: 18628, + BadCnt: 0, + BadFmt: 0, + BadAuth: 0, + BadcInt: 0, + }, + V2Stats: nfs.V2Stats{ + Null: 2, + GetAttr: 69, + SetAttr: 0, + Root: 0, + Lookup: 4410, + ReadLink: 0, + Read: 0, + WrCache: 0, + Write: 0, + Create: 0, + Remove: 0, + Rename: 0, + Link: 0, + SymLink: 0, + MkDir: 0, + RmDir: 0, + ReadDir: 99, + FsStat: 2, + }, + V3Stats: nfs.V3Stats{ + Null: 2, + GetAttr: 112, + SetAttr: 0, + Lookup: 2719, + Access: 111, + ReadLink: 0, + Read: 0, + Write: 0, + Create: 0, + MkDir: 0, + SymLink: 0, + MkNod: 0, + Remove: 0, + RmDir: 0, + Rename: 0, + Link: 0, + ReadDir: 27, + ReadDirPlus: 216, + FsStat: 0, + FsInfo: 2, + PathConf: 1, + Commit: 0, + }, + ServerV4Stats: nfs.ServerV4Stats{ + Null: 2, + Compound: 10853, + }, + V4Ops: nfs.V4Ops{ + Op0Unused: 0, + Op1Unused: 0, + Op2Future: 0, + Access: 1098, + Close: 2, + Commit: 0, + Create: 0, + DelegPurge: 0, + DelegReturn: 0, + GetAttr: 8179, + GetFH: 5896, + Link: 0, + Lock: 0, + Lockt: 0, + Locku: 0, + Lookup: 5900, + LookupRoot: 0, + Nverify: 0, + Open: 2, + OpenAttr: 0, + OpenConfirm: 2, + OpenDgrd: 0, + PutFH: 9609, + PutPubFH: 0, + PutRootFH: 2, + Read: 150, + ReadDir: 1272, + ReadLink: 0, + Remove: 0, + Rename: 0, + Renew: 1236, + RestoreFH: 0, + SaveFH: 0, + SecInfo: 0, + SetAttr: 0, + Verify: 3, + Write: 3, + RelLockOwner: 0, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + stats, err := nfs.ParseServerRPCStats(strings.NewReader(tt.content)) + + if tt.invalid && err == nil { + t.Fatal("expected an error, but none occurred") + } + if !tt.invalid && err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if want, have := tt.stats, stats; !reflect.DeepEqual(want, have) { + t.Fatalf("unexpected NFS stats:\nwant:\n%v\nhave:\n%v", want, have) + } + }) + } +} diff --git a/vendor/github.com/prometheus/procfs/proc.go b/vendor/github.com/prometheus/procfs/proc.go index 8717e1fe..7cf5b8ac 100644 --- a/vendor/github.com/prometheus/procfs/proc.go +++ b/vendor/github.com/prometheus/procfs/proc.go @@ -1,6 +1,20 @@ +// Copyright 2018 The 
Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( + "bytes" "fmt" "io/ioutil" "os" @@ -113,7 +127,7 @@ func (p Proc) CmdLine() ([]string, error) { return []string{}, nil } - return strings.Split(string(data[:len(data)-1]), string(byte(0))), nil + return strings.Split(string(bytes.TrimRight(data, string("\x00"))), string(byte(0))), nil } // Comm returns the command name of a process. diff --git a/vendor/github.com/prometheus/procfs/proc_io.go b/vendor/github.com/prometheus/procfs/proc_io.go index b4e31d7b..0251c83b 100644 --- a/vendor/github.com/prometheus/procfs/proc_io.go +++ b/vendor/github.com/prometheus/procfs/proc_io.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( @@ -47,9 +60,6 @@ func (p Proc) NewIO() (ProcIO, error) { _, err = fmt.Sscanf(string(data), ioFormat, &pio.RChar, &pio.WChar, &pio.SyscR, &pio.SyscW, &pio.ReadBytes, &pio.WriteBytes, &pio.CancelledWriteBytes) - if err != nil { - return pio, err - } - return pio, nil + return pio, err } diff --git a/vendor/github.com/prometheus/procfs/proc_io_test.go b/vendor/github.com/prometheus/procfs/proc_io_test.go index 3aa1a129..1afdbd46 100644 --- a/vendor/github.com/prometheus/procfs/proc_io_test.go +++ b/vendor/github.com/prometheus/procfs/proc_io_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import "testing" diff --git a/vendor/github.com/prometheus/procfs/proc_limits.go b/vendor/github.com/prometheus/procfs/proc_limits.go index 2df997ce..f04ba6fd 100644 --- a/vendor/github.com/prometheus/procfs/proc_limits.go +++ b/vendor/github.com/prometheus/procfs/proc_limits.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( @@ -13,46 +26,46 @@ import ( // http://man7.org/linux/man-pages/man2/getrlimit.2.html. type ProcLimits struct { // CPU time limit in seconds. - CPUTime int + CPUTime int64 // Maximum size of files that the process may create. - FileSize int + FileSize int64 // Maximum size of the process's data segment (initialized data, // uninitialized data, and heap). - DataSize int + DataSize int64 // Maximum size of the process stack in bytes. - StackSize int + StackSize int64 // Maximum size of a core file. - CoreFileSize int + CoreFileSize int64 // Limit of the process's resident set in pages. - ResidentSet int + ResidentSet int64 // Maximum number of processes that can be created for the real user ID of // the calling process. - Processes int + Processes int64 // Value one greater than the maximum file descriptor number that can be // opened by this process. - OpenFiles int + OpenFiles int64 // Maximum number of bytes of memory that may be locked into RAM. - LockedMemory int + LockedMemory int64 // Maximum size of the process's virtual memory address space in bytes. - AddressSpace int + AddressSpace int64 // Limit on the combined number of flock(2) locks and fcntl(2) leases that // this process may establish. - FileLocks int + FileLocks int64 // Limit of signals that may be queued for the real user ID of the calling // process. - PendingSignals int + PendingSignals int64 // Limit on the number of bytes that can be allocated for POSIX message // queues for the real user ID of the calling process. - MsqqueueSize int + MsqqueueSize int64 // Limit of the nice priority set using setpriority(2) or nice(2). - NicePriority int + NicePriority int64 // Limit of the real-time priority set using sched_setscheduler(2) or // sched_setparam(2). - RealtimePriority int + RealtimePriority int64 // Limit (in microseconds) on the amount of CPU time that a process // scheduled under a real-time scheduling policy may consume without making // a blocking system call. - RealtimeTimeout int + RealtimeTimeout int64 } const ( @@ -125,13 +138,13 @@ func (p Proc) NewLimits() (ProcLimits, error) { return l, s.Err() } -func parseInt(s string) (int, error) { +func parseInt(s string) (int64, error) { if s == limitsUnlimited { return -1, nil } - i, err := strconv.ParseInt(s, 10, 32) + i, err := strconv.ParseInt(s, 10, 64) if err != nil { return 0, fmt.Errorf("couldn't parse value %s: %s", s, err) } - return int(i), nil + return i, nil } diff --git a/vendor/github.com/prometheus/procfs/proc_limits_test.go b/vendor/github.com/prometheus/procfs/proc_limits_test.go index 70bf04ec..ebb43ae7 100644 --- a/vendor/github.com/prometheus/procfs/proc_limits_test.go +++ b/vendor/github.com/prometheus/procfs/proc_limits_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import "testing" @@ -15,14 +28,14 @@ func TestNewLimits(t *testing.T) { for _, test := range []struct { name string - want int - have int + want int64 + have int64 }{ {name: "cpu time", want: -1, have: l.CPUTime}, {name: "open files", want: 2048, have: l.OpenFiles}, {name: "msgqueue size", want: 819200, have: l.MsqqueueSize}, {name: "nice priority", want: 0, have: l.NicePriority}, - {name: "address space", want: -1, have: l.AddressSpace}, + {name: "address space", want: 8589934592, have: l.AddressSpace}, } { if test.want != test.have { t.Errorf("want %s %d, have %d", test.name, test.want, test.have) diff --git a/vendor/github.com/prometheus/procfs/proc_ns.go b/vendor/github.com/prometheus/procfs/proc_ns.go new file mode 100644 index 00000000..d06c26eb --- /dev/null +++ b/vendor/github.com/prometheus/procfs/proc_ns.go @@ -0,0 +1,68 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "fmt" + "os" + "strconv" + "strings" +) + +// Namespace represents a single namespace of a process. +type Namespace struct { + Type string // Namespace type. + Inode uint32 // Inode number of the namespace. If two processes are in the same namespace their inodes will match. +} + +// Namespaces contains all of the namespaces that the process is contained in. +type Namespaces map[string]Namespace + +// NewNamespaces reads from /proc/[pid/ns/* to get the namespaces of which the +// process is a member. 
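+//
+// A small usage sketch (illustrative only; the pid and fixture path come from
+// the tests below and are not part of this function):
+//
+//	p, err := FS("fixtures").NewProc(26231)
+//	if err == nil {
+//		ns, _ := p.NewNamespaces()
+//		fmt.Println(ns["net"].Inode) // 4026531993 in the test fixtures
+//	}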
+func (p Proc) NewNamespaces() (Namespaces, error) { + d, err := os.Open(p.path("ns")) + if err != nil { + return nil, err + } + defer d.Close() + + names, err := d.Readdirnames(-1) + if err != nil { + return nil, fmt.Errorf("failed to read contents of ns dir: %v", err) + } + + ns := make(Namespaces, len(names)) + for _, name := range names { + target, err := os.Readlink(p.path("ns", name)) + if err != nil { + return nil, err + } + + fields := strings.SplitN(target, ":", 2) + if len(fields) != 2 { + return nil, fmt.Errorf("failed to parse namespace type and inode from '%v'", target) + } + + typ := fields[0] + inode, err := strconv.ParseUint(strings.Trim(fields[1], "[]"), 10, 32) + if err != nil { + return nil, fmt.Errorf("failed to parse inode from '%v': %v", fields[1], err) + } + + ns[name] = Namespace{typ, uint32(inode)} + } + + return ns, nil +} diff --git a/vendor/github.com/prometheus/procfs/proc_ns_test.go b/vendor/github.com/prometheus/procfs/proc_ns_test.go new file mode 100644 index 00000000..abfd63e5 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/proc_ns_test.go @@ -0,0 +1,44 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "testing" +) + +func TestNewNamespaces(t *testing.T) { + p, err := FS("fixtures").NewProc(26231) + if err != nil { + t.Fatal(err) + } + + namespaces, err := p.NewNamespaces() + if err != nil { + t.Fatal(err) + } + + expectedNamespaces := map[string]Namespace{ + "mnt": {"mnt", 4026531840}, + "net": {"net", 4026531993}, + } + + if want, have := len(expectedNamespaces), len(namespaces); want != have { + t.Errorf("want %d parsed namespaces, have %d", want, have) + } + for _, ns := range namespaces { + if want, have := expectedNamespaces[ns.Type], ns; want != have { + t.Errorf("%s: want %v, have %v", ns.Type, want, have) + } + } +} diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go index 724e271b..3cf2a9f1 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ b/vendor/github.com/prometheus/procfs/proc_stat.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/proc_stat_test.go b/vendor/github.com/prometheus/procfs/proc_stat_test.go index a2ebcde7..e2df8845 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat_test.go +++ b/vendor/github.com/prometheus/procfs/proc_stat_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( diff --git a/vendor/github.com/prometheus/procfs/proc_test.go b/vendor/github.com/prometheus/procfs/proc_test.go index 104b3245..ee7e69d6 100644 --- a/vendor/github.com/prometheus/procfs/proc_test.go +++ b/vendor/github.com/prometheus/procfs/proc_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( @@ -43,6 +56,7 @@ func TestCmdLine(t *testing.T) { }{ {process: 26231, want: []string{"vim", "test.go", "+10"}}, {process: 26232, want: []string{}}, + {process: 26233, want: []string{"com.github.uiautomator"}}, } { p1, err := FS("fixtures").NewProc(tt.process) if err != nil { diff --git a/vendor/github.com/prometheus/procfs/scripts/check_license.sh b/vendor/github.com/prometheus/procfs/scripts/check_license.sh new file mode 100755 index 00000000..ac13e960 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/scripts/check_license.sh @@ -0,0 +1,29 @@ +#!/bin/sh +# +# Copyright 2018 The Prometheus Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +check_license() { + local file="" + for file in $(find . -type f -iname '*.go' ! 
-path './vendor/*'); do + head -n3 "${file}" | grep -Eq "(Copyright|generated|GENERATED)" || echo " ${file}" + done +} + +licRes=$(check_license) + +if [ -n "${licRes}" ]; then + echo "license header checking failed:" + echo "${licRes}" + exit 255 +fi diff --git a/vendor/github.com/prometheus/procfs/stat.go b/vendor/github.com/prometheus/procfs/stat.go index 1ca217e8..61eb6b0e 100644 --- a/vendor/github.com/prometheus/procfs/stat.go +++ b/vendor/github.com/prometheus/procfs/stat.go @@ -1,17 +1,81 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package procfs import ( "bufio" "fmt" + "io" "os" "strconv" "strings" ) +// CPUStat shows how much time the cpu spend in various stages. +type CPUStat struct { + User float64 + Nice float64 + System float64 + Idle float64 + Iowait float64 + IRQ float64 + SoftIRQ float64 + Steal float64 + Guest float64 + GuestNice float64 +} + +// SoftIRQStat represent the softirq statistics as exported in the procfs stat file. +// A nice introduction can be found at https://0xax.gitbooks.io/linux-insides/content/interrupts/interrupts-9.html +// It is possible to get per-cpu stats by reading /proc/softirqs +type SoftIRQStat struct { + Hi uint64 + Timer uint64 + NetTx uint64 + NetRx uint64 + Block uint64 + BlockIoPoll uint64 + Tasklet uint64 + Sched uint64 + Hrtimer uint64 + Rcu uint64 +} + // Stat represents kernel/system statistics. type Stat struct { // Boot time in seconds since the Epoch. - BootTime int64 + BootTime uint64 + // Summed up cpu statistics. + CPUTotal CPUStat + // Per-CPU statistics. + CPU []CPUStat + // Number of times interrupts were handled, which contains numbered and unnumbered IRQs. + IRQTotal uint64 + // Number of times a numbered IRQ was triggered. + IRQ []uint64 + // Number of times a context switch happened. + ContextSwitches uint64 + // Number of times a process was created. + ProcessCreated uint64 + // Number of processes currently running. + ProcessesRunning uint64 + // Number of processes currently blocked (waiting for IO). + ProcessesBlocked uint64 + // Number of times a softirq was scheduled. + SoftIRQTotal uint64 + // Detailed softirq statistics. + SoftIRQ SoftIRQStat } // NewStat returns kernel/system statistics read from /proc/stat. @@ -24,33 +88,145 @@ func NewStat() (Stat, error) { return fs.NewStat() } +// Parse a cpu statistics line and returns the CPUStat struct plus the cpu id (or -1 for the overall sum). 
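+//
+// For illustration, such a line has the form
+//
+//	cpu0 <user> <nice> <system> <idle> <iowait> <irq> <softirq> <steal> <guest> <guest_nice>
+//
+// with the raw values in USER_HZ ticks (converted to seconds below by dividing
+// by userHZ); the aggregate line uses the id "cpu" without a number and is
+// reported here with cpu id -1.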
+func parseCPUStat(line string) (CPUStat, int64, error) { + cpuStat := CPUStat{} + var cpu string + + count, err := fmt.Sscanf(line, "%s %f %f %f %f %f %f %f %f %f %f", + &cpu, + &cpuStat.User, &cpuStat.Nice, &cpuStat.System, &cpuStat.Idle, + &cpuStat.Iowait, &cpuStat.IRQ, &cpuStat.SoftIRQ, &cpuStat.Steal, + &cpuStat.Guest, &cpuStat.GuestNice) + + if err != nil && err != io.EOF { + return CPUStat{}, -1, fmt.Errorf("couldn't parse %s (cpu): %s", line, err) + } + if count == 0 { + return CPUStat{}, -1, fmt.Errorf("couldn't parse %s (cpu): 0 elements parsed", line) + } + + cpuStat.User /= userHZ + cpuStat.Nice /= userHZ + cpuStat.System /= userHZ + cpuStat.Idle /= userHZ + cpuStat.Iowait /= userHZ + cpuStat.IRQ /= userHZ + cpuStat.SoftIRQ /= userHZ + cpuStat.Steal /= userHZ + cpuStat.Guest /= userHZ + cpuStat.GuestNice /= userHZ + + if cpu == "cpu" { + return cpuStat, -1, nil + } + + cpuID, err := strconv.ParseInt(cpu[3:], 10, 64) + if err != nil { + return CPUStat{}, -1, fmt.Errorf("couldn't parse %s (cpu/cpuid): %s", line, err) + } + + return cpuStat, cpuID, nil +} + +// Parse a softirq line. +func parseSoftIRQStat(line string) (SoftIRQStat, uint64, error) { + softIRQStat := SoftIRQStat{} + var total uint64 + var prefix string + + _, err := fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d %d %d", + &prefix, &total, + &softIRQStat.Hi, &softIRQStat.Timer, &softIRQStat.NetTx, &softIRQStat.NetRx, + &softIRQStat.Block, &softIRQStat.BlockIoPoll, + &softIRQStat.Tasklet, &softIRQStat.Sched, + &softIRQStat.Hrtimer, &softIRQStat.Rcu) + + if err != nil { + return SoftIRQStat{}, 0, fmt.Errorf("couldn't parse %s (softirq): %s", line, err) + } + + return softIRQStat, total, nil +} + // NewStat returns an information about current kernel/system statistics. func (fs FS) NewStat() (Stat, error) { + // See https://www.kernel.org/doc/Documentation/filesystems/proc.txt + f, err := os.Open(fs.Path("stat")) if err != nil { return Stat{}, err } defer f.Close() - s := bufio.NewScanner(f) - for s.Scan() { - line := s.Text() - if !strings.HasPrefix(line, "btime") { + stat := Stat{} + + scanner := bufio.NewScanner(f) + for scanner.Scan() { + line := scanner.Text() + parts := strings.Fields(scanner.Text()) + // require at least + if len(parts) < 2 { continue } - fields := strings.Fields(line) - if len(fields) != 2 { - return Stat{}, fmt.Errorf("couldn't parse %s line %s", f.Name(), line) + switch { + case parts[0] == "btime": + if stat.BootTime, err = strconv.ParseUint(parts[1], 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (btime): %s", parts[1], err) + } + case parts[0] == "intr": + if stat.IRQTotal, err = strconv.ParseUint(parts[1], 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (intr): %s", parts[1], err) + } + numberedIRQs := parts[2:] + stat.IRQ = make([]uint64, len(numberedIRQs)) + for i, count := range numberedIRQs { + if stat.IRQ[i], err = strconv.ParseUint(count, 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (intr%d): %s", count, i, err) + } + } + case parts[0] == "ctxt": + if stat.ContextSwitches, err = strconv.ParseUint(parts[1], 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (ctxt): %s", parts[1], err) + } + case parts[0] == "processes": + if stat.ProcessCreated, err = strconv.ParseUint(parts[1], 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (processes): %s", parts[1], err) + } + case parts[0] == "procs_running": + if stat.ProcessesRunning, err = strconv.ParseUint(parts[1], 10, 64); err != nil { 
+ return Stat{}, fmt.Errorf("couldn't parse %s (procs_running): %s", parts[1], err) + } + case parts[0] == "procs_blocked": + if stat.ProcessesBlocked, err = strconv.ParseUint(parts[1], 10, 64); err != nil { + return Stat{}, fmt.Errorf("couldn't parse %s (procs_blocked): %s", parts[1], err) + } + case parts[0] == "softirq": + softIRQStats, total, err := parseSoftIRQStat(line) + if err != nil { + return Stat{}, err + } + stat.SoftIRQTotal = total + stat.SoftIRQ = softIRQStats + case strings.HasPrefix(parts[0], "cpu"): + cpuStat, cpuID, err := parseCPUStat(line) + if err != nil { + return Stat{}, err + } + if cpuID == -1 { + stat.CPUTotal = cpuStat + } else { + for int64(len(stat.CPU)) <= cpuID { + stat.CPU = append(stat.CPU, CPUStat{}) + } + stat.CPU[cpuID] = cpuStat + } } - i, err := strconv.ParseInt(fields[1], 10, 32) - if err != nil { - return Stat{}, fmt.Errorf("couldn't parse %s: %s", fields[1], err) - } - return Stat{BootTime: i}, nil } - if err := s.Err(); err != nil { + + if err := scanner.Err(); err != nil { return Stat{}, fmt.Errorf("couldn't parse %s: %s", f.Name(), err) } - return Stat{}, fmt.Errorf("couldn't parse %s, missing btime", f.Name()) + return stat, nil } diff --git a/vendor/github.com/prometheus/procfs/stat_test.go b/vendor/github.com/prometheus/procfs/stat_test.go index 6eb79247..2043b5e4 100644 --- a/vendor/github.com/prometheus/procfs/stat_test.go +++ b/vendor/github.com/prometheus/procfs/stat_test.go @@ -1,3 +1,16 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ package procfs import "testing" @@ -8,7 +21,54 @@ func TestStat(t *testing.T) { t.Fatal(err) } - if want, have := int64(1418183276), s.BootTime; want != have { - t.Errorf("want boot time %d, have %d", want, have) + // cpu + if want, have := float64(301854)/userHZ, s.CPUTotal.User; want != have { + t.Errorf("want cpu/user %v, have %v", want, have) } + if want, have := float64(31)/userHZ, s.CPU[7].SoftIRQ; want != have { + t.Errorf("want cpu7/softirq %v, have %v", want, have) + } + + // intr + if want, have := uint64(8885917), s.IRQTotal; want != have { + t.Errorf("want irq/total %d, have %d", want, have) + } + if want, have := uint64(1), s.IRQ[8]; want != have { + t.Errorf("want irq8 %d, have %d", want, have) + } + + // ctxt + if want, have := uint64(38014093), s.ContextSwitches; want != have { + t.Errorf("want context switches (ctxt) %d, have %d", want, have) + } + + // btime + if want, have := uint64(1418183276), s.BootTime; want != have { + t.Errorf("want boot time (btime) %d, have %d", want, have) + } + + // processes + if want, have := uint64(26442), s.ProcessCreated; want != have { + t.Errorf("want process created (processes) %d, have %d", want, have) + } + + // procs_running + if want, have := uint64(2), s.ProcessesRunning; want != have { + t.Errorf("want processes running (procs_running) %d, have %d", want, have) + } + + // procs_blocked + if want, have := uint64(1), s.ProcessesBlocked; want != have { + t.Errorf("want processes blocked (procs_blocked) %d, have %d", want, have) + } + + // softirq + if want, have := uint64(5057579), s.SoftIRQTotal; want != have { + t.Errorf("want softirq total %d, have %d", want, have) + } + + if want, have := uint64(508444), s.SoftIRQ.Rcu; want != have { + t.Errorf("want softirq RCU %d, have %d", want, have) + } + } diff --git a/vendor/github.com/prometheus/procfs/sysfs/.gitignore b/vendor/github.com/prometheus/procfs/sysfs/.gitignore new file mode 100644 index 00000000..67fc140b --- /dev/null +++ b/vendor/github.com/prometheus/procfs/sysfs/.gitignore @@ -0,0 +1 @@ +fixtures/ diff --git a/vendor/github.com/prometheus/procfs/sysfs/fixtures.ttar b/vendor/github.com/prometheus/procfs/sysfs/fixtures.ttar new file mode 100644 index 00000000..8e665ce5 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/sysfs/fixtures.ttar @@ -0,0 +1,851 @@ +# Archive created by ttar -c -f fixtures.ttar fixtures/ +Directory: fixtures +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: 
fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/dirty_data +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_hits +Lines: 1 +289 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_day/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_five_minute/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_hour/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total +Mode: 755 +# ttar - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_hits +Lines: 1 +546 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata4/host3/target3:0:0/3:0:0:0/block/sdb/bcache/stats_total/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc/bcache +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc/bcache/io_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc/bcache/metadata_written +Lines: 1 +512 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc/bcache/priority_stats +Lines: 5 +Unused: 99% +Metadata: 0% +Average: 10473 +Sectors per Q: 64 +Quantiles: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946] +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/devices/pci0000:00/0000:00:0d.0/ata5/host4/target4:0:0/4:0:0:0/block/sdc/bcache/written +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/average_key_size +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0 +Mode: 777 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/dirty_data +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_hits +Lines: 1 +289 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_day/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_five_minute/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_hour/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total +Mode: 755 +# ttar - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_hits +Lines: 1 +546 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/bdev0/stats_total/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/btree_cache_size +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache0 +Mode: 777 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache0/io_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache0/metadata_written +Lines: 1 +512 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache0/priority_stats +Lines: 5 +Unused: 99% +Metadata: 0% +Average: 10473 +Sectors per Q: 64 +Quantiles: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946 20946] +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache0/written +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/cache_available_percent +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/congested +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/internal +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/internal/active_journal_entries +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/internal/btree_nodes +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/internal/btree_read_average_duration_us +Lines: 1 +1305 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/internal/cache_read_races +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/root_usage_percent +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_hits +Lines: 1 +289 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_day/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_five_minute/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_hit_ratio +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_hour/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/bypassed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_bypass_hits +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_bypass_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_hit_ratio +Lines: 1 +100 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_hits +Lines: 1 +546 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_miss_collisions +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_misses +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/stats_total/cache_readaheads +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/bcache/deaddd54-c735-46d5-868e-f331c5fd7c74/tree_depth +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs/sda1 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs/sda1/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/xfs/sda1/stats/stats +Lines: 1 +extent_alloc 1 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs/sdb1 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/fs/xfs/sdb1/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/fs/xfs/sdb1/stats/stats +Lines: 1 +extent_alloc 2 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/class/net/eth0/ +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/addr_assign_type +Lines: 1 +3 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/addr_len +Lines: 1 +6 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/address +Lines: 1 +01:01:01:01:01:01 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/broadcast +Lines: 1 +ff:ff:ff:ff:ff:ff +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/carrier +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/carrier_changes +Lines: 1 +2 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/carrier_down_count +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/carrier_up_count +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/dev_id +Lines: 1 +0x20 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/dormant +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/duplex +Lines: 1 +full +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/flags +Lines: 1 +0x1303 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/ifalias +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/ifindex +Lines: 1 +2 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/iflink +Lines: 1 +2 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/link_mode +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/mtu +Lines: 1 +1500 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/name_assign_type +Lines: 1 +2 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/netdev_group +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/operstate +Lines: 1 +up +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/phys_port_id +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/phys_port_name +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/phys_switch_id +Lines: 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/speed +Lines: 1 +1000 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/tx_queue_len +Lines: 1 +1000 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/class/net/eth0/type +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sda1/stats/stats b/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sda1/stats/stats deleted file mode 100644 index 0db7520b..00000000 --- a/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sda1/stats/stats +++ /dev/null @@ -1 +0,0 @@ -extent_alloc 1 0 0 0 diff --git a/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sdb1/stats/stats b/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sdb1/stats/stats deleted file mode 100644 index 85a03840..00000000 --- a/vendor/github.com/prometheus/procfs/sysfs/fixtures/fs/xfs/sdb1/stats/stats +++ /dev/null @@ -1 +0,0 @@ -extent_alloc 2 0 0 0 diff --git a/vendor/github.com/prometheus/procfs/sysfs/fs.go b/vendor/github.com/prometheus/procfs/sysfs/fs.go index 8e838008..fb15d438 100644 --- a/vendor/github.com/prometheus/procfs/sysfs/fs.go +++ b/vendor/github.com/prometheus/procfs/sysfs/fs.go @@ -18,6 +18,7 @@ import ( "os" "path/filepath" + 
"github.com/prometheus/procfs/bcache" "github.com/prometheus/procfs/xfs" ) @@ -80,3 +81,28 @@ func (fs FS) XFSStats() ([]*xfs.Stats, error) { return stats, nil } + +// BcacheStats retrieves bcache runtime statistics for each bcache. +func (fs FS) BcacheStats() ([]*bcache.Stats, error) { + matches, err := filepath.Glob(fs.Path("fs/bcache/*-*")) + if err != nil { + return nil, err + } + + stats := make([]*bcache.Stats, 0, len(matches)) + for _, uuidPath := range matches { + // "*-*" in glob above indicates the name of the bcache. + name := filepath.Base(uuidPath) + + // stats + s, err := bcache.GetStats(uuidPath) + if err != nil { + return nil, err + } + + s.Name = name + stats = append(stats, s) + } + + return stats, nil +} diff --git a/vendor/github.com/prometheus/procfs/sysfs/fs_test.go b/vendor/github.com/prometheus/procfs/sysfs/fs_test.go index d7f2b736..2b7402ec 100644 --- a/vendor/github.com/prometheus/procfs/sysfs/fs_test.go +++ b/vendor/github.com/prometheus/procfs/sysfs/fs_test.go @@ -64,3 +64,45 @@ func TestFSXFSStats(t *testing.T) { } } } + +func TestFSBcacheStats(t *testing.T) { + stats, err := FS("fixtures").BcacheStats() + if err != nil { + t.Fatalf("failed to parse bcache stats: %v", err) + } + + tests := []struct { + name string + bdevs int + caches int + }{ + { + name: "deaddd54-c735-46d5-868e-f331c5fd7c74", + bdevs: 1, + caches: 1, + }, + } + + const expect = 1 + + if l := len(stats); l != expect { + t.Fatalf("unexpected number of bcache stats: %d", l) + } + if l := len(tests); l != expect { + t.Fatalf("unexpected number of tests: %d", l) + } + + for i, tt := range tests { + if want, got := tt.name, stats[i].Name; want != got { + t.Errorf("unexpected stats name:\nwant: %q\nhave: %q", want, got) + } + + if want, got := tt.bdevs, len(stats[i].Bdevs); want != got { + t.Errorf("unexpected value allocated:\nwant: %d\nhave: %d", want, got) + } + + if want, got := tt.caches, len(stats[i].Caches); want != got { + t.Errorf("unexpected value allocated:\nwant: %d\nhave: %d", want, got) + } + } +} diff --git a/vendor/github.com/prometheus/procfs/sysfs/net_class.go b/vendor/github.com/prometheus/procfs/sysfs/net_class.go new file mode 100644 index 00000000..46affd7f --- /dev/null +++ b/vendor/github.com/prometheus/procfs/sysfs/net_class.go @@ -0,0 +1,165 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sysfs + +import ( + "fmt" + "io/ioutil" + "os" + "reflect" + "strconv" + "strings" + "syscall" +) + +// NetClassIface contains info from files in /sys/class/net/ +// for single interface (iface). 
+type NetClassIface struct {
+	Name             string // Interface name
+	AddrAssignType   int64  `fileName:"addr_assign_type"`   // /sys/class/net/<iface>/addr_assign_type
+	AddrLen          int64  `fileName:"addr_len"`           // /sys/class/net/<iface>/addr_len
+	Address          string `fileName:"address"`            // /sys/class/net/<iface>/address
+	Broadcast        string `fileName:"broadcast"`          // /sys/class/net/<iface>/broadcast
+	Carrier          int64  `fileName:"carrier"`            // /sys/class/net/<iface>/carrier
+	CarrierChanges   int64  `fileName:"carrier_changes"`    // /sys/class/net/<iface>/carrier_changes
+	CarrierUpCount   int64  `fileName:"carrier_up_count"`   // /sys/class/net/<iface>/carrier_up_count
+	CarrierDownCount int64  `fileName:"carrier_down_count"` // /sys/class/net/<iface>/carrier_down_count
+	DevID            int64  `fileName:"dev_id"`             // /sys/class/net/<iface>/dev_id
+	Dormant          int64  `fileName:"dormant"`            // /sys/class/net/<iface>/dormant
+	Duplex           string `fileName:"duplex"`             // /sys/class/net/<iface>/duplex
+	Flags            int64  `fileName:"flags"`              // /sys/class/net/<iface>/flags
+	IfAlias          string `fileName:"ifalias"`            // /sys/class/net/<iface>/ifalias
+	IfIndex          int64  `fileName:"ifindex"`            // /sys/class/net/<iface>/ifindex
+	IfLink           int64  `fileName:"iflink"`             // /sys/class/net/<iface>/iflink
+	LinkMode         int64  `fileName:"link_mode"`          // /sys/class/net/<iface>/link_mode
+	MTU              int64  `fileName:"mtu"`                // /sys/class/net/<iface>/mtu
+	NameAssignType   int64  `fileName:"name_assign_type"`   // /sys/class/net/<iface>/name_assign_type
+	NetDevGroup      int64  `fileName:"netdev_group"`       // /sys/class/net/<iface>/netdev_group
+	OperState        string `fileName:"operstate"`          // /sys/class/net/<iface>/operstate
+	PhysPortID       string `fileName:"phys_port_id"`       // /sys/class/net/<iface>/phys_port_id
+	PhysPortName     string `fileName:"phys_port_name"`     // /sys/class/net/<iface>/phys_port_name
+	PhysSwitchID     string `fileName:"phys_switch_id"`     // /sys/class/net/<iface>/phys_switch_id
+	Speed            int64  `fileName:"speed"`              // /sys/class/net/<iface>/speed
+	TxQueueLen       int64  `fileName:"tx_queue_len"`       // /sys/class/net/<iface>/tx_queue_len
+	Type             int64  `fileName:"type"`               // /sys/class/net/<iface>/type
+}
+
+// NetClass is collection of info for every interface (iface) in /sys/class/net. The map keys
+// are interface (iface) names.
+type NetClass map[string]NetClassIface
+
+// NewNetClass returns info for all net interfaces (iface) read from /sys/class/net/<iface>.
+func NewNetClass() (NetClass, error) {
+	fs, err := NewFS(DefaultMountPoint)
+	if err != nil {
+		return nil, err
+	}
+
+	return fs.NewNetClass()
+}
+
+// NewNetClass returns info for all net interfaces (iface) read from /sys/class/net/<iface>.
+func (fs FS) NewNetClass() (NetClass, error) {
+	path := fs.Path("class/net")
+
+	devices, err := ioutil.ReadDir(path)
+	if err != nil {
+		return NetClass{}, fmt.Errorf("cannot access %s dir %s", path, err)
+	}
+
+	netClass := NetClass{}
+	for _, deviceDir := range devices {
+		interfaceClass, err := netClass.parseNetClassIface(path + "/" + deviceDir.Name())
+		if err != nil {
+			return nil, err
+		}
+		interfaceClass.Name = deviceDir.Name()
+		netClass[deviceDir.Name()] = *interfaceClass
+	}
+	return netClass, nil
+}
+
+// parseNetClassIface scans predefined files in /sys/class/net/<iface>
+// directory and gets their contents.
+func (nc NetClass) parseNetClassIface(devicePath string) (*NetClassIface, error) { + interfaceClass := NetClassIface{} + interfaceElem := reflect.ValueOf(&interfaceClass).Elem() + interfaceType := reflect.TypeOf(interfaceClass) + + //start from 1 - skip the Name field + for i := 1; i < interfaceElem.NumField(); i++ { + fieldType := interfaceType.Field(i) + fieldValue := interfaceElem.Field(i) + + if fieldType.Tag.Get("fileName") == "" { + panic(fmt.Errorf("field %s does not have a filename tag", fieldType.Name)) + } + + fileContents, err := sysReadFile(devicePath + "/" + fieldType.Tag.Get("fileName")) + + if err != nil { + if os.IsNotExist(err) || err.Error() == "operation not supported" || err.Error() == "invalid argument" { + continue + } + return nil, fmt.Errorf("could not access file %s: %s", fieldType.Tag.Get("fileName"), err) + } + value := strings.TrimSpace(string(fileContents)) + + switch fieldValue.Kind() { + case reflect.Int64: + if strings.HasPrefix(value, "0x") { + intValue, err := strconv.ParseInt(value[2:], 16, 64) + if err != nil { + return nil, fmt.Errorf("expected hex value for %s, got: %s", fieldType.Name, value) + } + fieldValue.SetInt(intValue) + } else { + intValue, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return nil, fmt.Errorf("expected Uint64 value for %s, got: %s", fieldType.Name, value) + } + fieldValue.SetInt(intValue) + } + case reflect.String: + fieldValue.SetString(value) + default: + return nil, fmt.Errorf("unhandled type %q", fieldValue.Kind()) + } + } + + return &interfaceClass, nil +} + +// sysReadFile is a simplified ioutil.ReadFile that invokes syscall.Read directly. +// https://github.com/prometheus/node_exporter/pull/728/files +func sysReadFile(file string) ([]byte, error) { + f, err := os.Open(file) + if err != nil { + return nil, err + } + defer f.Close() + + // On some machines, hwmon drivers are broken and return EAGAIN. This causes + // Go's ioutil.ReadFile implementation to poll forever. + // + // Since we either want to read data or bail immediately, do the simplest + // possible read using syscall directly. + b := make([]byte, 128) + n, err := syscall.Read(int(f.Fd()), b) + if err != nil { + return nil, err + } + + return b[:n], nil +} diff --git a/vendor/github.com/prometheus/procfs/sysfs/net_class_test.go b/vendor/github.com/prometheus/procfs/sysfs/net_class_test.go new file mode 100644 index 00000000..5c356dc7 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/sysfs/net_class_test.go @@ -0,0 +1,67 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
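Taken together, the new sysfs code above gives the FS type two additional entry points: BcacheStats (per-UUID bcache statistics under fs/bcache) and NewNetClass (per-interface attributes under class/net, parsed via the fileName struct tags). A minimal usage sketch, assuming the method and field names shown in the hunks above and a sysfs mounted at /sys:

```go
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs/sysfs"
)

func main() {
	fs, err := sysfs.NewFS("/sys")
	if err != nil {
		log.Fatal(err)
	}

	// One bcache.Stats entry per UUID directory matched by fs/bcache/*-*.
	bstats, err := fs.BcacheStats()
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range bstats {
		fmt.Printf("bcache %s: %d backing device(s), %d cache(s)\n",
			s.Name, len(s.Bdevs), len(s.Caches))
	}

	// One NetClassIface entry per directory under class/net.
	nc, err := fs.NewNetClass()
	if err != nil {
		log.Fatal(err)
	}
	for name, iface := range nc {
		fmt.Printf("%s: operstate=%s speed=%d mtu=%d\n",
			name, iface.OperState, iface.Speed, iface.MTU)
	}
}
```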
+ +package sysfs + +import ( + "reflect" + "testing" +) + +func TestNewNetClass(t *testing.T) { + fs, err := NewFS("fixtures") + if err != nil { + t.Fatal(err) + } + + nc, err := fs.NewNetClass() + if err != nil { + t.Fatal(err) + } + + netClass := NetClass{ + "eth0": { + Address: "01:01:01:01:01:01", + AddrAssignType: 3, + AddrLen: 6, + Broadcast: "ff:ff:ff:ff:ff:ff", + Carrier: 1, + CarrierChanges: 2, + CarrierDownCount: 1, + CarrierUpCount: 1, + DevID: 32, + Dormant: 1, + Duplex: "full", + Flags: 4867, + IfAlias: "", + IfIndex: 2, + IfLink: 2, + LinkMode: 1, + MTU: 1500, + Name: "eth0", + NameAssignType: 2, + NetDevGroup: 0, + OperState: "up", + PhysPortID: "", + PhysPortName: "", + PhysSwitchID: "", + Speed: 1000, + TxQueueLen: 1000, + Type: 1, + }, + } + + if !reflect.DeepEqual(netClass, nc) { + t.Errorf("Result not correct: want %v, have %v", netClass, nc) + } +} diff --git a/vendor/github.com/prometheus/procfs/ttar b/vendor/github.com/prometheus/procfs/ttar new file mode 100755 index 00000000..b0171a12 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/ttar @@ -0,0 +1,389 @@ +#!/usr/bin/env bash + +# Purpose: plain text tar format +# Limitations: - only suitable for text files, directories, and symlinks +# - stores only filename, content, and mode +# - not designed for untrusted input +# +# Note: must work with bash version 3.2 (macOS) + +# Copyright 2017 Roger Luethi +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit -o nounset + +# Sanitize environment (for instance, standard sorting of glob matches) +export LC_ALL=C + +path="" +CMD="" +ARG_STRING="$*" + +#------------------------------------------------------------------------------ +# Not all sed implementations can work on null bytes. In order to make ttar +# work out of the box on macOS, use Python as a stream editor. + +USE_PYTHON=0 + +PYTHON_CREATE_FILTER=$(cat << 'PCF' +#!/usr/bin/env python + +import re +import sys + +for line in sys.stdin: + line = re.sub(r'EOF', r'\EOF', line) + line = re.sub(r'NULLBYTE', r'\NULLBYTE', line) + line = re.sub('\x00', r'NULLBYTE', line) + sys.stdout.write(line) +PCF +) + +PYTHON_EXTRACT_FILTER=$(cat << 'PEF' +#!/usr/bin/env python + +import re +import sys + +for line in sys.stdin: + line = re.sub(r'(?/dev/null; then + echo "ERROR Python not found. Aborting." 
+ exit 2 + fi + USE_PYTHON=1 + fi +} + +#------------------------------------------------------------------------------ + +function usage { + bname=$(basename "$0") + cat << USAGE +Usage: $bname [-C ] -c -f (create archive) + $bname -t -f (list archive contents) + $bname [-C ] -x -f (extract archive) + +Options: + -C (change directory) + -v (verbose) + +Example: Change to sysfs directory, create ttar file from fixtures directory + $bname -C sysfs -c -f sysfs/fixtures.ttar fixtures/ +USAGE +exit "$1" +} + +function vecho { + if [ "${VERBOSE:-}" == "yes" ]; then + echo >&7 "$@" + fi +} + +function set_cmd { + if [ -n "$CMD" ]; then + echo "ERROR: more than one command given" + echo + usage 2 + fi + CMD=$1 +} + +unset VERBOSE + +while getopts :cf:htxvC: opt; do + case $opt in + c) + set_cmd "create" + ;; + f) + ARCHIVE=$OPTARG + ;; + h) + usage 0 + ;; + t) + set_cmd "list" + ;; + x) + set_cmd "extract" + ;; + v) + VERBOSE=yes + exec 7>&1 + ;; + C) + CDIR=$OPTARG + ;; + *) + echo >&2 "ERROR: invalid option -$OPTARG" + echo + usage 1 + ;; + esac +done + +# Remove processed options from arguments +shift $(( OPTIND - 1 )); + +if [ "${CMD:-}" == "" ]; then + echo >&2 "ERROR: no command given" + echo + usage 1 +elif [ "${ARCHIVE:-}" == "" ]; then + echo >&2 "ERROR: no archive name given" + echo + usage 1 +fi + +function list { + local path="" + local size=0 + local line_no=0 + local ttar_file=$1 + if [ -n "${2:-}" ]; then + echo >&2 "ERROR: too many arguments." + echo + usage 1 + fi + if [ ! -e "$ttar_file" ]; then + echo >&2 "ERROR: file not found ($ttar_file)" + echo + usage 1 + fi + while read -r line; do + line_no=$(( line_no + 1 )) + if [ $size -gt 0 ]; then + size=$(( size - 1 )) + continue + fi + if [[ $line =~ ^Path:\ (.*)$ ]]; then + path=${BASH_REMATCH[1]} + elif [[ $line =~ ^Lines:\ (.*)$ ]]; then + size=${BASH_REMATCH[1]} + echo "$path" + elif [[ $line =~ ^Directory:\ (.*)$ ]]; then + path=${BASH_REMATCH[1]} + echo "$path/" + elif [[ $line =~ ^SymlinkTo:\ (.*)$ ]]; then + echo "$path -> ${BASH_REMATCH[1]}" + fi + done < "$ttar_file" +} + +function extract { + local path="" + local size=0 + local line_no=0 + local ttar_file=$1 + if [ -n "${2:-}" ]; then + echo >&2 "ERROR: too many arguments." + echo + usage 1 + fi + if [ ! -e "$ttar_file" ]; then + echo >&2 "ERROR: file not found ($ttar_file)" + echo + usage 1 + fi + while IFS= read -r line; do + line_no=$(( line_no + 1 )) + local eof_without_newline + if [ "$size" -gt 0 ]; then + if [[ "$line" =~ [^\\]EOF ]]; then + # An EOF not preceeded by a backslash indicates that the line + # does not end with a newline + eof_without_newline=1 + else + eof_without_newline=0 + fi + # Replace NULLBYTE with null byte if at beginning of line + # Replace NULLBYTE with null byte unless preceeded by backslash + # Remove one backslash in front of NULLBYTE (if any) + # Remove EOF unless preceeded by backslash + # Remove one backslash in front of EOF + if [ $USE_PYTHON -eq 1 ]; then + echo -n "$line" | python -c "$PYTHON_EXTRACT_FILTER" >> "$path" + else + # The repeated pattern makes up for sed's lack of negative + # lookbehind assertions (for consecutive null bytes). 
+ echo -n "$line" | \ + sed -e 's/^NULLBYTE/\x0/g; + s/\([^\\]\)NULLBYTE/\1\x0/g; + s/\([^\\]\)NULLBYTE/\1\x0/g; + s/\\NULLBYTE/NULLBYTE/g; + s/\([^\\]\)EOF/\1/g; + s/\\EOF/EOF/g; + ' >> "$path" + fi + if [[ "$eof_without_newline" -eq 0 ]]; then + echo >> "$path" + fi + size=$(( size - 1 )) + continue + fi + if [[ $line =~ ^Path:\ (.*)$ ]]; then + path=${BASH_REMATCH[1]} + if [ -e "$path" ] || [ -L "$path" ]; then + rm "$path" + fi + elif [[ $line =~ ^Lines:\ (.*)$ ]]; then + size=${BASH_REMATCH[1]} + # Create file even if it is zero-length. + touch "$path" + vecho " $path" + elif [[ $line =~ ^Mode:\ (.*)$ ]]; then + mode=${BASH_REMATCH[1]} + chmod "$mode" "$path" + vecho "$mode" + elif [[ $line =~ ^Directory:\ (.*)$ ]]; then + path=${BASH_REMATCH[1]} + mkdir -p "$path" + vecho " $path/" + elif [[ $line =~ ^SymlinkTo:\ (.*)$ ]]; then + ln -s "${BASH_REMATCH[1]}" "$path" + vecho " $path -> ${BASH_REMATCH[1]}" + elif [[ $line =~ ^# ]]; then + # Ignore comments between files + continue + else + echo >&2 "ERROR: Unknown keyword on line $line_no: $line" + exit 1 + fi + done < "$ttar_file" +} + +function div { + echo "# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" \ + "- - - - - -" +} + +function get_mode { + local mfile=$1 + if [ -z "${STAT_OPTION:-}" ]; then + if stat -c '%a' "$mfile" >/dev/null 2>&1; then + # GNU stat + STAT_OPTION='-c' + STAT_FORMAT='%a' + else + # BSD stat + STAT_OPTION='-f' + # Octal output, user/group/other (omit file type, sticky bit) + STAT_FORMAT='%OLp' + fi + fi + stat "${STAT_OPTION}" "${STAT_FORMAT}" "$mfile" +} + +function _create { + shopt -s nullglob + local mode + local eof_without_newline + while (( "$#" )); do + file=$1 + if [ -L "$file" ]; then + echo "Path: $file" + symlinkTo=$(readlink "$file") + echo "SymlinkTo: $symlinkTo" + vecho " $file -> $symlinkTo" + div + elif [ -d "$file" ]; then + # Strip trailing slash (if there is one) + file=${file%/} + echo "Directory: $file" + mode=$(get_mode "$file") + echo "Mode: $mode" + vecho "$mode $file/" + div + # Find all files and dirs, including hidden/dot files + for x in "$file/"{*,.[^.]*}; do + _create "$x" + done + elif [ -f "$file" ]; then + echo "Path: $file" + lines=$(wc -l "$file"|awk '{print $1}') + eof_without_newline=0 + if [[ "$(wc -c "$file"|awk '{print $1}')" -gt 0 ]] && \ + [[ "$(tail -c 1 "$file" | wc -l)" -eq 0 ]]; then + eof_without_newline=1 + lines=$((lines+1)) + fi + echo "Lines: $lines" + # Add backslash in front of EOF + # Add backslash in front of NULLBYTE + # Replace null byte with NULLBYTE + if [ $USE_PYTHON -eq 1 ]; then + < "$file" python -c "$PYTHON_CREATE_FILTER" + else + < "$file" \ + sed 's/EOF/\\EOF/g; + s/NULLBYTE/\\NULLBYTE/g; + s/\x0/NULLBYTE/g; + ' + fi + if [[ "$eof_without_newline" -eq 1 ]]; then + # Finish line with EOF to indicate that the original line did + # not end with a linefeed + echo "EOF" + fi + mode=$(get_mode "$file") + echo "Mode: $mode" + vecho "$mode $file" + div + else + echo >&2 "ERROR: file not found ($file in $(pwd))" + exit 2 + fi + shift + done +} + +function create { + ttar_file=$1 + shift + if [ -z "${1:-}" ]; then + echo >&2 "ERROR: missing arguments." 
+ echo + usage 1 + fi + if [ -e "$ttar_file" ]; then + rm "$ttar_file" + fi + exec > "$ttar_file" + echo "# Archive created by ttar $ARG_STRING" + _create "$@" +} + +test_environment + +if [ -n "${CDIR:-}" ]; then + if [[ "$ARCHIVE" != /* ]]; then + # Relative path: preserve the archive's location before changing + # directory + ARCHIVE="$(pwd)/$ARCHIVE" + fi + cd "$CDIR" +fi + +"$CMD" "$ARCHIVE" "$@" diff --git a/vendor/github.com/prometheus/procfs/xfs/parse.go b/vendor/github.com/prometheus/procfs/xfs/parse.go index c8f6279f..2bc0ef34 100644 --- a/vendor/github.com/prometheus/procfs/xfs/parse.go +++ b/vendor/github.com/prometheus/procfs/xfs/parse.go @@ -17,8 +17,9 @@ import ( "bufio" "fmt" "io" - "strconv" "strings" + + "github.com/prometheus/procfs/internal/util" ) // ParseStats parses a Stats from an input io.Reader, using the format @@ -68,7 +69,7 @@ func ParseStats(r io.Reader) (*Stats, error) { // Extended precision counters are uint64 values. if label == fieldXpc { - us, err := parseUint64s(ss[1:]) + us, err := util.ParseUint64s(ss[1:]) if err != nil { return nil, err } @@ -82,7 +83,7 @@ func ParseStats(r io.Reader) (*Stats, error) { } // All other counters are uint32 values. - us, err := parseUint32s(ss[1:]) + us, err := util.ParseUint32s(ss[1:]) if err != nil { return nil, err } @@ -327,33 +328,3 @@ func extendedPrecisionStats(us []uint64) (ExtendedPrecisionStats, error) { ReadBytes: us[2], }, nil } - -// parseUint32s parses a slice of strings into a slice of uint32s. -func parseUint32s(ss []string) ([]uint32, error) { - us := make([]uint32, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 10, 32) - if err != nil { - return nil, err - } - - us = append(us, uint32(u)) - } - - return us, nil -} - -// parseUint64s parses a slice of strings into a slice of uint64s. -func parseUint64s(ss []string) ([]uint64, error) { - us := make([]uint64, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return nil, err - } - - us = append(us, u) - } - - return us, nil -} diff --git a/vendor/github.com/remind101/migrate/migrate.go b/vendor/github.com/remind101/migrate/migrate.go index d1a082cd..8c829dbe 100644 --- a/vendor/github.com/remind101/migrate/migrate.go +++ b/vendor/github.com/remind101/migrate/migrate.go @@ -59,13 +59,13 @@ type Migration struct { Down func(tx *sql.Tx) error } -// byID implements the sort.Interface interface for sorting migrations by +// ByID implements the sort.Interface interface for sorting migrations by // ID. -type byID []Migration +type ByID []Migration -func (m byID) Len() int { return len(m) } -func (m byID) Less(i, j int) bool { return m[i].ID < m[j].ID } -func (m byID) Swap(i, j int) { m[i], m[j] = m[j], m[i] } +func (m ByID) Len() int { return len(m) } +func (m ByID) Less(i, j int) bool { return m[i].ID < m[j].ID } +func (m ByID) Swap(i, j int) { m[i], m[j] = m[j], m[i] } // Migrator performs migrations. type Migrator struct { @@ -277,16 +277,16 @@ func Queries(queries []string) func(*sql.Tx) error { // When the direction is "Up", the migrations will be sorted by ID ascending. // When the direction is "Down", the migrations will be sorted by ID descending. 
func sortMigrations(dir MigrationDirection, migrations []Migration) []Migration { - var m byID + var m ByID for _, migration := range migrations { m = append(m, migration) } switch dir { case Up: - sort.Sort(byID(m)) + sort.Sort(ByID(m)) default: - sort.Sort(sort.Reverse(byID(m))) + sort.Sort(sort.Reverse(ByID(m))) } return m diff --git a/vendor/github.com/stretchr/testify/.travis.gofmt.sh b/vendor/github.com/stretchr/testify/.travis.gofmt.sh new file mode 100755 index 00000000..bfffdca8 --- /dev/null +++ b/vendor/github.com/stretchr/testify/.travis.gofmt.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +if [ -n "$(gofmt -l .)" ]; then + echo "Go code is not formatted:" + gofmt -d . + exit 1 +fi diff --git a/vendor/github.com/stretchr/testify/.travis.gogenerate.sh b/vendor/github.com/stretchr/testify/.travis.gogenerate.sh new file mode 100755 index 00000000..161b449c --- /dev/null +++ b/vendor/github.com/stretchr/testify/.travis.gogenerate.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +if [[ "$TRAVIS_GO_VERSION" =~ ^1\.[45](\..*)?$ ]]; then + exit 0 +fi + +go get github.com/ernesto-jimenez/gogen/imports +go generate ./... +if [ -n "$(git diff)" ]; then + echo "Go generate had not been run" + git diff + exit 1 +fi diff --git a/vendor/github.com/stretchr/testify/.travis.govet.sh b/vendor/github.com/stretchr/testify/.travis.govet.sh new file mode 100755 index 00000000..f8fbba7a --- /dev/null +++ b/vendor/github.com/stretchr/testify/.travis.govet.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd "$(dirname $0)" +DIRS=". assert require mock _codegen" +set -e +for subdir in $DIRS; do + pushd $subdir + go vet + popd +done diff --git a/vendor/github.com/stretchr/testify/.travis.yml b/vendor/github.com/stretchr/testify/.travis.yml index ffb9e0dd..6e51e63c 100644 --- a/vendor/github.com/stretchr/testify/.travis.yml +++ b/vendor/github.com/stretchr/testify/.travis.yml @@ -3,14 +3,13 @@ language: go sudo: false go: - - 1.1 - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 - 1.7 + - 1.8 + - 1.9 - tip script: - - go test -v ./... + - ./.travis.gogenerate.sh + - ./.travis.gofmt.sh + - ./.travis.govet.sh + - go test -v -race $(go list ./... | grep -v vendor) diff --git a/vendor/github.com/stretchr/testify/Godeps/Godeps.json b/vendor/github.com/stretchr/testify/Godeps/Godeps.json deleted file mode 100644 index df032ac3..00000000 --- a/vendor/github.com/stretchr/testify/Godeps/Godeps.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "ImportPath": "github.com/stretchr/testify", - "GoVersion": "go1.5", - "GodepVersion": "v74", - "Packages": [ - "./..." - ], - "Deps": [ - { - "ImportPath": "github.com/davecgh/go-spew/spew", - "Comment": "v1.0.0-3-g6d21280", - "Rev": "6d212800a42e8ab5c146b8ace3490ee17e5225f9" - }, - { - "ImportPath": "github.com/pmezard/go-difflib/difflib", - "Rev": "d8ed2627bdf02c080bf22230dbb337003b7aba2d" - }, - { - "ImportPath": "github.com/stretchr/objx", - "Rev": "cbeaeb16a013161a98496fad62933b1d21786672" - } - ] -} diff --git a/vendor/github.com/stretchr/testify/Godeps/Readme b/vendor/github.com/stretchr/testify/Godeps/Readme deleted file mode 100644 index 4cdaa53d..00000000 --- a/vendor/github.com/stretchr/testify/Godeps/Readme +++ /dev/null @@ -1,5 +0,0 @@ -This directory tree is generated automatically by godep. - -Please do not edit. - -See https://github.com/tools/godep for more information. 
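One functional consequence of the remind101/migrate hunk above is that the ID-ordered sort type is now exported as ByID, so callers can order their own migration slices the same way the Migrator does. A small sketch, under the assumption that Migration.ID is an integer (as the Less comparison implies):

```go
package main

import (
	"fmt"
	"sort"

	"github.com/remind101/migrate"
)

func main() {
	ms := []migrate.Migration{{ID: 3}, {ID: 1}, {ID: 2}}

	// Ascending order, as used for the Up direction.
	sort.Sort(migrate.ByID(ms))
	fmt.Println(ms[0].ID, ms[1].ID, ms[2].ID) // 1 2 3

	// Descending order, as used for the Down direction.
	sort.Sort(sort.Reverse(migrate.ByID(ms)))
	fmt.Println(ms[0].ID, ms[1].ID, ms[2].ID) // 3 2 1
}
```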
diff --git a/vendor/github.com/stretchr/testify/Gopkg.lock b/vendor/github.com/stretchr/testify/Gopkg.lock new file mode 100644 index 00000000..294cda09 --- /dev/null +++ b/vendor/github.com/stretchr/testify/Gopkg.lock @@ -0,0 +1,27 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + name = "github.com/davecgh/go-spew" + packages = ["spew"] + revision = "346938d642f2ec3594ed81d874461961cd0faa76" + version = "v1.1.0" + +[[projects]] + name = "github.com/pmezard/go-difflib" + packages = ["difflib"] + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + name = "github.com/stretchr/objx" + packages = ["."] + revision = "facf9a85c22f48d2f52f2380e4efce1768749a89" + version = "v0.1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "448ddae4702c6aded2555faafd390c537789bb1c483f70b0431e6634f73f2090" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/stretchr/testify/Gopkg.toml b/vendor/github.com/stretchr/testify/Gopkg.toml new file mode 100644 index 00000000..a16374c8 --- /dev/null +++ b/vendor/github.com/stretchr/testify/Gopkg.toml @@ -0,0 +1,16 @@ +[prune] + unused-packages = true + non-go = true + go-tests = true + +[[constraint]] + name = "github.com/davecgh/go-spew" + version = "~1.1.0" + +[[constraint]] + name = "github.com/pmezard/go-difflib" + version = "~1.0.0" + +[[constraint]] + name = "github.com/stretchr/objx" + version = "~0.1.0" diff --git a/vendor/github.com/stretchr/testify/LICENCE.txt b/vendor/github.com/stretchr/testify/LICENCE.txt deleted file mode 100644 index 473b670a..00000000 --- a/vendor/github.com/stretchr/testify/LICENCE.txt +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2012 - 2013 Mat Ryer and Tyler Bunnell - -Please consider promoting this project if you find it useful. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, -and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT -OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/stretchr/testify/README.md b/vendor/github.com/stretchr/testify/README.md index e57b1811..d3b942b7 100644 --- a/vendor/github.com/stretchr/testify/README.md +++ b/vendor/github.com/stretchr/testify/README.md @@ -9,7 +9,6 @@ Features include: * [Easy assertions](#assert-package) * [Mocking](#mock-package) - * [HTTP response trapping](#http-package) * [Testing suite interfaces and functions](#suite-package) Get started: @@ -106,14 +105,6 @@ The `require` package provides same global functions as the `assert` package, bu See [t.FailNow](http://golang.org/pkg/testing/#T.FailNow) for details. 
- -[`http`](http://godoc.org/github.com/stretchr/testify/http "API documentation") package ---------------------------------------------------------------------------------------- - -The `http` package contains test objects useful for testing code that relies on the `net/http` package. Check out the [(deprecated) API documentation for the `http` package](http://godoc.org/github.com/stretchr/testify/http). - -We recommend you use [httptest](http://golang.org/pkg/net/http/httptest) instead. - [`mock`](http://godoc.org/github.com/stretchr/testify/mock "API documentation") package ---------------------------------------------------------------------------------------- @@ -268,8 +259,7 @@ Installation To install Testify, use `go get`: - * Latest version: go get github.com/stretchr/testify - * Specific version: go get gopkg.in/stretchr/testify.v1 + go get github.com/stretchr/testify This will then make the following packages available to you: @@ -303,30 +293,9 @@ To update Testify to the latest version, use `go get -u github.com/stretchr/test ------ -Version History -=============== - - * 1.0 - New package versioning strategy adopted. - ------- - Contributing ============ Please feel free to submit issues, fork the repository and send pull requests! When submitting an issue, we ask that you please include a complete test function that demonstrates the issue. Extra credit for those using Testify to write the test code that demonstrates it. - ------- - -Licence -======= -Copyright (c) 2012 - 2013 Mat Ryer and Tyler Bunnell - -Please consider promoting this project if you find it useful. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
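The testify update above also pulls in the generated format-string assertion variants (`assertion_format.go` and the reworked `assertion_forward.go` further down). A small sketch of how the classic variadic form and the new `*f` form might sit side by side in a test; the test name and values are hypothetical:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestSomething(t *testing.T) {
	got := 123

	// Classic form: optional message and args appended as a variadic tail.
	assert.Equal(t, 123, got, "unexpected value")

	// New in this testify revision: generated *f variants that take an
	// explicit format string followed by its arguments.
	assert.Equalf(t, 123, got, "unexpected value: %d", got)
}
```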
diff --git a/vendor/github.com/stretchr/testify/_codegen/main.go b/vendor/github.com/stretchr/testify/_codegen/main.go index 328009f8..2e5e8124 100644 --- a/vendor/github.com/stretchr/testify/_codegen/main.go +++ b/vendor/github.com/stretchr/testify/_codegen/main.go @@ -1,5 +1,5 @@ // This program reads all assertion functions from the assert package and -// automatically generates the corersponding requires and forwarded assertions +// automatically generates the corresponding requires and forwarded assertions package main @@ -10,6 +10,7 @@ import ( "go/ast" "go/build" "go/doc" + "go/format" "go/importer" "go/parser" "go/token" @@ -19,6 +20,7 @@ import ( "log" "os" "path" + "regexp" "strings" "text/template" @@ -27,6 +29,7 @@ import ( var ( pkg = flag.String("assert-path", "github.com/stretchr/testify/assert", "Path to the assert package") + includeF = flag.Bool("include-format-funcs", false, "include format functions such as Errorf and Equalf") outputPkg = flag.String("output-package", "", "package for the resulting code") tmplFile = flag.String("template", "", "What file to load the function template from") out = flag.String("out", "", "What file to write the source code to") @@ -77,13 +80,18 @@ func generateCode(importer imports.Importer, funcs []testFunc) error { } } + code, err := format.Source(buff.Bytes()) + if err != nil { + return err + } + // Write file output, err := outputFile() if err != nil { return err } defer output.Close() - _, err = io.Copy(output, buff) + _, err = io.Copy(output, bytes.NewReader(code)) return err } @@ -133,7 +141,7 @@ func analyzeCode(scope *types.Scope, docs *doc.Package) (imports.Importer, []tes if !ok { continue } - // Check function signatuer has at least two arguments + // Check function signature has at least two arguments sig := fn.Type().(*types.Signature) if sig.Params().Len() < 2 { continue @@ -151,13 +159,18 @@ func analyzeCode(scope *types.Scope, docs *doc.Package) (imports.Importer, []tes continue } + // Skip functions ending with f + if strings.HasSuffix(fdocs.Name, "f") && !*includeF { + continue + } + funcs = append(funcs, testFunc{*outputPkg, fdocs, fn}) importer.AddImportsFrom(sig.Params()) } return importer, funcs, nil } -// parsePackageSource returns the types scope and the package documentation from the pa +// parsePackageSource returns the types scope and the package documentation from the package func parsePackageSource(pkg string) (*types.Scope, *doc.Package, error) { pd, err := build.Import(pkg, ".", 0) if err != nil { @@ -258,10 +271,26 @@ func (f *testFunc) ForwardedParams() string { return p } +func (f *testFunc) ParamsFormat() string { + return strings.Replace(f.Params(), "msgAndArgs", "msg string, args", 1) +} + +func (f *testFunc) ForwardedParamsFormat() string { + return strings.Replace(f.ForwardedParams(), "msgAndArgs", "append([]interface{}{msg}, args...)", 1) +} + func (f *testFunc) Comment() string { return "// " + strings.Replace(strings.TrimSpace(f.DocInfo.Doc), "\n", "\n// ", -1) } +func (f *testFunc) CommentFormat() string { + search := fmt.Sprintf("%s", f.DocInfo.Name) + replace := fmt.Sprintf("%sf", f.DocInfo.Name) + comment := strings.Replace(f.Comment(), search, replace, -1) + exp := regexp.MustCompile(replace + `\(((\(\)|[^)])+)\)`) + return exp.ReplaceAllString(comment, replace+`($1, "error message %s", "formatted")`) +} + func (f *testFunc) CommentWithoutT(receiver string) string { search := fmt.Sprintf("assert.%s(t, ", f.DocInfo.Name) replace := fmt.Sprintf("%s.%s(", receiver, f.DocInfo.Name) diff --git 
a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go new file mode 100644 index 00000000..ae06a54e --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -0,0 +1,349 @@ +/* +* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen +* THIS FILE MUST NOT BE EDITED BY HAND + */ + +package assert + +import ( + http "net/http" + url "net/url" + time "time" +) + +// Conditionf uses a Comparison to assert a complex condition. +func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bool { + return Condition(t, comp, append([]interface{}{msg}, args...)...) +} + +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { + return Contains(t, s, contains, append([]interface{}{msg}, args...)...) +} + +// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExistsf(t TestingT, path string, msg string, args ...interface{}) bool { + return DirExists(t, path, append([]interface{}{msg}, args...)...) +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) bool { + return ElementsMatch(t, listA, listB, append([]interface{}{msg}, args...)...) +} + +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// assert.Emptyf(t, obj, "error message %s", "formatted") +func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + return Empty(t, object, append([]interface{}{msg}, args...)...) +} + +// Equalf asserts that two objects are equal. +// +// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return Equal(t, expected, actual, append([]interface{}{msg}, args...)...) +} + +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool { + return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...) +} + +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. 
+// +// assert.EqualValuesf(t, uint32(123, "error message %s", "formatted"), int32(123)) +func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return EqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) +} + +// Errorf asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { + return Error(t, err, append([]interface{}{msg}, args...)...) +} + +// Exactlyf asserts that two objects are equal in value and type. +// +// assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) +func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return Exactly(t, expected, actual, append([]interface{}{msg}, args...)...) +} + +// Failf reports a failure through +func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { + return Fail(t, failureMessage, append([]interface{}{msg}, args...)...) +} + +// FailNowf fails test +func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { + return FailNow(t, failureMessage, append([]interface{}{msg}, args...)...) +} + +// Falsef asserts that the specified value is false. +// +// assert.Falsef(t, myBool, "error message %s", "formatted") +func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { + return False(t, value, append([]interface{}{msg}, args...)...) +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool { + return FileExists(t, path, append([]interface{}{msg}, args...)...) +} + +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// assert.HTTPBodyContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { + return HTTPBodyContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...) +} + +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// assert.HTTPBodyNotContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { + return HTTPBodyNotContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...) +} + +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). 
+func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPError(t, handler, method, url, values, append([]interface{}{msg}, args...)...) +} + +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPRedirect(t, handler, method, url, values, append([]interface{}{msg}, args...)...) +} + +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPSuccess(t, handler, method, url, values, append([]interface{}{msg}, args...)...) +} + +// Implementsf asserts that an object is implemented by the specified interface. +// +// assert.Implementsf(t, (*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { + return Implements(t, interfaceObject, object, append([]interface{}{msg}, args...)...) +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDelta(t, expected, actual, delta, append([]interface{}{msg}, args...)...) +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDeltaMapValues(t, expected, actual, delta, append([]interface{}{msg}, args...)...) +} + +// InDeltaSlicef is the same as InDelta, except it compares two slices. +func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDeltaSlice(t, expected, actual, delta, append([]interface{}{msg}, args...)...) +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { + return InEpsilon(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) +} + +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { + return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) +} + +// IsTypef asserts that the specified objects are of the same type. 
+func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { + return IsType(t, expectedType, object, append([]interface{}{msg}, args...)...) +} + +// JSONEqf asserts that two JSON strings are equivalent. +// +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { + return JSONEq(t, expected, actual, append([]interface{}{msg}, args...)...) +} + +// Lenf asserts that the specified object has specific length. +// Lenf also fails if the object has a type that len() not accept. +// +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool { + return Len(t, object, length, append([]interface{}{msg}, args...)...) +} + +// Nilf asserts that the specified object is nil. +// +// assert.Nilf(t, err, "error message %s", "formatted") +func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + return Nil(t, object, append([]interface{}{msg}, args...)...) +} + +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { + return NoError(t, err, append([]interface{}{msg}, args...)...) +} + +// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { + return NotContains(t, s, contains, append([]interface{}{msg}, args...)...) +} + +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + return NotEmpty(t, object, append([]interface{}{msg}, args...)...) +} + +// NotEqualf asserts that the specified values are NOT equal. +// +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return NotEqual(t, expected, actual, append([]interface{}{msg}, args...)...) +} + +// NotNilf asserts that the specified object is not nil. +// +// assert.NotNilf(t, err, "error message %s", "formatted") +func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + return NotNil(t, object, append([]interface{}{msg}, args...)...) +} + +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. 
+// +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { + return NotPanics(t, f, append([]interface{}{msg}, args...)...) +} + +// NotRegexpf asserts that a specified regexp does not match a string. +// +// assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { + return NotRegexp(t, rx, str, append([]interface{}{msg}, args...)...) +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { + return NotSubset(t, list, subset, append([]interface{}{msg}, args...)...) +} + +// NotZerof asserts that i is not the zero value for its type. +func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { + return NotZero(t, i, append([]interface{}{msg}, args...)...) +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { + return Panics(t, f, append([]interface{}{msg}, args...)...) +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { + return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...) +} + +// Regexpf asserts that a specified regexp matches a string. +// +// assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { + return Regexp(t, rx, str, append([]interface{}{msg}, args...)...) +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { + return Subset(t, list, subset, append([]interface{}{msg}, args...)...) +} + +// Truef asserts that the specified value is true. +// +// assert.Truef(t, myBool, "error message %s", "formatted") +func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { + return True(t, value, append([]interface{}{msg}, args...)...) +} + +// WithinDurationf asserts that the two times are within duration delta of each other. 
+// +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { + return WithinDuration(t, expected, actual, delta, append([]interface{}{msg}, args...)...) +} + +// Zerof asserts that i is the zero value for its type. +func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { + return Zero(t, i, append([]interface{}{msg}, args...)...) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl new file mode 100644 index 00000000..c5cc66f4 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl @@ -0,0 +1,4 @@ +{{.CommentFormat}} +func {{.DocInfo.Name}}f(t TestingT, {{.ParamsFormat}}) bool { + return {{.DocInfo.Name}}(t, {{.ForwardedParamsFormat}}) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index e6a79604..ffa5428f 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -1,387 +1,686 @@ /* * CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen * THIS FILE MUST NOT BE EDITED BY HAND -*/ + */ package assert import ( - http "net/http" url "net/url" time "time" ) - // Condition uses a Comparison to assert a complex condition. func (a *Assertions) Condition(comp Comparison, msgAndArgs ...interface{}) bool { return Condition(a.t, comp, msgAndArgs...) } +// Conditionf uses a Comparison to assert a complex condition. +func (a *Assertions) Conditionf(comp Comparison, msg string, args ...interface{}) bool { + return Conditionf(a.t, comp, msg, args...) +} // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. -// -// a.Contains("Hello World", "World", "But 'Hello World' does contain 'World'") -// a.Contains(["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") -// a.Contains({"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { return Contains(a.t, s, contains, msgAndArgs...) } +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { + return Containsf(a.t, s, contains, msg, args...) +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) bool { + return DirExists(a.t, path, msgAndArgs...) +} + +// DirExistsf checks whether a directory exists in the given path. 
It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) bool { + return DirExistsf(a.t, path, msg, args...) +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2]) +func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) bool { + return ElementsMatch(a.t, listA, listB, msgAndArgs...) +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) bool { + return ElementsMatchf(a.t, listA, listB, msg, args...) +} // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. -// +// // a.Empty(obj) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { return Empty(a.t, object, msgAndArgs...) } +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// a.Emptyf(obj, "error message %s", "formatted") +func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool { + return Emptyf(a.t, object, msg, args...) +} // Equal asserts that two objects are equal. -// -// a.Equal(123, 123, "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Equal(123, 123) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { return Equal(a.t, expected, actual, msgAndArgs...) } - // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. -// +// // actualObj, err := SomeFunction() -// if assert.Error(t, err, "An error was expected") { -// assert.Equal(t, err, expectedError) -// } -// -// Returns whether the assertion was successful (true) or not (false). +// a.EqualError(err, expectedErrorString) func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool { return EqualError(a.t, theError, errString, msgAndArgs...) } +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) bool { + return EqualErrorf(a.t, theError, errString, msg, args...) +} // EqualValues asserts that two objects are equal or convertable to the same types // and equal. 
-// -// a.EqualValues(uint32(123), int32(123), "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.EqualValues(uint32(123), int32(123)) func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { return EqualValues(a.t, expected, actual, msgAndArgs...) } +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. +// +// a.EqualValuesf(uint32(123, "error message %s", "formatted"), int32(123)) +func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return EqualValuesf(a.t, expected, actual, msg, args...) +} + +// Equalf asserts that two objects are equal. +// +// a.Equalf(123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return Equalf(a.t, expected, actual, msg, args...) +} // Error asserts that a function returned an error (i.e. not `nil`). -// +// // actualObj, err := SomeFunction() -// if a.Error(err, "An error was expected") { -// assert.Equal(t, err, expectedError) +// if a.Error(err) { +// assert.Equal(t, expectedError, err) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { return Error(a.t, err, msgAndArgs...) } +// Errorf asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { + return Errorf(a.t, err, msg, args...) +} -// Exactly asserts that two objects are equal is value and type. -// -// a.Exactly(int32(123), int64(123), "123 and 123 should NOT be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// Exactly asserts that two objects are equal in value and type. +// +// a.Exactly(int32(123), int64(123)) func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { return Exactly(a.t, expected, actual, msgAndArgs...) } +// Exactlyf asserts that two objects are equal in value and type. +// +// a.Exactlyf(int32(123, "error message %s", "formatted"), int64(123)) +func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return Exactlyf(a.t, expected, actual, msg, args...) +} // Fail reports a failure through func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) bool { return Fail(a.t, failureMessage, msgAndArgs...) } - // FailNow fails test func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) bool { return FailNow(a.t, failureMessage, msgAndArgs...) } +// FailNowf fails test +func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) bool { + return FailNowf(a.t, failureMessage, msg, args...) +} + +// Failf reports a failure through +func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) bool { + return Failf(a.t, failureMessage, msg, args...) +} // False asserts that the specified value is false. 
-// -// a.False(myBool, "myBool should be false") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.False(myBool) func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { return False(a.t, value, msgAndArgs...) } +// Falsef asserts that the specified value is false. +// +// a.Falsef(myBool, "error message %s", "formatted") +func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool { + return Falsef(a.t, value, msg, args...) +} + +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) bool { + return FileExists(a.t, path, msgAndArgs...) +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) bool { + return FileExistsf(a.t, path, msg, args...) +} // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. -// +// // a.HTTPBodyContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { - return HTTPBodyContains(a.t, handler, method, url, values, str) +func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { + return HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...) } +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// a.HTTPBodyContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { + return HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...) +} // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. -// +// // a.HTTPBodyNotContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { - return HTTPBodyNotContains(a.t, handler, method, url, values, str) +func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { + return HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...) } +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// a.HTTPBodyNotContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). 
+func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { + return HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...) +} // HTTPError asserts that a specified handler returns an error status code. -// +// // a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values) bool { - return HTTPError(a.t, handler, method, url, values) +func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { + return HTTPError(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPErrorf(a.t, handler, method, url, values, msg, args...) +} // HTTPRedirect asserts that a specified handler returns a redirect status code. -// +// // a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values) bool { - return HTTPRedirect(a.t, handler, method, url, values) +func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { + return HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPRedirectf(a.t, handler, method, url, values, msg, args...) +} // HTTPSuccess asserts that a specified handler returns a success status code. -// +// // a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values) bool { - return HTTPSuccess(a.t, handler, method, url, values) +func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { + return HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). 
+func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { + return HTTPSuccessf(a.t, handler, method, url, values, msg, args...) +} // Implements asserts that an object is implemented by the specified interface. -// -// a.Implements((*MyInterface)(nil), new(MyObject), "MyObject") +// +// a.Implements((*MyInterface)(nil), new(MyObject)) func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { return Implements(a.t, interfaceObject, object, msgAndArgs...) } +// Implementsf asserts that an object is implemented by the specified interface. +// +// a.Implementsf((*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { + return Implementsf(a.t, interfaceObject, object, msg, args...) +} // InDelta asserts that the two numerals are within delta of each other. -// +// // a.InDelta(math.Pi, (22 / 7.0), 0.01) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { return InDelta(a.t, expected, actual, delta, msgAndArgs...) } +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + return InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...) +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...) +} // InDeltaSlice is the same as InDelta, except it compares two slices. func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { return InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...) } +// InDeltaSlicef is the same as InDelta, except it compares two slices. +func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDeltaSlicef(a.t, expected, actual, delta, msg, args...) +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { + return InDeltaf(a.t, expected, actual, delta, msg, args...) +} // InEpsilon asserts that expected and actual have a relative error less than epsilon -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { return InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...) } - -// InEpsilonSlice is the same as InEpsilon, except it compares two slices. -func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - return InEpsilonSlice(a.t, expected, actual, delta, msgAndArgs...) 
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { + return InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...) } +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { + return InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...) +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { + return InEpsilonf(a.t, expected, actual, epsilon, msg, args...) +} // IsType asserts that the specified objects are of the same type. func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { return IsType(a.t, expectedType, object, msgAndArgs...) } +// IsTypef asserts that the specified objects are of the same type. +func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { + return IsTypef(a.t, expectedType, object, msg, args...) +} // JSONEq asserts that two JSON strings are equivalent. -// +// // a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool { return JSONEq(a.t, expected, actual, msgAndArgs...) } +// JSONEqf asserts that two JSON strings are equivalent. +// +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) bool { + return JSONEqf(a.t, expected, actual, msg, args...) +} // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. -// -// a.Len(mySlice, 3, "The size of slice is not 3") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Len(mySlice, 3) func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool { return Len(a.t, object, length, msgAndArgs...) } +// Lenf asserts that the specified object has specific length. +// Lenf also fails if the object has a type that len() not accept. +// +// a.Lenf(mySlice, 3, "error message %s", "formatted") +func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) bool { + return Lenf(a.t, object, length, msg, args...) +} // Nil asserts that the specified object is nil. -// -// a.Nil(err, "err should be nothing") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Nil(err) func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { return Nil(a.t, object, msgAndArgs...) } +// Nilf asserts that the specified object is nil. +// +// a.Nilf(err, "error message %s", "formatted") +func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) bool { + return Nilf(a.t, object, msg, args...) +} // NoError asserts that a function returned no error (i.e. `nil`). 
-// +// // actualObj, err := SomeFunction() // if a.NoError(err) { -// assert.Equal(t, actualObj, expectedObj) +// assert.Equal(t, expectedObj, actualObj) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { return NoError(a.t, err, msgAndArgs...) } +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool { + return NoErrorf(a.t, err, msg, args...) +} // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. -// -// a.NotContains("Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") -// a.NotContains(["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") -// a.NotContains({"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { return NotContains(a.t, s, contains, msgAndArgs...) } +// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { + return NotContainsf(a.t, s, contains, msg, args...) +} // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. -// +// // if a.NotEmpty(obj) { // assert.Equal(t, "two", obj[1]) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool { return NotEmpty(a.t, object, msgAndArgs...) } +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) bool { + return NotEmptyf(a.t, object, msg, args...) +} // NotEqual asserts that the specified values are NOT equal. -// -// a.NotEqual(obj1, obj2, "two objects shouldn't be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotEqual(obj1, obj2) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { return NotEqual(a.t, expected, actual, msgAndArgs...) } +// NotEqualf asserts that the specified values are NOT equal. 
+// +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + return NotEqualf(a.t, expected, actual, msg, args...) +} // NotNil asserts that the specified object is not nil. -// -// a.NotNil(err, "err should be something") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotNil(err) func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool { return NotNil(a.t, object, msgAndArgs...) } +// NotNilf asserts that the specified object is not nil. +// +// a.NotNilf(err, "error message %s", "formatted") +func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) bool { + return NotNilf(a.t, object, msg, args...) +} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// a.NotPanics(func(){ -// RemainCalm() -// }, "Calling RemainCalm() should NOT panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotPanics(func(){ RemainCalm() }) func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool { return NotPanics(a.t, f, msgAndArgs...) } +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{}) bool { + return NotPanicsf(a.t, f, msg, args...) +} // NotRegexp asserts that a specified regexp does not match a string. -// +// // a.NotRegexp(regexp.MustCompile("starts"), "it's starting") // a.NotRegexp("^start", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { return NotRegexp(a.t, rx, str, msgAndArgs...) } +// NotRegexpf asserts that a specified regexp does not match a string. +// +// a.NotRegexpf(regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { + return NotRegexpf(a.t, rx, str, msg, args...) +} -// NotZero asserts that i is not the zero value for its type and returns the truth. +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { + return NotSubset(a.t, list, subset, msgAndArgs...) +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { + return NotSubsetf(a.t, list, subset, msg, args...) +} + +// NotZero asserts that i is not the zero value for its type. 
func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) bool { return NotZero(a.t, i, msgAndArgs...) } +// NotZerof asserts that i is not the zero value for its type. +func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) bool { + return NotZerof(a.t, i, msg, args...) +} // Panics asserts that the code inside the specified PanicTestFunc panics. -// -// a.Panics(func(){ -// GoCrazy() -// }, "Calling GoCrazy() should panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Panics(func(){ GoCrazy() }) func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { return Panics(a.t, f, msgAndArgs...) } +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { + return PanicsWithValue(a.t, expected, f, msgAndArgs...) +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { + return PanicsWithValuef(a.t, expected, f, msg, args...) +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) bool { + return Panicsf(a.t, f, msg, args...) +} // Regexp asserts that a specified regexp matches a string. -// +// // a.Regexp(regexp.MustCompile("start"), "it's starting") // a.Regexp("start...$", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { return Regexp(a.t, rx, str, msgAndArgs...) } +// Regexpf asserts that a specified regexp matches a string. +// +// a.Regexpf(regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { + return Regexpf(a.t, rx, str, msg, args...) +} + +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { + return Subset(a.t, list, subset, msgAndArgs...) +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { + return Subsetf(a.t, list, subset, msg, args...) +} // True asserts that the specified value is true. -// -// a.True(myBool, "myBool should be true") -// -// Returns whether the assertion was successful (true) or not (false). 
+// +// a.True(myBool) func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { return True(a.t, value, msgAndArgs...) } +// Truef asserts that the specified value is true. +// +// a.Truef(myBool, "error message %s", "formatted") +func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { + return Truef(a.t, value, msg, args...) +} // WithinDuration asserts that the two times are within duration delta of each other. -// -// a.WithinDuration(time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { return WithinDuration(a.t, expected, actual, delta, msgAndArgs...) } +// WithinDurationf asserts that the two times are within duration delta of each other. +// +// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { + return WithinDurationf(a.t, expected, actual, delta, msg, args...) +} -// Zero asserts that i is the zero value for its type and returns the truth. +// Zero asserts that i is the zero value for its type. func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool { return Zero(a.t, i, msgAndArgs...) } + +// Zerof asserts that i is the zero value for its type. +func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) bool { + return Zerof(a.t, i, msg, args...) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index b3f4e170..47bda778 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -4,8 +4,10 @@ import ( "bufio" "bytes" "encoding/json" + "errors" "fmt" "math" + "os" "reflect" "regexp" "runtime" @@ -18,9 +20,7 @@ import ( "github.com/pmezard/go-difflib/difflib" ) -func init() { - spew.Config.SortKeys = true -} +//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_format.go.tmpl // TestingT is an interface wrapper around *testing.T type TestingT interface { @@ -42,7 +42,15 @@ func ObjectsAreEqual(expected, actual interface{}) bool { if expected == nil || actual == nil { return expected == actual } - + if exp, ok := expected.([]byte); ok { + act, ok := actual.([]byte) + if !ok { + return false + } else if exp == nil || act == nil { + return exp == nil && act == nil + } + return bytes.Equal(exp, act) + } return reflect.DeepEqual(expected, actual) } @@ -112,10 +120,12 @@ func CallerInfo() []string { } parts := strings.Split(file, "/") - dir := parts[len(parts)-2] file = parts[len(parts)-1] - if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" { - callers = append(callers, fmt.Sprintf("%s:%d", file, line)) + if len(parts) > 1 { + dir := parts[len(parts)-2] + if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" { + callers = append(callers, fmt.Sprintf("%s:%d", file, line)) + } } // Drop the package @@ -157,7 +167,7 @@ func getWhitespaceString() string { parts := strings.Split(file, "/") file = parts[len(parts)-1] - return strings.Repeat(" ", len(fmt.Sprintf("%s:%d: ", file, line))) + return 
strings.Repeat(" ", len(fmt.Sprintf("%s:%d: ", file, line))) } @@ -174,22 +184,18 @@ func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { return "" } -// Indents all lines of the message by appending a number of tabs to each line, in an output format compatible with Go's -// test printing (see inner comment for specifics) -func indentMessageLines(message string, tabs int) string { +// Aligns the provided message so that all lines after the first line start at the same location as the first line. +// Assumes that the first line starts at the correct location (after carriage return, tab, label, spacer and tab). +// The longestLabelLen parameter specifies the length of the longest label in the output (required becaues this is the +// basis on which the alignment occurs). +func indentMessageLines(message string, longestLabelLen int) string { outBuf := new(bytes.Buffer) for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ { + // no need to align first line because it starts at the correct location (after the label) if i != 0 { - outBuf.WriteRune('\n') - } - for ii := 0; ii < tabs; ii++ { - outBuf.WriteRune('\t') - // Bizarrely, all lines except the first need one fewer tabs prepended, so deliberately advance the counter - // by 1 prematurely. - if ii == 0 && i > 0 { - ii++ - } + // append alignLen+1 spaces to align with "{{longestLabel}}:" before adding tab + outBuf.WriteString("\n\r\t" + strings.Repeat(" ", longestLabelLen+1) + "\t") } outBuf.WriteString(scanner.Text()) } @@ -221,42 +227,70 @@ func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool // Fail reports a failure through func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool { + content := []labeledContent{ + {"Error Trace", strings.Join(CallerInfo(), "\n\r\t\t\t")}, + {"Error", failureMessage}, + } + + // Add test name if the Go version supports it + if n, ok := t.(interface { + Name() string + }); ok { + content = append(content, labeledContent{"Test", n.Name()}) + } message := messageFromMsgAndArgs(msgAndArgs...) - - errorTrace := strings.Join(CallerInfo(), "\n\r\t\t\t") if len(message) > 0 { - t.Errorf("\r%s\r\tError Trace:\t%s\n"+ - "\r\tError:%s\n"+ - "\r\tMessages:\t%s\n\r", - getWhitespaceString(), - errorTrace, - indentMessageLines(failureMessage, 2), - message) - } else { - t.Errorf("\r%s\r\tError Trace:\t%s\n"+ - "\r\tError:%s\n\r", - getWhitespaceString(), - errorTrace, - indentMessageLines(failureMessage, 2)) + content = append(content, labeledContent{"Messages", message}) } + t.Errorf("%s", "\r"+getWhitespaceString()+labeledOutput(content...)) + return false } +type labeledContent struct { + label string + content string +} + +// labeledOutput returns a string consisting of the provided labeledContent. Each labeled output is appended in the following manner: +// +// \r\t{{label}}:{{align_spaces}}\t{{content}}\n +// +// The initial carriage return is required to undo/erase any padding added by testing.T.Errorf. The "\t{{label}}:" is for the label. +// If a label is shorter than the longest label provided, padding spaces are added to make all the labels match in length. Once this +// alignment is achieved, "\t{{content}}\n" is added for the output. +// +// If the content of the labeledOutput contains line breaks, the subsequent lines are aligned so that they start at the same location as the first line. 
+func labeledOutput(content ...labeledContent) string { + longestLabel := 0 + for _, v := range content { + if len(v.label) > longestLabel { + longestLabel = len(v.label) + } + } + var output string + for _, v := range content { + output += "\r\t" + v.label + ":" + strings.Repeat(" ", longestLabel-len(v.label)) + "\t" + indentMessageLines(v.content, longestLabel) + "\n" + } + return output +} + // Implements asserts that an object is implemented by the specified interface. // -// assert.Implements(t, (*MyInterface)(nil), new(MyObject), "MyObject") +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { - interfaceType := reflect.TypeOf(interfaceObject).Elem() + if object == nil { + return Fail(t, fmt.Sprintf("Cannot check if nil implements %v", interfaceType), msgAndArgs...) + } if !reflect.TypeOf(object).Implements(interfaceType) { return Fail(t, fmt.Sprintf("%T must implement %v", object, interfaceType), msgAndArgs...) } return true - } // IsType asserts that the specified objects are of the same type. @@ -271,16 +305,23 @@ func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs // Equal asserts that two objects are equal. // -// assert.Equal(t, 123, 123, "123 and 123 should be equal") +// assert.Equal(t, 123, 123) // -// Returns whether the assertion was successful (true) or not (false). +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if err := validateEqualArgs(expected, actual); err != nil { + return Fail(t, fmt.Sprintf("Invalid operation: %#v == %#v (%s)", + expected, actual, err), msgAndArgs...) + } if !ObjectsAreEqual(expected, actual) { diff := diff(expected, actual) expected, actual = formatUnequalValues(expected, actual) - return Fail(t, fmt.Sprintf("Not equal: %s (expected)\n"+ - " != %s (actual)%s", expected, actual, diff), msgAndArgs...) + return Fail(t, fmt.Sprintf("Not equal: \n"+ + "expected: %s\n"+ + "actual : %s%s", expected, actual, diff), msgAndArgs...) } return true @@ -294,53 +335,36 @@ func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) // with the type name, and the value will be enclosed in parenthesis similar // to a type conversion in the Go grammar. func formatUnequalValues(expected, actual interface{}) (e string, a string) { - aType := reflect.TypeOf(expected) - bType := reflect.TypeOf(actual) - - if aType != bType && isNumericType(aType) && isNumericType(bType) { - return fmt.Sprintf("%v(%#v)", aType, expected), - fmt.Sprintf("%v(%#v)", bType, actual) + if reflect.TypeOf(expected) != reflect.TypeOf(actual) { + return fmt.Sprintf("%T(%#v)", expected, expected), + fmt.Sprintf("%T(%#v)", actual, actual) } return fmt.Sprintf("%#v", expected), fmt.Sprintf("%#v", actual) } -func isNumericType(t reflect.Type) bool { - switch t.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return true - case reflect.Float32, reflect.Float64: - return true - } - - return false -} - // EqualValues asserts that two objects are equal or convertable to the same types // and equal. 
// -// assert.EqualValues(t, uint32(123), int32(123), "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if !ObjectsAreEqualValues(expected, actual) { - return Fail(t, fmt.Sprintf("Not equal: %#v (expected)\n"+ - " != %#v (actual)", expected, actual), msgAndArgs...) + diff := diff(expected, actual) + expected, actual = formatUnequalValues(expected, actual) + return Fail(t, fmt.Sprintf("Not equal: \n"+ + "expected: %s\n"+ + "actual : %s%s", expected, actual, diff), msgAndArgs...) } return true } -// Exactly asserts that two objects are equal is value and type. +// Exactly asserts that two objects are equal in value and type. // -// assert.Exactly(t, int32(123), int64(123), "123 and 123 should NOT be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { aType := reflect.TypeOf(expected) @@ -356,9 +380,7 @@ func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} // NotNil asserts that the specified object is not nil. // -// assert.NotNil(t, err, "err should be something") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if !isNil(object) { return true @@ -383,9 +405,7 @@ func isNil(object interface{}) bool { // Nil asserts that the specified object is nil. // -// assert.Nil(t, err, "err should be nothing") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if isNil(object) { return true @@ -393,74 +413,38 @@ func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return Fail(t, fmt.Sprintf("Expected nil, but got: %#v", object), msgAndArgs...) } -var numericZeros = []interface{}{ - int(0), - int8(0), - int16(0), - int32(0), - int64(0), - uint(0), - uint8(0), - uint16(0), - uint32(0), - uint64(0), - float32(0), - float64(0), -} - // isEmpty gets whether the specified object is considered empty or not. 
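The rewritten isEmpty below drops the hand-maintained table of numeric zeros: a value is now empty when it is nil, a collection with len == 0, a nil pointer (or a pointer to an empty value), or simply the zero value of its type. A minimal sketch of the resulting behaviour, not part of the vendored patch (package and test names are invented):

package clair_test // hypothetical

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestEmptySketch(t *testing.T) {
	assert.Empty(t, "")           // zero value of string
	assert.Empty(t, 0)            // zero value of int
	assert.Empty(t, []int{})      // collection with len == 0
	assert.Empty(t, &time.Time{}) // pointer to a zero value
	assert.NotEmpty(t, []int{1})  // non-empty collection
	assert.NotEmpty(t, 1)         // non-zero value
}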
func isEmpty(object interface{}) bool { + // get nil case out of the way if object == nil { return true - } else if object == "" { - return true - } else if object == false { - return true - } - - for _, v := range numericZeros { - if object == v { - return true - } } objValue := reflect.ValueOf(object) switch objValue.Kind() { - case reflect.Map: - fallthrough - case reflect.Slice, reflect.Chan: - { - return (objValue.Len() == 0) - } - case reflect.Struct: - switch object.(type) { - case time.Time: - return object.(time.Time).IsZero() - } + // collection types are empty when they have no element + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + return objValue.Len() == 0 + // pointers are empty if nil or if the value they point to is empty case reflect.Ptr: - { - if objValue.IsNil() { - return true - } - switch object.(type) { - case *time.Time: - return object.(*time.Time).IsZero() - default: - return false - } + if objValue.IsNil() { + return true } + deref := objValue.Elem().Interface() + return isEmpty(deref) + // for all other types, compare against the zero value + default: + zero := reflect.Zero(objValue.Type()) + return reflect.DeepEqual(object, zero.Interface()) } - return false } // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // // assert.Empty(t, obj) -// -// Returns whether the assertion was successful (true) or not (false). func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := isEmpty(object) @@ -478,8 +462,6 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { // if assert.NotEmpty(t, obj) { // assert.Equal(t, "two", obj[1]) // } -// -// Returns whether the assertion was successful (true) or not (false). func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := !isEmpty(object) @@ -506,9 +488,7 @@ func getLen(x interface{}) (ok bool, length int) { // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// assert.Len(t, mySlice, 3, "The size of slice is not 3") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { ok, l := getLen(object) if !ok { @@ -523,9 +503,7 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) // True asserts that the specified value is true. // -// assert.True(t, myBool, "myBool should be true") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { if value != true { @@ -538,9 +516,7 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { // False asserts that the specified value is false. // -// assert.False(t, myBool, "myBool should be false") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { if value != false { @@ -553,10 +529,15 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { // NotEqual asserts that the specified values are NOT equal. // -// assert.NotEqual(t, obj1, obj2, "two objects shouldn't be equal") +// assert.NotEqual(t, obj1, obj2) // -// Returns whether the assertion was successful (true) or not (false). 
+// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if err := validateEqualArgs(expected, actual); err != nil { + return Fail(t, fmt.Sprintf("Invalid operation: %#v != %#v (%s)", + expected, actual, err), msgAndArgs...) + } if ObjectsAreEqual(expected, actual) { return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...) @@ -607,11 +588,9 @@ func includeElement(list interface{}, element interface{}) (ok, found bool) { // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Contains(t, "Hello World", "World", "But 'Hello World' does contain 'World'") -// assert.Contains(t, ["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") -// assert.Contains(t, {"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { ok, found := includeElement(s, contains) @@ -629,11 +608,9 @@ func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bo // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContains(t, "Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") -// assert.NotContains(t, ["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") -// assert.NotContains(t, {"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { ok, found := includeElement(s, contains) @@ -648,6 +625,142 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) } +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { + if subset == nil { + return true // we consider nil to be equal to the nil set + } + + subsetValue := reflect.ValueOf(subset) + defer func() { + if e := recover(); e != nil { + ok = false + } + }() + + listKind := reflect.TypeOf(list).Kind() + subsetKind := reflect.TypeOf(subset).Kind() + + if listKind != reflect.Array && listKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) + } + + if subsetKind != reflect.Array && subsetKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) 
+ } + + for i := 0; i < subsetValue.Len(); i++ { + element := subsetValue.Index(i).Interface() + ok, found := includeElement(list, element) + if !ok { + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) + } + if !found { + return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", list, element), msgAndArgs...) + } + } + + return true +} + +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { + if subset == nil { + return Fail(t, fmt.Sprintf("nil is the empty set which is a subset of every set"), msgAndArgs...) + } + + subsetValue := reflect.ValueOf(subset) + defer func() { + if e := recover(); e != nil { + ok = false + } + }() + + listKind := reflect.TypeOf(list).Kind() + subsetKind := reflect.TypeOf(subset).Kind() + + if listKind != reflect.Array && listKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) + } + + if subsetKind != reflect.Array && subsetKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) + } + + for i := 0; i < subsetValue.Len(); i++ { + element := subsetValue.Index(i).Interface() + ok, found := includeElement(list, element) + if !ok { + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) + } + if !found { + return true + } + } + + return Fail(t, fmt.Sprintf("%q is a subset of %q", subset, list), msgAndArgs...) +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2]) +func ElementsMatch(t TestingT, listA, listB interface{}, msgAndArgs ...interface{}) (ok bool) { + if isEmpty(listA) && isEmpty(listB) { + return true + } + + aKind := reflect.TypeOf(listA).Kind() + bKind := reflect.TypeOf(listB).Kind() + + if aKind != reflect.Array && aKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", listA, aKind), msgAndArgs...) + } + + if bKind != reflect.Array && bKind != reflect.Slice { + return Fail(t, fmt.Sprintf("%q has an unsupported type %s", listB, bKind), msgAndArgs...) + } + + aValue := reflect.ValueOf(listA) + bValue := reflect.ValueOf(listB) + + aLen := aValue.Len() + bLen := bValue.Len() + + if aLen != bLen { + return Fail(t, fmt.Sprintf("lengths don't match: %d != %d", aLen, bLen), msgAndArgs...) + } + + // Mark indexes in bValue that we already used + visited := make([]bool, bLen) + for i := 0; i < aLen; i++ { + element := aValue.Index(i).Interface() + found := false + for j := 0; j < bLen; j++ { + if visited[j] { + continue + } + if ObjectsAreEqual(bValue.Index(j).Interface(), element) { + visited[j] = true + found = true + break + } + } + if !found { + return Fail(t, fmt.Sprintf("element %s appears more times in %s than in %s", element, aValue, bValue), msgAndArgs...) + } + } + + return true +} + // Condition uses a Comparison to assert a complex condition. 
func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool { result := comp() @@ -685,11 +798,7 @@ func didPanic(f PanicTestFunc) (bool, interface{}) { // Panics asserts that the code inside the specified PanicTestFunc panics. // -// assert.Panics(t, func(){ -// GoCrazy() -// }, "Calling GoCrazy() should panic") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if funcDidPanic, panicValue := didPanic(f); !funcDidPanic { @@ -699,13 +808,26 @@ func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { return true } +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { + + funcDidPanic, panicValue := didPanic(f) + if !funcDidPanic { + return Fail(t, fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue), msgAndArgs...) + } + if panicValue != expected { + return Fail(t, fmt.Sprintf("func %#v should panic with value:\t%v\n\r\tPanic value:\t%v", f, expected, panicValue), msgAndArgs...) + } + + return true +} + // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanics(t, func(){ -// RemainCalm() -// }, "Calling RemainCalm() should NOT panic") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if funcDidPanic, panicValue := didPanic(f); funcDidPanic { @@ -717,9 +839,7 @@ func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { // WithinDuration asserts that the two times are within duration delta of each other. // -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { dt := expected.Sub(actual) @@ -757,6 +877,8 @@ func toFloat(x interface{}) (float64, bool) { xf = float64(xn) case float64: xf = float64(xn) + case time.Duration: + xf = float64(xn) default: xok = false } @@ -767,8 +889,6 @@ func toFloat(x interface{}) (float64, bool) { // InDelta asserts that the two numerals are within delta of each other. // // assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) -// -// Returns whether the assertion was successful (true) or not (false). func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { af, aok := toFloat(expected) @@ -779,7 +899,7 @@ func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs } if math.IsNaN(af) { - return Fail(t, fmt.Sprintf("Actual must not be NaN"), msgAndArgs...) + return Fail(t, fmt.Sprintf("Expected must not be NaN"), msgAndArgs...) 
} if math.IsNaN(bf) { @@ -806,7 +926,7 @@ func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAn expectedSlice := reflect.ValueOf(expected) for i := 0; i < actualSlice.Len(); i++ { - result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta) + result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta, msgAndArgs...) if !result { return result } @@ -815,6 +935,47 @@ func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAn return true } +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValues(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + if expected == nil || actual == nil || + reflect.TypeOf(actual).Kind() != reflect.Map || + reflect.TypeOf(expected).Kind() != reflect.Map { + return Fail(t, "Arguments must be maps", msgAndArgs...) + } + + expectedMap := reflect.ValueOf(expected) + actualMap := reflect.ValueOf(actual) + + if expectedMap.Len() != actualMap.Len() { + return Fail(t, "Arguments must have the same number of keys", msgAndArgs...) + } + + for _, k := range expectedMap.MapKeys() { + ev := expectedMap.MapIndex(k) + av := actualMap.MapIndex(k) + + if !ev.IsValid() { + return Fail(t, fmt.Sprintf("missing key %q in expected map", k), msgAndArgs...) + } + + if !av.IsValid() { + return Fail(t, fmt.Sprintf("missing key %q in actual map", k), msgAndArgs...) + } + + if !InDelta( + t, + ev.Interface(), + av.Interface(), + delta, + msgAndArgs..., + ) { + return false + } + } + + return true +} + func calcRelativeError(expected, actual interface{}) (float64, error) { af, aok := toFloat(expected) if !aok { @@ -825,15 +986,13 @@ func calcRelativeError(expected, actual interface{}) (float64, error) { } bf, bok := toFloat(actual) if !bok { - return 0, fmt.Errorf("expected value %q cannot be converted to float", actual) + return 0, fmt.Errorf("actual value %q cannot be converted to float", actual) } return math.Abs(af-bf) / math.Abs(af), nil } // InEpsilon asserts that expected and actual have a relative error less than epsilon -// -// Returns whether the assertion was successful (true) or not (false). func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { actualEpsilon, err := calcRelativeError(expected, actual) if err != nil { @@ -841,7 +1000,7 @@ func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAnd } if actualEpsilon > epsilon { return Fail(t, fmt.Sprintf("Relative error is too high: %#v (expected)\n"+ - " < %#v (actual)", actualEpsilon, epsilon), msgAndArgs...) + " < %#v (actual)", epsilon, actualEpsilon), msgAndArgs...) } return true @@ -876,13 +1035,11 @@ func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, m // // actualObj, err := SomeFunction() // if assert.NoError(t, err) { -// assert.Equal(t, actualObj, expectedObj) +// assert.Equal(t, expectedObj, actualObj) // } -// -// Returns whether the assertion was successful (true) or not (false). func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { if err != nil { - return Fail(t, fmt.Sprintf("Received unexpected error %+v", err), msgAndArgs...) + return Fail(t, fmt.Sprintf("Received unexpected error:\n%+v", err), msgAndArgs...) 
} return true @@ -891,11 +1048,9 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { // Error asserts that a function returned an error (i.e. not `nil`). // // actualObj, err := SomeFunction() -// if assert.Error(t, err, "An error was expected") { -// assert.Equal(t, err, expectedError) +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) // } -// -// Returns whether the assertion was successful (true) or not (false). func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { if err == nil { @@ -909,18 +1064,20 @@ func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { // and that it is equal to the provided error. // // actualObj, err := SomeFunction() -// assert.EqualError(t, err, expectedErrorString, "An error was expected") -// -// Returns whether the assertion was successful (true) or not (false). +// assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { - - message := messageFromMsgAndArgs(msgAndArgs...) - if !NotNil(t, theError, "An error is expected but got nil. %s", message) { + if !Error(t, theError, msgAndArgs...) { return false } - s := "An error with value \"%s\" is expected but got \"%s\". %s" - return Equal(t, errString, theError.Error(), - s, errString, theError.Error(), message) + expected := errString + actual := theError.Error() + // don't need to use deep equals here, we know they are both strings + if expected != actual { + return Fail(t, fmt.Sprintf("Error message not equal:\n"+ + "expected: %q\n"+ + "actual : %q", expected, actual), msgAndArgs...) + } + return true } // matchRegexp return true if a specified regexp matches a string. @@ -941,8 +1098,6 @@ func matchRegexp(rx interface{}, str interface{}) bool { // // assert.Regexp(t, regexp.MustCompile("start"), "it's starting") // assert.Regexp(t, "start...$", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { match := matchRegexp(rx, str) @@ -958,8 +1113,6 @@ func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface // // assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") // assert.NotRegexp(t, "^start", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { match := matchRegexp(rx, str) @@ -971,7 +1124,7 @@ func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interf } -// Zero asserts that i is the zero value for its type and returns the truth. +// Zero asserts that i is the zero value for its type. func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { if i != nil && !reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { return Fail(t, fmt.Sprintf("Should be zero, but was %v", i), msgAndArgs...) @@ -979,7 +1132,7 @@ func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { return true } -// NotZero asserts that i is not the zero value for its type and returns the truth. +// NotZero asserts that i is not the zero value for its type. func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { if i == nil || reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { return Fail(t, fmt.Sprintf("Should not be zero, but was %v", i), msgAndArgs...) 
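Taken together, this bump vendors several assertions that the previously pinned testify did not have. A minimal, hypothetical sketch of how a test in this repository could exercise them (package and test names are invented; only APIs added by this update are called), not part of the vendored patch:

package clair_test // hypothetical

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestNewTestifyAssertions(t *testing.T) {
	// Order-insensitive comparison; duplicates must occur the same number of times.
	assert.ElementsMatch(t, []int{1, 3, 2, 3}, []int{1, 3, 3, 2})

	// Subset / NotSubset over slices and arrays.
	assert.Subset(t, []string{"high", "medium", "low"}, []string{"low"})
	assert.NotSubset(t, []string{"high", "medium"}, []string{"critical"})

	// The recovered panic value must equal the expected one.
	assert.PanicsWithValue(t, "boom", func() { panic("boom") })

	// Per-key numeric comparison within a delta; both maps must have the same keys.
	assert.InDeltaMapValues(t,
		map[string]float64{"cpu": 1.0},
		map[string]float64{"cpu": 1.01},
		0.1)

	// Each assertion also gains a printf-style variant.
	assert.Truef(t, len("clair") == 5, "unexpected length for %q", "clair")
}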
@@ -987,11 +1140,39 @@ func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { return true } +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { + info, err := os.Lstat(path) + if err != nil { + if os.IsNotExist(err) { + return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...) + } + return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...) + } + if info.IsDir() { + return Fail(t, fmt.Sprintf("%q is a directory", path), msgAndArgs...) + } + return true +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { + info, err := os.Lstat(path) + if err != nil { + if os.IsNotExist(err) { + return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...) + } + return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...) + } + if !info.IsDir() { + return Fail(t, fmt.Sprintf("%q is a file", path), msgAndArgs...) + } + return true +} + // JSONEq asserts that two JSON strings are equivalent. // // assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -// -// Returns whether the assertion was successful (true) or not (false). func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { var expectedJSONAsInterface, actualJSONAsInterface interface{} @@ -1035,8 +1216,8 @@ func diff(expected interface{}, actual interface{}) string { return "" } - e := spew.Sdump(expected) - a := spew.Sdump(actual) + e := spewConfig.Sdump(expected) + a := spewConfig.Sdump(actual) diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ A: difflib.SplitLines(e), @@ -1050,3 +1231,26 @@ func diff(expected interface{}, actual interface{}) string { return "\n\nDiff:\n" + diff } + +// validateEqualArgs checks whether provided arguments can be safely used in the +// Equal/NotEqual functions. 
+func validateEqualArgs(expected, actual interface{}) error { + if isFunction(expected) || isFunction(actual) { + return errors.New("cannot take func type as argument") + } + return nil +} + +func isFunction(arg interface{}) bool { + if arg == nil { + return false + } + return reflect.TypeOf(arg).Kind() == reflect.Func +} + +var spewConfig = spew.ConfigState{ + Indent: " ", + DisablePointerAddresses: true, + DisableCapacities: true, + SortKeys: true, +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions_test.go b/vendor/github.com/stretchr/testify/assert/assertions_test.go index ac9b7017..6757bd13 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions_test.go +++ b/vendor/github.com/stretchr/testify/assert/assertions_test.go @@ -1,12 +1,16 @@ package assert import ( + "bytes" "errors" + "fmt" "io" "math" "os" "reflect" "regexp" + "runtime" + "strings" "testing" "time" ) @@ -151,6 +155,9 @@ func TestImplements(t *testing.T) { if Implements(mockT, (*AssertionTesterInterface)(nil), new(AssertionTesterNonConformingObject)) { t.Error("Implements method should return false: AssertionTesterNonConformingObject does not implements AssertionTesterInterface") } + if Implements(mockT, (*AssertionTesterInterface)(nil), nil) { + t.Error("Implements method should return false: nil does not implement AssertionTesterInterface") + } } @@ -192,7 +199,71 @@ func TestEqual(t *testing.T) { if !Equal(mockT, uint64(123), uint64(123)) { t.Error("Equal should return true") } + if !Equal(mockT, &struct{}{}, &struct{}{}) { + t.Error("Equal should return true (pointer equality is based on equality of underlying value)") + } + var m map[string]interface{} + if Equal(mockT, m["bar"], "something") { + t.Error("Equal should return false") + } +} +// bufferT implements TestingT. Its implementation of Errorf writes the output that would be produced by +// testing.T.Errorf to an internal bytes.Buffer. +type bufferT struct { + buf bytes.Buffer +} + +func (t *bufferT) Errorf(format string, args ...interface{}) { + // implementation of decorate is copied from testing.T + decorate := func(s string) string { + _, file, line, ok := runtime.Caller(3) // decorate + log + public function. + if ok { + // Truncate file name at last file name separator. + if index := strings.LastIndex(file, "/"); index >= 0 { + file = file[index+1:] + } else if index = strings.LastIndex(file, "\\"); index >= 0 { + file = file[index+1:] + } + } else { + file = "???" + line = 1 + } + buf := new(bytes.Buffer) + // Every line is indented at least one tab. + buf.WriteByte('\t') + fmt.Fprintf(buf, "%s:%d: ", file, line) + lines := strings.Split(s, "\n") + if l := len(lines); l > 1 && lines[l-1] == "" { + lines = lines[:l-1] + } + for i, line := range lines { + if i > 0 { + // Second and subsequent lines are indented an extra tab. 
+ buf.WriteString("\n\t\t") + } + buf.WriteString(line) + } + buf.WriteByte('\n') + return buf.String() + } + t.buf.WriteString(decorate(fmt.Sprintf(format, args...))) +} + +func TestEqualFormatting(t *testing.T) { + for i, currCase := range []struct { + equalWant string + equalGot string + msgAndArgs []interface{} + want string + }{ + {equalWant: "want", equalGot: "got", want: "\tassertions.go:[0-9]+: \r \r\tError Trace:\t\n\t\t\r\tError: \tNot equal: \n\t\t\r\t \texpected: \"want\"\n\t\t\r\t \tactual : \"got\"\n"}, + {equalWant: "want", equalGot: "got", msgAndArgs: []interface{}{"hello, %v!", "world"}, want: "\tassertions.go:[0-9]+: \r \r\tError Trace:\t\n\t\t\r\tError: \tNot equal: \n\t\t\r\t \texpected: \"want\"\n\t\t\r\t \tactual : \"got\"\n\t\t\r\tMessages: \thello, world!\n"}, + } { + mockT := &bufferT{} + Equal(mockT, currCase.equalWant, currCase.equalGot, currCase.msgAndArgs...) + Regexp(t, regexp.MustCompile(currCase.want), mockT.buf.String(), "Case %d", i) + } } func TestFormatUnequalValues(t *testing.T) { @@ -208,6 +279,10 @@ func TestFormatUnequalValues(t *testing.T) { Equal(t, `int64(123)`, expected, "value should include type") Equal(t, `int32(123)`, actual, "value should include type") + expected, actual = formatUnequalValues(int64(123), nil) + Equal(t, `int64(123)`, expected, "value should include type") + Equal(t, `()`, actual, "value should include type") + type testStructType struct { Val string } @@ -324,8 +399,8 @@ func TestNotEqual(t *testing.T) { } funcA := func() int { return 23 } funcB := func() int { return 42 } - if !NotEqual(mockT, funcA, funcB) { - t.Error("NotEqual should return true") + if NotEqual(mockT, funcA, funcB) { + t.Error("NotEqual should return false") } if NotEqual(mockT, "Hello World", "Hello World") { @@ -343,6 +418,9 @@ func TestNotEqual(t *testing.T) { if NotEqual(mockT, new(AssertionTesterConformingObject), new(AssertionTesterConformingObject)) { t.Error("NotEqual should return false") } + if NotEqual(mockT, &struct{}{}, &struct{}{}) { + t.Error("NotEqual should return false") + } } type A struct { @@ -418,6 +496,74 @@ func TestNotContains(t *testing.T) { } } +func TestSubset(t *testing.T) { + mockT := new(testing.T) + + if !Subset(mockT, []int{1, 2, 3}, nil) { + t.Error("Subset should return true: given subset is nil") + } + if !Subset(mockT, []int{1, 2, 3}, []int{}) { + t.Error("Subset should return true: any set contains the nil set") + } + if !Subset(mockT, []int{1, 2, 3}, []int{1, 2}) { + t.Error("Subset should return true: [1, 2, 3] contains [1, 2]") + } + if !Subset(mockT, []int{1, 2, 3}, []int{1, 2, 3}) { + t.Error("Subset should return true: [1, 2, 3] contains [1, 2, 3]") + } + if !Subset(mockT, []string{"hello", "world"}, []string{"hello"}) { + t.Error("Subset should return true: [\"hello\", \"world\"] contains [\"hello\"]") + } + + if Subset(mockT, []string{"hello", "world"}, []string{"hello", "testify"}) { + t.Error("Subset should return false: [\"hello\", \"world\"] does not contain [\"hello\", \"testify\"]") + } + if Subset(mockT, []int{1, 2, 3}, []int{4, 5}) { + t.Error("Subset should return false: [1, 2, 3] does not contain [4, 5]") + } + if Subset(mockT, []int{1, 2, 3}, []int{1, 5}) { + t.Error("Subset should return false: [1, 2, 3] does not contain [1, 5]") + } +} + +func TestNotSubset(t *testing.T) { + mockT := new(testing.T) + + if NotSubset(mockT, []int{1, 2, 3}, nil) { + t.Error("NotSubset should return false: given subset is nil") + } + if NotSubset(mockT, []int{1, 2, 3}, []int{}) { + t.Error("NotSubset should return 
false: any set contains the nil set") + } + if NotSubset(mockT, []int{1, 2, 3}, []int{1, 2}) { + t.Error("NotSubset should return false: [1, 2, 3] contains [1, 2]") + } + if NotSubset(mockT, []int{1, 2, 3}, []int{1, 2, 3}) { + t.Error("NotSubset should return false: [1, 2, 3] contains [1, 2, 3]") + } + if NotSubset(mockT, []string{"hello", "world"}, []string{"hello"}) { + t.Error("NotSubset should return false: [\"hello\", \"world\"] contains [\"hello\"]") + } + + if !NotSubset(mockT, []string{"hello", "world"}, []string{"hello", "testify"}) { + t.Error("NotSubset should return true: [\"hello\", \"world\"] does not contain [\"hello\", \"testify\"]") + } + if !NotSubset(mockT, []int{1, 2, 3}, []int{4, 5}) { + t.Error("NotSubset should return true: [1, 2, 3] does not contain [4, 5]") + } + if !NotSubset(mockT, []int{1, 2, 3}, []int{1, 5}) { + t.Error("NotSubset should return true: [1, 2, 3] does not contain [1, 5]") + } +} + +func TestNotSubsetNil(t *testing.T) { + mockT := new(testing.T) + NotSubset(mockT, []string{"foo"}, nil) + if !mockT.Failed() { + t.Error("NotSubset on nil set should have failed the test") + } +} + func Test_includeElement(t *testing.T) { list1 := []string{"Foo", "Bar"} @@ -469,6 +615,57 @@ func Test_includeElement(t *testing.T) { False(t, found) } +func TestElementsMatch(t *testing.T) { + mockT := new(testing.T) + + if !ElementsMatch(mockT, nil, nil) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{}, []int{}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{1}, []int{1}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{1, 1}, []int{1, 1}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{1, 2}, []int{1, 2}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{1, 2}, []int{2, 1}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, [2]int{1, 2}, [2]int{2, 1}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []string{"hello", "world"}, []string{"world", "hello"}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []string{"hello", "hello"}, []string{"hello", "hello"}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []string{"hello", "hello", "world"}, []string{"hello", "world", "hello"}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, [3]string{"hello", "hello", "world"}, [3]string{"hello", "world", "hello"}) { + t.Error("ElementsMatch should return true") + } + if !ElementsMatch(mockT, []int{}, nil) { + t.Error("ElementsMatch should return true") + } + + if ElementsMatch(mockT, []int{1}, []int{1, 1}) { + t.Error("ElementsMatch should return false") + } + if ElementsMatch(mockT, []int{1, 2}, []int{2, 2}) { + t.Error("ElementsMatch should return false") + } + if ElementsMatch(mockT, []string{"hello", "hello"}, []string{"hello"}) { + t.Error("ElementsMatch should return false") + } +} + func TestCondition(t *testing.T) { mockT := new(testing.T) @@ -514,6 +711,28 @@ func TestPanics(t *testing.T) { } +func TestPanicsWithValue(t *testing.T) { + + mockT := new(testing.T) + + if !PanicsWithValue(mockT, "Panic!", func() { + panic("Panic!") + }) { + t.Error("PanicsWithValue should return true") + } + + if PanicsWithValue(mockT, "Panic!", func() { + }) { + t.Error("PanicsWithValue should return false") + } + + if PanicsWithValue(mockT, "at the disco", func() { + 
panic("Panic!") + }) { + t.Error("PanicsWithValue should return false") + } +} + func TestNotPanics(t *testing.T) { mockT := new(testing.T) @@ -555,7 +774,7 @@ func TestNoError(t *testing.T) { }() if err == nil { // err is not nil here! - t.Errorf("Error should be nil due to empty interface", err) + t.Errorf("Error should be nil due to empty interface: %s", err) } False(t, NoError(mockT, err), "NoError should fail with empty error interface") @@ -579,6 +798,9 @@ func TestError(t *testing.T) { True(t, Error(mockT, err), "Error with error should return True") + // go vet check + True(t, Errorf(mockT, err, "example with %s", "formatted message"), "Errorf with error should rturn True") + // returning an empty error interface err = func() error { var err *customError @@ -589,7 +811,7 @@ func TestError(t *testing.T) { }() if err == nil { // err is not nil here! - t.Errorf("Error should be nil due to empty interface", err) + t.Errorf("Error should be nil due to empty interface: %s", err) } True(t, Error(mockT, err), "Error should pass with empty error interface") @@ -646,6 +868,15 @@ func TestEmpty(t *testing.T) { var tiNP time.Time var s *string var f *os.File + sP := &s + x := 1 + xP := &x + + type TString string + type TStruct struct { + x int + s []int + } True(t, Empty(mockT, ""), "Empty string is empty") True(t, Empty(mockT, nil), "Nil is empty") @@ -657,6 +888,9 @@ func TestEmpty(t *testing.T) { True(t, Empty(mockT, f), "Nil os.File pointer is empty") True(t, Empty(mockT, tiP), "Nil time.Time pointer is empty") True(t, Empty(mockT, tiNP), "time.Time is empty") + True(t, Empty(mockT, TStruct{}), "struct with zero values is empty") + True(t, Empty(mockT, TString("")), "empty aliased string is empty") + True(t, Empty(mockT, sP), "ptr to nil value is empty") False(t, Empty(mockT, "something"), "Non Empty string is not empty") False(t, Empty(mockT, errors.New("something")), "Non nil object is not empty") @@ -664,6 +898,9 @@ func TestEmpty(t *testing.T) { False(t, Empty(mockT, 1), "Non-zero int value is not empty") False(t, Empty(mockT, true), "True value is not empty") False(t, Empty(mockT, chWithValue), "Channel with values is not empty") + False(t, Empty(mockT, TStruct{x: 1}), "struct with initialized values is empty") + False(t, Empty(mockT, TString("abc")), "non-empty aliased string is empty") + False(t, Empty(mockT, xP), "ptr to non-nil value is not empty") } func TestNotEmpty(t *testing.T) { @@ -870,6 +1107,82 @@ func TestInDeltaSlice(t *testing.T) { False(t, InDeltaSlice(mockT, "", nil, 1), "Expected non numeral slices to fail") } +func TestInDeltaMapValues(t *testing.T) { + mockT := new(testing.T) + + for _, tc := range []struct { + title string + expect interface{} + actual interface{} + f func(TestingT, bool, ...interface{}) bool + delta float64 + }{ + { + title: "Within delta", + expect: map[string]float64{ + "foo": 1.0, + "bar": 2.0, + }, + actual: map[string]float64{ + "foo": 1.01, + "bar": 1.99, + }, + delta: 0.1, + f: True, + }, + { + title: "Within delta", + expect: map[int]float64{ + 1: 1.0, + 2: 2.0, + }, + actual: map[int]float64{ + 1: 1.0, + 2: 1.99, + }, + delta: 0.1, + f: True, + }, + { + title: "Different number of keys", + expect: map[int]float64{ + 1: 1.0, + 2: 2.0, + }, + actual: map[int]float64{ + 1: 1.0, + }, + delta: 0.1, + f: False, + }, + { + title: "Within delta with zero value", + expect: map[string]float64{ + "zero": 0.0, + }, + actual: map[string]float64{ + "zero": 0.0, + }, + delta: 0.1, + f: True, + }, + { + title: "With missing key with zero value", + 
expect: map[string]float64{ + "zero": 0.0, + "foo": 0.0, + }, + actual: map[string]float64{ + "zero": 0.0, + "bar": 0.0, + }, + f: False, + }, + } { + tc.f(t, InDeltaMapValues(mockT, tc.expect, tc.actual, tc.delta), tc.title+"\n"+diff(tc.expect, tc.actual)) + } +} + func TestInEpsilon(t *testing.T) { mockT := new(testing.T) @@ -885,6 +1198,7 @@ func TestInEpsilon(t *testing.T) { {uint64(100), uint8(101), 0.01}, {0.1, -0.1, 2}, {0.1, 0, 2}, + {time.Second, time.Second + time.Millisecond, 0.002}, } for _, tc := range cases { @@ -903,6 +1217,7 @@ func TestInEpsilon(t *testing.T) { {2.1, "bla-bla", 0}, {0.1, -0.1, 1.99}, {0, 0.1, 2}, // expected must be different to zero + {time.Second, time.Second + 10*time.Millisecond, 0.002}, } for _, tc := range cases { @@ -1006,6 +1321,28 @@ func TestNotZero(t *testing.T) { } } +func TestFileExists(t *testing.T) { + mockT := new(testing.T) + True(t, FileExists(mockT, "assertions.go")) + + mockT = new(testing.T) + False(t, FileExists(mockT, "random_file")) + + mockT = new(testing.T) + False(t, FileExists(mockT, "../_codegen")) +} + +func TestDirExists(t *testing.T) { + mockT := new(testing.T) + False(t, DirExists(mockT, "assertions.go")) + + mockT = new(testing.T) + False(t, DirExists(mockT, "random_dir")) + + mockT = new(testing.T) + True(t, DirExists(mockT, "../_codegen")) +} + func TestJSONEq_EqualSONString(t *testing.T) { mockT := new(testing.T) True(t, JSONEq(mockT, `{"hello": "world", "foo": "bar"}`, `{"hello": "world", "foo": "bar"}`)) @@ -1208,3 +1545,37 @@ func TestFailNowWithFullTestingT(t *testing.T) { FailNow(mockT, "failed") }, "should call mockT.FailNow() rather than panicking") } + +func TestBytesEqual(t *testing.T) { + var cases = []struct { + a, b []byte + }{ + {make([]byte, 2), make([]byte, 2)}, + {make([]byte, 2), make([]byte, 2, 3)}, + {nil, make([]byte, 0)}, + } + for i, c := range cases { + Equal(t, reflect.DeepEqual(c.a, c.b), ObjectsAreEqual(c.a, c.b), "case %d failed", i+1) + } +} + +func BenchmarkBytesEqual(b *testing.B) { + const size = 1024 * 8 + s := make([]byte, size) + for i := range s { + s[i] = byte(i % 255) + } + s2 := make([]byte, size) + copy(s2, s) + + mockT := &mockFailNowTestingT{} + b.ResetTimer() + for i := 0; i < b.N; i++ { + Equal(mockT, s, s2) + } +} + +func TestEqualArgsValidation(t *testing.T) { + err := validateEqualArgs(time.Now, time.Now) + EqualError(t, err, "cannot take func type as argument") +} diff --git a/vendor/github.com/stretchr/testify/assert/forward_assertions.go b/vendor/github.com/stretchr/testify/assert/forward_assertions.go index b867e95e..9ad56851 100644 --- a/vendor/github.com/stretchr/testify/assert/forward_assertions.go +++ b/vendor/github.com/stretchr/testify/assert/forward_assertions.go @@ -13,4 +13,4 @@ func New(t TestingT) *Assertions { } } -//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl +//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl -include-format-funcs diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go index fa7ab89b..3101e78d 100644 --- a/vendor/github.com/stretchr/testify/assert/http_assertions.go +++ b/vendor/github.com/stretchr/testify/assert/http_assertions.go @@ -8,16 +8,16 @@ import ( "strings" ) -// httpCode is a helper that returns HTTP code of the response. It returns -1 -// if building a new request fails. 
-func httpCode(handler http.HandlerFunc, method, url string, values url.Values) int { +// httpCode is a helper that returns HTTP code of the response. It returns -1 and +// an error if building a new request fails. +func httpCode(handler http.HandlerFunc, method, url string, values url.Values) (int, error) { w := httptest.NewRecorder() req, err := http.NewRequest(method, url+"?"+values.Encode(), nil) if err != nil { - return -1 + return -1, err } handler(w, req) - return w.Code + return w.Code, nil } // HTTPSuccess asserts that a specified handler returns a success status code. @@ -25,12 +25,19 @@ func httpCode(handler http.HandlerFunc, method, url string, values url.Values) i // assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). -func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { - code := httpCode(handler, method, url, values) - if code == -1 { +func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { + code, err := httpCode(handler, method, url, values) + if err != nil { + Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err)) return false } - return code >= http.StatusOK && code <= http.StatusPartialContent + + isSuccessCode := code >= http.StatusOK && code <= http.StatusPartialContent + if !isSuccessCode { + Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code)) + } + + return isSuccessCode } // HTTPRedirect asserts that a specified handler returns a redirect status code. @@ -38,12 +45,19 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, value // assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). -func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { - code := httpCode(handler, method, url, values) - if code == -1 { +func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { + code, err := httpCode(handler, method, url, values) + if err != nil { + Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err)) return false } - return code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect + + isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect + if !isRedirectCode { + Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code)) + } + + return isRedirectCode } // HTTPError asserts that a specified handler returns an error status code. @@ -51,12 +65,19 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, valu // assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). 
-func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { - code := httpCode(handler, method, url, values) - if code == -1 { +func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { + code, err := httpCode(handler, method, url, values) + if err != nil { + Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err)) return false } - return code >= http.StatusBadRequest + + isErrorCode := code >= http.StatusBadRequest + if !isErrorCode { + Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code)) + } + + return isErrorCode } // HTTPBody is a helper that returns HTTP body of the response. It returns @@ -77,7 +98,7 @@ func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) s // assert.HTTPBodyContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). -func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool { +func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { body := HTTPBody(handler, method, url, values) contains := strings.Contains(body, fmt.Sprint(str)) @@ -94,7 +115,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, // assert.HTTPBodyNotContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). -func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool { +func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { body := HTTPBody(handler, method, url, values) contains := strings.Contains(body, fmt.Sprint(str)) diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions_test.go b/vendor/github.com/stretchr/testify/assert/http_assertions_test.go index 684c2d5d..3ab76830 100644 --- a/vendor/github.com/stretchr/testify/assert/http_assertions_test.go +++ b/vendor/github.com/stretchr/testify/assert/http_assertions_test.go @@ -19,21 +19,52 @@ func httpError(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) } -func TestHTTPStatuses(t *testing.T) { +func TestHTTPSuccess(t *testing.T) { assert := New(t) - mockT := new(testing.T) - assert.Equal(HTTPSuccess(mockT, httpOK, "GET", "/", nil), true) - assert.Equal(HTTPSuccess(mockT, httpRedirect, "GET", "/", nil), false) - assert.Equal(HTTPSuccess(mockT, httpError, "GET", "/", nil), false) + mockT1 := new(testing.T) + assert.Equal(HTTPSuccess(mockT1, httpOK, "GET", "/", nil), true) + assert.False(mockT1.Failed()) - assert.Equal(HTTPRedirect(mockT, httpOK, "GET", "/", nil), false) - assert.Equal(HTTPRedirect(mockT, httpRedirect, "GET", "/", nil), true) - assert.Equal(HTTPRedirect(mockT, httpError, "GET", "/", nil), false) + mockT2 := new(testing.T) + assert.Equal(HTTPSuccess(mockT2, httpRedirect, "GET", "/", nil), false) + assert.True(mockT2.Failed()) - assert.Equal(HTTPError(mockT, httpOK, "GET", "/", nil), false) - assert.Equal(HTTPError(mockT, httpRedirect, "GET", "/", nil), false) - assert.Equal(HTTPError(mockT, httpError, "GET", "/", nil), true) + mockT3 := new(testing.T) + 
assert.Equal(HTTPSuccess(mockT3, httpError, "GET", "/", nil), false) + assert.True(mockT3.Failed()) +} + +func TestHTTPRedirect(t *testing.T) { + assert := New(t) + + mockT1 := new(testing.T) + assert.Equal(HTTPRedirect(mockT1, httpOK, "GET", "/", nil), false) + assert.True(mockT1.Failed()) + + mockT2 := new(testing.T) + assert.Equal(HTTPRedirect(mockT2, httpRedirect, "GET", "/", nil), true) + assert.False(mockT2.Failed()) + + mockT3 := new(testing.T) + assert.Equal(HTTPRedirect(mockT3, httpError, "GET", "/", nil), false) + assert.True(mockT3.Failed()) +} + +func TestHTTPError(t *testing.T) { + assert := New(t) + + mockT1 := new(testing.T) + assert.Equal(HTTPError(mockT1, httpOK, "GET", "/", nil), false) + assert.True(mockT1.Failed()) + + mockT2 := new(testing.T) + assert.Equal(HTTPError(mockT2, httpRedirect, "GET", "/", nil), false) + assert.True(mockT2.Failed()) + + mockT3 := new(testing.T) + assert.Equal(HTTPError(mockT3, httpError, "GET", "/", nil), true) + assert.False(mockT3.Failed()) } func TestHTTPStatusesWrapper(t *testing.T) { diff --git a/vendor/github.com/stretchr/testify/mock/mock.go b/vendor/github.com/stretchr/testify/mock/mock.go index 20d7b8b1..1e232b56 100644 --- a/vendor/github.com/stretchr/testify/mock/mock.go +++ b/vendor/github.com/stretchr/testify/mock/mock.go @@ -1,6 +1,7 @@ package mock import ( + "errors" "fmt" "reflect" "regexp" @@ -15,10 +16,6 @@ import ( "github.com/stretchr/testify/assert" ) -func inin() { - spew.Config.SortKeys = true -} - // TestingT is an interface wrapper around *testing.T type TestingT interface { Logf(format string, args ...interface{}) @@ -52,10 +49,15 @@ type Call struct { // Amount of times this call has been called totalCalls int + // Call to this method can be optional + optional bool + // Holds a channel that will be used to block the Return until it either // receives a message or is closed. nil means it returns immediately. WaitFor <-chan time.Time + waitTime time.Duration + // Holds a handler used to manipulate arguments content that are passed by // reference. It's useful when mocking methods such as unmarshalers or // decoders. @@ -134,7 +136,10 @@ func (c *Call) WaitUntil(w <-chan time.Time) *Call { // // Mock.On("MyMethod", arg1, arg2).After(time.Second) func (c *Call) After(d time.Duration) *Call { - return c.WaitUntil(time.After(d)) + c.lock() + defer c.unlock() + c.waitTime = d + return c } // Run sets a handler to be called before returning. It can be used when @@ -145,13 +150,22 @@ func (c *Call) After(d time.Duration) *Call { // arg := args.Get(0).(*map[string]interface{}) // arg["foo"] = "bar" // }) -func (c *Call) Run(fn func(Arguments)) *Call { +func (c *Call) Run(fn func(args Arguments)) *Call { c.lock() defer c.unlock() c.RunFn = fn return c } +// Maybe allows the method call to be optional. Not calling an optional method +// will not cause an error while asserting expectations +func (c *Call) Maybe() *Call { + c.lock() + defer c.unlock() + c.optional = true + return c +} + // On chains a new expectation description onto the mocked interface. This // allows syntax like. 
// @@ -218,8 +232,6 @@ func (m *Mock) On(methodName string, arguments ...interface{}) *Call { // */ func (m *Mock) findExpectedCall(method string, arguments ...interface{}) (int, *Call) { - m.mutex.Lock() - defer m.mutex.Unlock() for i, call := range m.ExpectedCalls { if call.Method == method && call.Repeatability > -1 { @@ -283,7 +295,7 @@ func (m *Mock) Called(arguments ...interface{}) Arguments { functionPath := runtime.FuncForPC(pc).Name() //Next four lines are required to use GCCGO function naming conventions. //For Ex: github_com_docker_libkv_store_mock.WatchTree.pN39_github_com_docker_libkv_store_mock.Mock - //uses inteface information unlike golang github.com/docker/libkv/store/mock.(*Mock).WatchTree + //uses interface information unlike golang github.com/docker/libkv/store/mock.(*Mock).WatchTree //With GCCGO we need to remove interface information starting from pN
. re := regexp.MustCompile("\\.pN\\d+_") if re.MatchString(functionPath) { @@ -291,8 +303,16 @@ func (m *Mock) Called(arguments ...interface{}) Arguments { } parts := strings.Split(functionPath, ".") functionName := parts[len(parts)-1] + return m.MethodCalled(functionName, arguments...) +} - found, call := m.findExpectedCall(functionName, arguments...) +// MethodCalled tells the mock object that the given method has been called, and gets +// an array of arguments to return. Panics if the call is unexpected (i.e. not preceded +// by appropriate .On .Return() calls) +// If Call.WaitFor is set, blocks until the channel is closed or receives a message. +func (m *Mock) MethodCalled(methodName string, arguments ...interface{}) Arguments { + m.mutex.Lock() + found, call := m.findExpectedCall(methodName, arguments...) if found < 0 { // we have to fail here - because we don't know what to do @@ -302,45 +322,47 @@ func (m *Mock) Called(arguments ...interface{}) Arguments { // b) the arguments are not what was expected, or // c) the developer has forgotten to add an accompanying On...Return pair. - closestFound, closestCall := m.findClosestCall(functionName, arguments...) + closestFound, closestCall := m.findClosestCall(methodName, arguments...) + m.mutex.Unlock() if closestFound { - panic(fmt.Sprintf("\n\nmock: Unexpected Method Call\n-----------------------------\n\n%s\n\nThe closest call I have is: \n\n%s\n\n%s\n", callString(functionName, arguments, true), callString(functionName, closestCall.Arguments, true), diffArguments(arguments, closestCall.Arguments))) + panic(fmt.Sprintf("\n\nmock: Unexpected Method Call\n-----------------------------\n\n%s\n\nThe closest call I have is: \n\n%s\n\n%s\n", callString(methodName, arguments, true), callString(methodName, closestCall.Arguments, true), diffArguments(closestCall.Arguments, arguments))) } else { - panic(fmt.Sprintf("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(\"%s\").Return(...) first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", functionName, functionName, callString(functionName, arguments, true), assert.CallerInfo())) + panic(fmt.Sprintf("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(\"%s\").Return(...) 
first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", methodName, methodName, callString(methodName, arguments, true), assert.CallerInfo())) } - } else { - m.mutex.Lock() - switch { - case call.Repeatability == 1: - call.Repeatability = -1 - call.totalCalls++ - - case call.Repeatability > 1: - call.Repeatability-- - call.totalCalls++ - - case call.Repeatability == 0: - call.totalCalls++ - } - m.mutex.Unlock() } + if call.Repeatability == 1 { + call.Repeatability = -1 + } else if call.Repeatability > 1 { + call.Repeatability-- + } + call.totalCalls++ + // add the call - m.mutex.Lock() - m.Calls = append(m.Calls, *newCall(m, functionName, arguments...)) + m.Calls = append(m.Calls, *newCall(m, methodName, arguments...)) m.mutex.Unlock() // block if specified if call.WaitFor != nil { <-call.WaitFor + } else { + time.Sleep(call.waitTime) } - if call.RunFn != nil { - call.RunFn(arguments) + m.mutex.Lock() + runFn := call.RunFn + m.mutex.Unlock() + + if runFn != nil { + runFn(arguments) } - return call.ReturnArguments + m.mutex.Lock() + returnArgs := call.ReturnArguments + m.mutex.Unlock() + + return returnArgs } /* @@ -372,25 +394,26 @@ func AssertExpectationsForObjects(t TestingT, testObjects ...interface{}) bool { // AssertExpectations asserts that everything specified with On and Return was // in fact called as expected. Calls may have occurred in any order. func (m *Mock) AssertExpectations(t TestingT) bool { + m.mutex.Lock() + defer m.mutex.Unlock() var somethingMissing bool var failedExpectations int // iterate through each expectation expectedCalls := m.expectedCalls() for _, expectedCall := range expectedCalls { - if !m.methodWasCalled(expectedCall.Method, expectedCall.Arguments) && expectedCall.totalCalls == 0 { + if !expectedCall.optional && !m.methodWasCalled(expectedCall.Method, expectedCall.Arguments) && expectedCall.totalCalls == 0 { somethingMissing = true failedExpectations++ - t.Logf("\u274C\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) + t.Logf("FAIL:\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) } else { - m.mutex.Lock() if expectedCall.Repeatability > 0 { somethingMissing = true failedExpectations++ + t.Logf("FAIL:\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) } else { - t.Logf("\u2705\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) + t.Logf("PASS:\t%s(%s)", expectedCall.Method, expectedCall.Arguments.String()) } - m.mutex.Unlock() } } @@ -403,6 +426,8 @@ func (m *Mock) AssertExpectations(t TestingT) bool { // AssertNumberOfCalls asserts that the method was called expectedCalls times. func (m *Mock) AssertNumberOfCalls(t TestingT, methodName string, expectedCalls int) bool { + m.mutex.Lock() + defer m.mutex.Unlock() var actualCalls int for _, call := range m.calls() { if call.Method == methodName { @@ -415,6 +440,8 @@ func (m *Mock) AssertNumberOfCalls(t TestingT, methodName string, expectedCalls // AssertCalled asserts that the method was called. // It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. 
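The mock.go hunks above add Call.Maybe(), and AssertExpectations now skips optional expectations that were never exercised. A minimal sketch under the assumption of a hypothetical Cache mock; only the Maybe/Return/AssertExpectations calls come from this change:

package example

import (
	"testing"

	"github.com/stretchr/testify/mock"
)

// Cache is a hypothetical collaborator used only for illustration.
type Cache struct{ mock.Mock }

func (c *Cache) Invalidate(key string) error {
	args := c.Called(key)
	return args.Error(0)
}

func TestOptionalExpectation(t *testing.T) {
	c := new(Cache)

	// Maybe() marks the expectation as optional: AssertExpectations will not
	// fail if Invalidate is never called during the test.
	c.On("Invalidate", "users").Return(nil).Maybe()

	// ... exercise code that may or may not hit the cache ...

	c.AssertExpectations(t)
}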
func (m *Mock) AssertCalled(t TestingT, methodName string, arguments ...interface{}) bool { + m.mutex.Lock() + defer m.mutex.Unlock() if !assert.True(t, m.methodWasCalled(methodName, arguments), fmt.Sprintf("The \"%s\" method should have been called with %d argument(s), but was not.", methodName, len(arguments))) { t.Logf("%v", m.expectedCalls()) return false @@ -425,6 +452,8 @@ func (m *Mock) AssertCalled(t TestingT, methodName string, arguments ...interfac // AssertNotCalled asserts that the method was not called. // It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. func (m *Mock) AssertNotCalled(t TestingT, methodName string, arguments ...interface{}) bool { + m.mutex.Lock() + defer m.mutex.Unlock() if !assert.False(t, m.methodWasCalled(methodName, arguments), fmt.Sprintf("The \"%s\" method was called with %d argument(s), but should NOT have been.", methodName, len(arguments))) { t.Logf("%v", m.expectedCalls()) return false @@ -450,14 +479,10 @@ func (m *Mock) methodWasCalled(methodName string, expected []interface{}) bool { } func (m *Mock) expectedCalls() []*Call { - m.mutex.Lock() - defer m.mutex.Unlock() return append([]*Call{}, m.ExpectedCalls...) } func (m *Mock) calls() []Call { - m.mutex.Lock() - defer m.mutex.Unlock() return append([]Call{}, m.Calls...) } @@ -496,9 +521,25 @@ type argumentMatcher struct { func (f argumentMatcher) Matches(argument interface{}) bool { expectType := f.fn.Type().In(0) + expectTypeNilSupported := false + switch expectType.Kind() { + case reflect.Interface, reflect.Chan, reflect.Func, reflect.Map, reflect.Slice, reflect.Ptr: + expectTypeNilSupported = true + } - if reflect.TypeOf(argument).AssignableTo(expectType) { - result := f.fn.Call([]reflect.Value{reflect.ValueOf(argument)}) + argType := reflect.TypeOf(argument) + var arg reflect.Value + if argType == nil { + arg = reflect.New(expectType).Elem() + } else { + arg = reflect.ValueOf(argument) + } + + if argType == nil && !expectTypeNilSupported { + panic(errors.New("attempting to call matcher with nil for non-nil expected type")) + } + if argType == nil || argType.AssignableTo(expectType) { + result := f.fn.Call([]reflect.Value{arg}) return result[0].Bool() } return false @@ -518,7 +559,7 @@ func (f argumentMatcher) String() string { // // |fn|, must be a function accepting a single argument (of the expected type) // which returns a bool. If |fn| doesn't match the required signature, -// MathedBy() panics. +// MatchedBy() panics. 
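The argumentMatcher changes above let a MatchedBy predicate receive a nil argument for interface, pointer, map, slice, channel and func parameters instead of panicking inside reflect. A sketch assuming a hypothetical Notifier interface and Service mock:

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

// Notifier and Service are hypothetical types used only for illustration.
type Notifier interface{ Notify(msg string) }

type Service struct{ mock.Mock }

func (s *Service) Register(n Notifier) error {
	args := s.Called(n)
	return args.Error(0)
}

func TestRegisterNilNotifier(t *testing.T) {
	s := new(Service)

	// With the nil handling added to argumentMatcher, the predicate is invoked
	// with a zero Notifier value when the caller passes nil.
	s.On("Register", mock.MatchedBy(func(n Notifier) bool { return n == nil })).
		Return(errors.New("nil notifier"))

	assert.EqualError(t, s.Register(nil), "nil notifier")
}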
func MatchedBy(fn interface{}) argumentMatcher { fnType := reflect.TypeOf(fn) @@ -584,10 +625,10 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { if matcher, ok := expected.(argumentMatcher); ok { if matcher.Matches(actual) { - output = fmt.Sprintf("%s\t%d: \u2705 %s matched by %s\n", output, i, actual, matcher) + output = fmt.Sprintf("%s\t%d: PASS: %s matched by %s\n", output, i, actual, matcher) } else { differences++ - output = fmt.Sprintf("%s\t%d: \u2705 %s not matched by %s\n", output, i, actual, matcher) + output = fmt.Sprintf("%s\t%d: PASS: %s not matched by %s\n", output, i, actual, matcher) } } else if reflect.TypeOf(expected) == reflect.TypeOf((*AnythingOfTypeArgument)(nil)).Elem() { @@ -595,7 +636,7 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { if reflect.TypeOf(actual).Name() != string(expected.(AnythingOfTypeArgument)) && reflect.TypeOf(actual).String() != string(expected.(AnythingOfTypeArgument)) { // not match differences++ - output = fmt.Sprintf("%s\t%d: \u274C type %s != type %s - %s\n", output, i, expected, reflect.TypeOf(actual).Name(), actual) + output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected, reflect.TypeOf(actual).Name(), actual) } } else { @@ -604,11 +645,11 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { if assert.ObjectsAreEqual(expected, Anything) || assert.ObjectsAreEqual(actual, Anything) || assert.ObjectsAreEqual(actual, expected) { // match - output = fmt.Sprintf("%s\t%d: \u2705 %s == %s\n", output, i, actual, expected) + output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, actual, expected) } else { // not match differences++ - output = fmt.Sprintf("%s\t%d: \u274C %s != %s\n", output, i, actual, expected) + output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, actual, expected) } } @@ -719,6 +760,10 @@ func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { } func diffArguments(expected Arguments, actual Arguments) string { + if len(expected) != len(actual) { + return fmt.Sprintf("Provided %v arguments, mocked for %v arguments", len(expected), len(actual)) + } + for x := range expected { if diffString := diff(expected[x], actual[x]); diffString != "" { return fmt.Sprintf("Difference found in argument %v:\n\n%s", x, diffString) @@ -746,8 +791,8 @@ func diff(expected interface{}, actual interface{}) string { return "" } - e := spew.Sdump(expected) - a := spew.Sdump(actual) + e := spewConfig.Sdump(expected) + a := spewConfig.Sdump(actual) diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ A: difflib.SplitLines(e), @@ -761,3 +806,10 @@ func diff(expected interface{}, actual interface{}) string { return diff } + +var spewConfig = spew.ConfigState{ + Indent: " ", + DisablePointerAddresses: true, + DisableCapacities: true, + SortKeys: true, +} diff --git a/vendor/github.com/stretchr/testify/mock/mock_test.go b/vendor/github.com/stretchr/testify/mock/mock_test.go index 8cb4615d..cb245ba5 100644 --- a/vendor/github.com/stretchr/testify/mock/mock_test.go +++ b/vendor/github.com/stretchr/testify/mock/mock_test.go @@ -2,10 +2,13 @@ package mock import ( "errors" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" + "fmt" + "sync" "testing" "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) /* @@ -40,6 +43,26 @@ func (i *TestExampleImplementation) TheExampleMethod3(et *ExampleType) error { return args.Error(0) } +func (i *TestExampleImplementation) TheExampleMethod4(v 
ExampleInterface) error { + args := i.Called(v) + return args.Error(0) +} + +func (i *TestExampleImplementation) TheExampleMethod5(ch chan struct{}) error { + args := i.Called(ch) + return args.Error(0) +} + +func (i *TestExampleImplementation) TheExampleMethod6(m map[string]bool) error { + args := i.Called(m) + return args.Error(0) +} + +func (i *TestExampleImplementation) TheExampleMethod7(slice []bool) error { + args := i.Called(slice) + return args.Error(0) +} + func (i *TestExampleImplementation) TheExampleMethodFunc(fn func(string) error) error { args := i.Called(fn) return args.Error(0) @@ -55,6 +78,11 @@ func (i *TestExampleImplementation) TheExampleMethodVariadicInterface(a ...inter return args.Error(0) } +func (i *TestExampleImplementation) TheExampleMethodMixedVariadic(a int, b ...int) error { + args := i.Called(a, b) + return args.Error(0) +} + type ExampleFuncType func(string) error func (i *TestExampleImplementation) TheExampleMethodFuncType(fn ExampleFuncType) error { @@ -174,15 +202,20 @@ func Test_Mock_On_WithPtrArgMatcher(t *testing.T) { var mockedService TestExampleImplementation mockedService.On("TheExampleMethod3", - MatchedBy(func(a *ExampleType) bool { return a.ran == true }), + MatchedBy(func(a *ExampleType) bool { return a != nil && a.ran == true }), ).Return(nil) mockedService.On("TheExampleMethod3", - MatchedBy(func(a *ExampleType) bool { return a.ran == false }), + MatchedBy(func(a *ExampleType) bool { return a != nil && a.ran == false }), ).Return(errors.New("error")) + mockedService.On("TheExampleMethod3", + MatchedBy(func(a *ExampleType) bool { return a == nil }), + ).Return(errors.New("error2")) + assert.Equal(t, mockedService.TheExampleMethod3(&ExampleType{true}), nil) assert.EqualError(t, mockedService.TheExampleMethod3(&ExampleType{false}), "error") + assert.EqualError(t, mockedService.TheExampleMethod3(nil), "error2") } func Test_Mock_On_WithFuncArgMatcher(t *testing.T) { @@ -191,17 +224,62 @@ func Test_Mock_On_WithFuncArgMatcher(t *testing.T) { fixture1, fixture2 := errors.New("fixture1"), errors.New("fixture2") mockedService.On("TheExampleMethodFunc", - MatchedBy(func(a func(string) error) bool { return a("string") == fixture1 }), + MatchedBy(func(a func(string) error) bool { return a != nil && a("string") == fixture1 }), ).Return(errors.New("fixture1")) mockedService.On("TheExampleMethodFunc", - MatchedBy(func(a func(string) error) bool { return a("string") == fixture2 }), + MatchedBy(func(a func(string) error) bool { return a != nil && a("string") == fixture2 }), ).Return(errors.New("fixture2")) + mockedService.On("TheExampleMethodFunc", + MatchedBy(func(a func(string) error) bool { return a == nil }), + ).Return(errors.New("fixture3")) + assert.EqualError(t, mockedService.TheExampleMethodFunc( func(string) error { return fixture1 }), "fixture1") assert.EqualError(t, mockedService.TheExampleMethodFunc( func(string) error { return fixture2 }), "fixture2") + assert.EqualError(t, mockedService.TheExampleMethodFunc(nil), "fixture3") +} + +func Test_Mock_On_WithInterfaceArgMatcher(t *testing.T) { + var mockedService TestExampleImplementation + + mockedService.On("TheExampleMethod4", + MatchedBy(func(a ExampleInterface) bool { return a == nil }), + ).Return(errors.New("fixture1")) + + assert.EqualError(t, mockedService.TheExampleMethod4(nil), "fixture1") +} + +func Test_Mock_On_WithChannelArgMatcher(t *testing.T) { + var mockedService TestExampleImplementation + + mockedService.On("TheExampleMethod5", + MatchedBy(func(ch chan struct{}) bool { return ch == 
nil }), + ).Return(errors.New("fixture1")) + + assert.EqualError(t, mockedService.TheExampleMethod5(nil), "fixture1") +} + +func Test_Mock_On_WithMapArgMatcher(t *testing.T) { + var mockedService TestExampleImplementation + + mockedService.On("TheExampleMethod6", + MatchedBy(func(m map[string]bool) bool { return m == nil }), + ).Return(errors.New("fixture1")) + + assert.EqualError(t, mockedService.TheExampleMethod6(nil), "fixture1") +} + +func Test_Mock_On_WithSliceArgMatcher(t *testing.T) { + var mockedService TestExampleImplementation + + mockedService.On("TheExampleMethod7", + MatchedBy(func(slice []bool) bool { return slice == nil }), + ).Return(errors.New("fixture1")) + + assert.EqualError(t, mockedService.TheExampleMethod7(nil), "fixture1") } func Test_Mock_On_WithVariadicFunc(t *testing.T) { @@ -226,6 +304,29 @@ func Test_Mock_On_WithVariadicFunc(t *testing.T) { } +func Test_Mock_On_WithMixedVariadicFunc(t *testing.T) { + + // make a test impl object + var mockedService = new(TestExampleImplementation) + + c := mockedService. + On("TheExampleMethodMixedVariadic", 1, []int{2, 3, 4}). + Return(nil) + + assert.Equal(t, []*Call{c}, mockedService.ExpectedCalls) + assert.Equal(t, 2, len(c.Arguments)) + assert.Equal(t, 1, c.Arguments[0]) + assert.Equal(t, []int{2, 3, 4}, c.Arguments[1]) + + assert.NotPanics(t, func() { + mockedService.TheExampleMethodMixedVariadic(1, 2, 3, 4) + }) + assert.Panics(t, func() { + mockedService.TheExampleMethodMixedVariadic(1, 2, 3, 5) + }) + +} + func Test_Mock_On_WithVariadicFuncWithInterface(t *testing.T) { // make a test impl object @@ -726,7 +827,7 @@ func Test_AssertExpectationsForObjects_Helper(t *testing.T) { mockedService2.Called(2) mockedService3.Called(3) - assert.True(t, AssertExpectationsForObjects(t, mockedService1.Mock, mockedService2.Mock, mockedService3.Mock)) + assert.True(t, AssertExpectationsForObjects(t, &mockedService1.Mock, &mockedService2.Mock, &mockedService3.Mock)) assert.True(t, AssertExpectationsForObjects(t, mockedService1, mockedService2, mockedService3)) } @@ -745,7 +846,7 @@ func Test_AssertExpectationsForObjects_Helper_Failed(t *testing.T) { mockedService3.Called(3) tt := new(testing.T) - assert.False(t, AssertExpectationsForObjects(tt, mockedService1.Mock, mockedService2.Mock, mockedService3.Mock)) + assert.False(t, AssertExpectationsForObjects(tt, &mockedService1.Mock, &mockedService2.Mock, &mockedService3.Mock)) assert.False(t, AssertExpectationsForObjects(tt, mockedService1, mockedService2, mockedService3)) } @@ -969,6 +1070,31 @@ func Test_Mock_AssertNotCalled(t *testing.T) { } +func Test_Mock_AssertOptional(t *testing.T) { + // Optional called + var ms1 = new(TestExampleImplementation) + ms1.On("TheExampleMethod", 1, 2, 3).Maybe().Return(4, nil) + ms1.TheExampleMethod(1, 2, 3) + + tt1 := new(testing.T) + assert.Equal(t, true, ms1.AssertExpectations(tt1)) + + // Optional not called + var ms2 = new(TestExampleImplementation) + ms2.On("TheExampleMethod", 1, 2, 3).Maybe().Return(4, nil) + + tt2 := new(testing.T) + assert.Equal(t, true, ms2.AssertExpectations(tt2)) + + // Non-optional called + var ms3 = new(TestExampleImplementation) + ms3.On("TheExampleMethod", 1, 2, 3).Return(4, nil) + ms3.TheExampleMethod(1, 2, 3) + + tt3 := new(testing.T) + assert.Equal(t, true, ms3.AssertExpectations(tt3)) +} + /* Arguments helper methods */ @@ -1130,3 +1256,97 @@ func Test_Arguments_Bool(t *testing.T) { assert.Equal(t, true, args.Bool(2)) } + +func Test_WaitUntil_Parallel(t *testing.T) { + + // make a test impl object + var mockedService 
*TestExampleImplementation = new(TestExampleImplementation) + + ch1 := make(chan time.Time) + ch2 := make(chan time.Time) + + mockedService.Mock.On("TheExampleMethod2", true).Return().WaitUntil(ch2).Run(func(args Arguments) { + ch1 <- time.Now() + }) + + mockedService.Mock.On("TheExampleMethod2", false).Return().WaitUntil(ch1) + + // Lock both goroutines on the .WaitUntil method + go func() { + mockedService.TheExampleMethod2(false) + }() + go func() { + mockedService.TheExampleMethod2(true) + }() + + // Allow the first call to execute, so the second one executes afterwards + ch2 <- time.Now() +} + +func Test_MockMethodCalled(t *testing.T) { + m := new(Mock) + m.On("foo", "hello").Return("world") + + retArgs := m.MethodCalled("foo", "hello") + require.True(t, len(retArgs) == 1) + require.Equal(t, "world", retArgs[0]) + m.AssertExpectations(t) +} + +// Test to validate fix for racy concurrent call access in MethodCalled() +func Test_MockReturnAndCalledConcurrent(t *testing.T) { + iterations := 1000 + m := &Mock{} + call := m.On("ConcurrencyTestMethod") + + wg := sync.WaitGroup{} + wg.Add(2) + + go func() { + for i := 0; i < iterations; i++ { + call.Return(10) + } + wg.Done() + }() + go func() { + for i := 0; i < iterations; i++ { + ConcurrencyTestMethod(m) + } + wg.Done() + }() + wg.Wait() +} + +type timer struct{ Mock } + +func (s *timer) GetTime(i int) string { + return s.Called(i).Get(0).(string) +} + +func TestAfterTotalWaitTimeWhileExecution(t *testing.T) { + waitDuration := 1 + total, waitMs := 5, time.Millisecond*time.Duration(waitDuration) + aTimer := new(timer) + for i := 0; i < total; i++ { + aTimer.On("GetTime", i).After(waitMs).Return(fmt.Sprintf("Time%d", i)).Once() + } + time.Sleep(waitMs) + start := time.Now() + var results []string + + for i := 0; i < total; i++ { + results = append(results, aTimer.GetTime(i)) + } + + end := time.Now() + elapsedTime := end.Sub(start) + assert.True(t, elapsedTime > waitMs, fmt.Sprintf("Total elapsed time:%v should be atleast greater than %v", elapsedTime, waitMs)) + assert.Equal(t, total, len(results)) + for i, _ := range results { + assert.Equal(t, fmt.Sprintf("Time%d", i), results[i], "Return value of method should be same") + } +} + +func ConcurrencyTestMethod(m *Mock) { + m.Called() +} diff --git a/vendor/github.com/stretchr/testify/require/forward_requirements.go b/vendor/github.com/stretchr/testify/require/forward_requirements.go index d3c2ab9b..ac71d405 100644 --- a/vendor/github.com/stretchr/testify/require/forward_requirements.go +++ b/vendor/github.com/stretchr/testify/require/forward_requirements.go @@ -13,4 +13,4 @@ func New(t TestingT) *Assertions { } } -//go:generate go run ../_codegen/main.go -output-package=require -template=require_forward.go.tmpl +//go:generate go run ../_codegen/main.go -output-package=require -template=require_forward.go.tmpl -include-format-funcs diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go index 1bcfcb0d..ac3c3087 100644 --- a/vendor/github.com/stretchr/testify/require/require.go +++ b/vendor/github.com/stretchr/testify/require/require.go @@ -1,464 +1,867 @@ /* * CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen * THIS FILE MUST NOT BE EDITED BY HAND -*/ + */ package require import ( - assert "github.com/stretchr/testify/assert" http "net/http" url "net/url" time "time" ) - // Condition uses a Comparison to assert a complex condition. 
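The tests above cover the new exported MethodCalled entry point, which records a call under an explicit method name rather than deriving it from the caller's runtime stack. A minimal sketch of using it directly on a bare Mock (the method name and arguments are illustrative):

package example

import (
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestMethodCalledDirectly(t *testing.T) {
	m := new(mock.Mock)
	m.On("Lookup", "alice").Return(42)

	// MethodCalled bypasses the function-name lookup that Called() performs,
	// which is useful for hand-wired or generated mocks.
	ret := m.MethodCalled("Lookup", "alice")

	require.Equal(t, 42, ret.Int(0))
	m.AssertExpectations(t)
}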
func Condition(t TestingT, comp assert.Comparison, msgAndArgs ...interface{}) { - if !assert.Condition(t, comp, msgAndArgs...) { - t.FailNow() - } + if !assert.Condition(t, comp, msgAndArgs...) { + t.FailNow() + } } +// Conditionf uses a Comparison to assert a complex condition. +func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interface{}) { + if !assert.Conditionf(t, comp, msg, args...) { + t.FailNow() + } +} // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. -// -// assert.Contains(t, "Hello World", "World", "But 'Hello World' does contain 'World'") -// assert.Contains(t, ["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") -// assert.Contains(t, {"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { - if !assert.Contains(t, s, contains, msgAndArgs...) { - t.FailNow() - } + if !assert.Contains(t, s, contains, msgAndArgs...) { + t.FailNow() + } } +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { + if !assert.Containsf(t, s, contains, msg, args...) { + t.FailNow() + } +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExists(t TestingT, path string, msgAndArgs ...interface{}) { + if !assert.DirExists(t, path, msgAndArgs...) { + t.FailNow() + } +} + +// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExistsf(t TestingT, path string, msg string, args ...interface{}) { + if !assert.DirExistsf(t, path, msg, args...) { + t.FailNow() + } +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2]) +func ElementsMatch(t TestingT, listA interface{}, listB interface{}, msgAndArgs ...interface{}) { + if !assert.ElementsMatch(t, listA, listB, msgAndArgs...) { + t.FailNow() + } +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) { + if !assert.ElementsMatchf(t, listA, listB, msg, args...) 
{ + t.FailNow() + } +} // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. -// +// // assert.Empty(t, obj) -// -// Returns whether the assertion was successful (true) or not (false). func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if !assert.Empty(t, object, msgAndArgs...) { - t.FailNow() - } + if !assert.Empty(t, object, msgAndArgs...) { + t.FailNow() + } } +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// assert.Emptyf(t, obj, "error message %s", "formatted") +func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.Emptyf(t, object, msg, args...) { + t.FailNow() + } +} // Equal asserts that two objects are equal. -// -// assert.Equal(t, 123, 123, "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.Equal(t, 123, 123) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if !assert.Equal(t, expected, actual, msgAndArgs...) { - t.FailNow() - } + if !assert.Equal(t, expected, actual, msgAndArgs...) { + t.FailNow() + } } - // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. -// +// // actualObj, err := SomeFunction() -// if assert.Error(t, err, "An error was expected") { -// assert.Equal(t, err, expectedError) -// } -// -// Returns whether the assertion was successful (true) or not (false). +// assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) { - if !assert.EqualError(t, theError, errString, msgAndArgs...) { - t.FailNow() - } + if !assert.EqualError(t, theError, errString, msgAndArgs...) { + t.FailNow() + } } +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) { + if !assert.EqualErrorf(t, theError, errString, msg, args...) { + t.FailNow() + } +} // EqualValues asserts that two objects are equal or convertable to the same types // and equal. -// -// assert.EqualValues(t, uint32(123), int32(123), "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if !assert.EqualValues(t, expected, actual, msgAndArgs...) { - t.FailNow() - } + if !assert.EqualValues(t, expected, actual, msgAndArgs...) { + t.FailNow() + } } +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. +// +// assert.EqualValuesf(t, uint32(123, "error message %s", "formatted"), int32(123)) +func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.EqualValuesf(t, expected, actual, msg, args...) 
{ + t.FailNow() + } +} + +// Equalf asserts that two objects are equal. +// +// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.Equalf(t, expected, actual, msg, args...) { + t.FailNow() + } +} // Error asserts that a function returned an error (i.e. not `nil`). -// +// // actualObj, err := SomeFunction() -// if assert.Error(t, err, "An error was expected") { -// assert.Equal(t, err, expectedError) +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) // } -// -// Returns whether the assertion was successful (true) or not (false). func Error(t TestingT, err error, msgAndArgs ...interface{}) { - if !assert.Error(t, err, msgAndArgs...) { - t.FailNow() - } + if !assert.Error(t, err, msgAndArgs...) { + t.FailNow() + } } +// Errorf asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func Errorf(t TestingT, err error, msg string, args ...interface{}) { + if !assert.Errorf(t, err, msg, args...) { + t.FailNow() + } +} -// Exactly asserts that two objects are equal is value and type. -// -// assert.Exactly(t, int32(123), int64(123), "123 and 123 should NOT be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// Exactly asserts that two objects are equal in value and type. +// +// assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if !assert.Exactly(t, expected, actual, msgAndArgs...) { - t.FailNow() - } + if !assert.Exactly(t, expected, actual, msgAndArgs...) { + t.FailNow() + } } +// Exactlyf asserts that two objects are equal in value and type. +// +// assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) +func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.Exactlyf(t, expected, actual, msg, args...) { + t.FailNow() + } +} // Fail reports a failure through func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) { - if !assert.Fail(t, failureMessage, msgAndArgs...) { - t.FailNow() - } + if !assert.Fail(t, failureMessage, msgAndArgs...) { + t.FailNow() + } } - // FailNow fails test func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) { - if !assert.FailNow(t, failureMessage, msgAndArgs...) { - t.FailNow() - } + if !assert.FailNow(t, failureMessage, msgAndArgs...) { + t.FailNow() + } } +// FailNowf fails test +func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) { + if !assert.FailNowf(t, failureMessage, msg, args...) { + t.FailNow() + } +} + +// Failf reports a failure through +func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) { + if !assert.Failf(t, failureMessage, msg, args...) { + t.FailNow() + } +} // False asserts that the specified value is false. -// -// assert.False(t, myBool, "myBool should be false") -// -// Returns whether the assertion was successful (true) or not (false). 
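The regenerated require package now includes printf-style "...f" variants (produced with -include-format-funcs). A short sketch of how they are called; parsePort is a hypothetical helper, not part of this change:

package example

import (
	"fmt"
	"testing"

	"github.com/stretchr/testify/require"
)

func parsePort(s string) (int, error) {
	var p int
	_, err := fmt.Sscanf(s, "%d", &p)
	return p, err
}

func TestParsePort(t *testing.T) {
	p, err := parsePort("8080")

	// The ...f variants take a format string plus args instead of the
	// free-form msgAndArgs of their unformatted counterparts.
	require.NoErrorf(t, err, "parsePort(%q) returned an error", "8080")
	require.Equalf(t, 8080, p, "unexpected port for input %q", "8080")
}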
+// +// assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) { - if !assert.False(t, value, msgAndArgs...) { - t.FailNow() - } + if !assert.False(t, value, msgAndArgs...) { + t.FailNow() + } } +// Falsef asserts that the specified value is false. +// +// assert.Falsef(t, myBool, "error message %s", "formatted") +func Falsef(t TestingT, value bool, msg string, args ...interface{}) { + if !assert.Falsef(t, value, msg, args...) { + t.FailNow() + } +} + +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExists(t TestingT, path string, msgAndArgs ...interface{}) { + if !assert.FileExists(t, path, msgAndArgs...) { + t.FailNow() + } +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { + if !assert.FileExistsf(t, path, msg, args...) { + t.FailNow() + } +} // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. -// +// // assert.HTTPBodyContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). -func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) { - if !assert.HTTPBodyContains(t, handler, method, url, values, str) { - t.FailNow() - } +func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + if !assert.HTTPBodyContains(t, handler, method, url, values, str, msgAndArgs...) { + t.FailNow() + } } +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// assert.HTTPBodyContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + if !assert.HTTPBodyContainsf(t, handler, method, url, values, str, msg, args...) { + t.FailNow() + } +} // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. -// +// // assert.HTTPBodyNotContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). -func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) { - if !assert.HTTPBodyNotContains(t, handler, method, url, values, str) { - t.FailNow() - } +func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + if !assert.HTTPBodyNotContains(t, handler, method, url, values, str, msgAndArgs...) { + t.FailNow() + } } +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// assert.HTTPBodyNotContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). 
+func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + if !assert.HTTPBodyNotContainsf(t, handler, method, url, values, str, msg, args...) { + t.FailNow() + } +} // HTTPError asserts that a specified handler returns an error status code. -// +// // assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) { - if !assert.HTTPError(t, handler, method, url, values) { - t.FailNow() - } +func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPError(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } } +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPErrorf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} // HTTPRedirect asserts that a specified handler returns a redirect status code. -// +// // assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) { - if !assert.HTTPRedirect(t, handler, method, url, values) { - t.FailNow() - } +func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPRedirect(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } } +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPRedirectf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} // HTTPSuccess asserts that a specified handler returns a success status code. -// +// // assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) -// +// // Returns whether the assertion was successful (true) or not (false). -func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) { - if !assert.HTTPSuccess(t, handler, method, url, values) { - t.FailNow() - } +func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPSuccess(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } } +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). 
+func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPSuccessf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} // Implements asserts that an object is implemented by the specified interface. -// -// assert.Implements(t, (*MyInterface)(nil), new(MyObject), "MyObject") +// +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { - if !assert.Implements(t, interfaceObject, object, msgAndArgs...) { - t.FailNow() - } + if !assert.Implements(t, interfaceObject, object, msgAndArgs...) { + t.FailNow() + } } +// Implementsf asserts that an object is implemented by the specified interface. +// +// assert.Implementsf(t, (*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { + if !assert.Implementsf(t, interfaceObject, object, msg, args...) { + t.FailNow() + } +} // InDelta asserts that the two numerals are within delta of each other. -// +// // assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) -// -// Returns whether the assertion was successful (true) or not (false). func InDelta(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if !assert.InDelta(t, expected, actual, delta, msgAndArgs...) { - t.FailNow() - } + if !assert.InDelta(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } } +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValues(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + if !assert.InDeltaMapValues(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaMapValuesf(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} // InDeltaSlice is the same as InDelta, except it compares two slices. func InDeltaSlice(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if !assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) { - t.FailNow() - } + if !assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } } +// InDeltaSlicef is the same as InDelta, except it compares two slices. +func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaSlicef(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaf(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} // InEpsilon asserts that expected and actual have a relative error less than epsilon -// -// Returns whether the assertion was successful (true) or not (false). 
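InDeltaMapValues, added above, compares two maps key by key and requires every pair of values to be within the given delta. A minimal sketch with illustrative data:

package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestLatencyBudget(t *testing.T) {
	expected := map[string]float64{"p50": 10.0, "p99": 120.0}
	measured := map[string]float64{"p50": 10.4, "p99": 119.2}

	// Both maps must have exactly the same keys; each value pair must differ
	// by at most the delta (1.0 here).
	require.InDeltaMapValues(t, expected, measured, 1.0)
}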
func InEpsilon(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { - if !assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) { - t.FailNow() - } + if !assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) { + t.FailNow() + } } - -// InEpsilonSlice is the same as InEpsilon, except it compares two slices. -func InEpsilonSlice(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if !assert.InEpsilonSlice(t, expected, actual, delta, msgAndArgs...) { - t.FailNow() - } +// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. +func InEpsilonSlice(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + if !assert.InEpsilonSlice(t, expected, actual, epsilon, msgAndArgs...) { + t.FailNow() + } } +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + if !assert.InEpsilonSlicef(t, expected, actual, epsilon, msg, args...) { + t.FailNow() + } +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + if !assert.InEpsilonf(t, expected, actual, epsilon, msg, args...) { + t.FailNow() + } +} // IsType asserts that the specified objects are of the same type. func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { - if !assert.IsType(t, expectedType, object, msgAndArgs...) { - t.FailNow() - } + if !assert.IsType(t, expectedType, object, msgAndArgs...) { + t.FailNow() + } } +// IsTypef asserts that the specified objects are of the same type. +func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) { + if !assert.IsTypef(t, expectedType, object, msg, args...) { + t.FailNow() + } +} // JSONEq asserts that two JSON strings are equivalent. -// +// // assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -// -// Returns whether the assertion was successful (true) or not (false). func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) { - if !assert.JSONEq(t, expected, actual, msgAndArgs...) { - t.FailNow() - } + if !assert.JSONEq(t, expected, actual, msgAndArgs...) { + t.FailNow() + } } +// JSONEqf asserts that two JSON strings are equivalent. +// +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) { + if !assert.JSONEqf(t, expected, actual, msg, args...) { + t.FailNow() + } +} // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. -// -// assert.Len(t, mySlice, 3, "The size of slice is not 3") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) { - if !assert.Len(t, object, length, msgAndArgs...) { - t.FailNow() - } + if !assert.Len(t, object, length, msgAndArgs...) { + t.FailNow() + } } +// Lenf asserts that the specified object has specific length. 
+// Lenf also fails if the object has a type that len() not accept. +// +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) { + if !assert.Lenf(t, object, length, msg, args...) { + t.FailNow() + } +} // Nil asserts that the specified object is nil. -// -// assert.Nil(t, err, "err should be nothing") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if !assert.Nil(t, object, msgAndArgs...) { - t.FailNow() - } + if !assert.Nil(t, object, msgAndArgs...) { + t.FailNow() + } } +// Nilf asserts that the specified object is nil. +// +// assert.Nilf(t, err, "error message %s", "formatted") +func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.Nilf(t, object, msg, args...) { + t.FailNow() + } +} // NoError asserts that a function returned no error (i.e. `nil`). -// +// // actualObj, err := SomeFunction() // if assert.NoError(t, err) { -// assert.Equal(t, actualObj, expectedObj) +// assert.Equal(t, expectedObj, actualObj) // } -// -// Returns whether the assertion was successful (true) or not (false). func NoError(t TestingT, err error, msgAndArgs ...interface{}) { - if !assert.NoError(t, err, msgAndArgs...) { - t.FailNow() - } + if !assert.NoError(t, err, msgAndArgs...) { + t.FailNow() + } } +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func NoErrorf(t TestingT, err error, msg string, args ...interface{}) { + if !assert.NoErrorf(t, err, msg, args...) { + t.FailNow() + } +} // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. -// -// assert.NotContains(t, "Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") -// assert.NotContains(t, ["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") -// assert.NotContains(t, {"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { - if !assert.NotContains(t, s, contains, msgAndArgs...) { - t.FailNow() - } + if !assert.NotContains(t, s, contains, msgAndArgs...) { + t.FailNow() + } } +// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { + if !assert.NotContainsf(t, s, contains, msg, args...) { + t.FailNow() + } +} // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. 
-// +// // if assert.NotEmpty(t, obj) { // assert.Equal(t, "two", obj[1]) // } -// -// Returns whether the assertion was successful (true) or not (false). func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if !assert.NotEmpty(t, object, msgAndArgs...) { - t.FailNow() - } + if !assert.NotEmpty(t, object, msgAndArgs...) { + t.FailNow() + } } +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.NotEmptyf(t, object, msg, args...) { + t.FailNow() + } +} // NotEqual asserts that the specified values are NOT equal. -// -// assert.NotEqual(t, obj1, obj2, "two objects shouldn't be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.NotEqual(t, obj1, obj2) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if !assert.NotEqual(t, expected, actual, msgAndArgs...) { - t.FailNow() - } + if !assert.NotEqual(t, expected, actual, msgAndArgs...) { + t.FailNow() + } } +// NotEqualf asserts that the specified values are NOT equal. +// +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.NotEqualf(t, expected, actual, msg, args...) { + t.FailNow() + } +} // NotNil asserts that the specified object is not nil. -// -// assert.NotNil(t, err, "err should be something") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if !assert.NotNil(t, object, msgAndArgs...) { - t.FailNow() - } + if !assert.NotNil(t, object, msgAndArgs...) { + t.FailNow() + } } +// NotNilf asserts that the specified object is not nil. +// +// assert.NotNilf(t, err, "error message %s", "formatted") +func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.NotNilf(t, object, msg, args...) { + t.FailNow() + } +} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// assert.NotPanics(t, func(){ -// RemainCalm() -// }, "Calling RemainCalm() should NOT panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { - if !assert.NotPanics(t, f, msgAndArgs...) { - t.FailNow() - } + if !assert.NotPanics(t, f, msgAndArgs...) { + t.FailNow() + } } +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.NotPanicsf(t, f, msg, args...) { + t.FailNow() + } +} // NotRegexp asserts that a specified regexp does not match a string. 
-// +// // assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") // assert.NotRegexp(t, "^start", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { - if !assert.NotRegexp(t, rx, str, msgAndArgs...) { - t.FailNow() - } + if !assert.NotRegexp(t, rx, str, msgAndArgs...) { + t.FailNow() + } } +// NotRegexpf asserts that a specified regexp does not match a string. +// +// assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if !assert.NotRegexpf(t, rx, str, msg, args...) { + t.FailNow() + } +} -// NotZero asserts that i is not the zero value for its type and returns the truth. +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { + if !assert.NotSubset(t, list, subset, msgAndArgs...) { + t.FailNow() + } +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { + if !assert.NotSubsetf(t, list, subset, msg, args...) { + t.FailNow() + } +} + +// NotZero asserts that i is not the zero value for its type. func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) { - if !assert.NotZero(t, i, msgAndArgs...) { - t.FailNow() - } + if !assert.NotZero(t, i, msgAndArgs...) { + t.FailNow() + } } +// NotZerof asserts that i is not the zero value for its type. +func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) { + if !assert.NotZerof(t, i, msg, args...) { + t.FailNow() + } +} // Panics asserts that the code inside the specified PanicTestFunc panics. -// -// assert.Panics(t, func(){ -// GoCrazy() -// }, "Calling GoCrazy() should panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { - if !assert.Panics(t, f, msgAndArgs...) { - t.FailNow() - } + if !assert.Panics(t, f, msgAndArgs...) { + t.FailNow() + } } +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if !assert.PanicsWithValue(t, expected, f, msgAndArgs...) { + t.FailNow() + } +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. 
+// +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.PanicsWithValuef(t, expected, f, msg, args...) { + t.FailNow() + } +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.Panicsf(t, f, msg, args...) { + t.FailNow() + } +} // Regexp asserts that a specified regexp matches a string. -// +// // assert.Regexp(t, regexp.MustCompile("start"), "it's starting") // assert.Regexp(t, "start...$", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { - if !assert.Regexp(t, rx, str, msgAndArgs...) { - t.FailNow() - } + if !assert.Regexp(t, rx, str, msgAndArgs...) { + t.FailNow() + } } +// Regexpf asserts that a specified regexp matches a string. +// +// assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if !assert.Regexpf(t, rx, str, msg, args...) { + t.FailNow() + } +} + +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { + if !assert.Subset(t, list, subset, msgAndArgs...) { + t.FailNow() + } +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { + if !assert.Subsetf(t, list, subset, msg, args...) { + t.FailNow() + } +} // True asserts that the specified value is true. -// -// assert.True(t, myBool, "myBool should be true") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) { - if !assert.True(t, value, msgAndArgs...) { - t.FailNow() - } + if !assert.True(t, value, msgAndArgs...) { + t.FailNow() + } } +// Truef asserts that the specified value is true. +// +// assert.Truef(t, myBool, "error message %s", "formatted") +func Truef(t TestingT, value bool, msg string, args ...interface{}) { + if !assert.Truef(t, value, msg, args...) { + t.FailNow() + } +} // WithinDuration asserts that the two times are within duration delta of each other. -// -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s") -// -// Returns whether the assertion was successful (true) or not (false). +// +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { - if !assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) 
{ - t.FailNow() - } + if !assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } } +// WithinDurationf asserts that the two times are within duration delta of each other. +// +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { + if !assert.WithinDurationf(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} -// Zero asserts that i is the zero value for its type and returns the truth. +// Zero asserts that i is the zero value for its type. func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) { - if !assert.Zero(t, i, msgAndArgs...) { - t.FailNow() - } + if !assert.Zero(t, i, msgAndArgs...) { + t.FailNow() + } +} + +// Zerof asserts that i is the zero value for its type. +func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) { + if !assert.Zerof(t, i, msg, args...) { + t.FailNow() + } } diff --git a/vendor/github.com/stretchr/testify/require/require.go.tmpl b/vendor/github.com/stretchr/testify/require/require.go.tmpl index ab1b1e9f..d2c38f6f 100644 --- a/vendor/github.com/stretchr/testify/require/require.go.tmpl +++ b/vendor/github.com/stretchr/testify/require/require.go.tmpl @@ -1,6 +1,6 @@ {{.Comment}} func {{.DocInfo.Name}}(t TestingT, {{.Params}}) { - if !assert.{{.DocInfo.Name}}(t, {{.ForwardedParams}}) { - t.FailNow() - } + if !assert.{{.DocInfo.Name}}(t, {{.ForwardedParams}}) { + t.FailNow() + } } diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go index 58324f10..299ceb95 100644 --- a/vendor/github.com/stretchr/testify/require/require_forward.go +++ b/vendor/github.com/stretchr/testify/require/require_forward.go @@ -1,388 +1,687 @@ /* * CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen * THIS FILE MUST NOT BE EDITED BY HAND -*/ + */ package require import ( - assert "github.com/stretchr/testify/assert" http "net/http" url "net/url" time "time" ) - // Condition uses a Comparison to assert a complex condition. func (a *Assertions) Condition(comp assert.Comparison, msgAndArgs ...interface{}) { Condition(a.t, comp, msgAndArgs...) } +// Conditionf uses a Comparison to assert a complex condition. +func (a *Assertions) Conditionf(comp assert.Comparison, msg string, args ...interface{}) { + Conditionf(a.t, comp, msg, args...) +} // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. -// -// a.Contains("Hello World", "World", "But 'Hello World' does contain 'World'") -// a.Contains(["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") -// a.Contains({"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { Contains(a.t, s, contains, msgAndArgs...) } +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. 
+// +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) { + Containsf(a.t, s, contains, msg, args...) +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) { + DirExists(a.t, path, msgAndArgs...) +} + +// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) { + DirExistsf(a.t, path, msg, args...) +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2]) +func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) { + ElementsMatch(a.t, listA, listB, msgAndArgs...) +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) { + ElementsMatchf(a.t, listA, listB, msg, args...) +} // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. -// +// // a.Empty(obj) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { Empty(a.t, object, msgAndArgs...) } +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// a.Emptyf(obj, "error message %s", "formatted") +func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) { + Emptyf(a.t, object, msg, args...) +} // Equal asserts that two objects are equal. -// -// a.Equal(123, 123, "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Equal(123, 123) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { Equal(a.t, expected, actual, msgAndArgs...) } - // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. -// +// // actualObj, err := SomeFunction() -// if assert.Error(t, err, "An error was expected") { -// assert.Equal(t, err, expectedError) -// } -// -// Returns whether the assertion was successful (true) or not (false). 
+// a.EqualError(err, expectedErrorString) func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) { EqualError(a.t, theError, errString, msgAndArgs...) } +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) { + EqualErrorf(a.t, theError, errString, msg, args...) +} // EqualValues asserts that two objects are equal or convertable to the same types // and equal. -// -// a.EqualValues(uint32(123), int32(123), "123 and 123 should be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.EqualValues(uint32(123), int32(123)) func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { EqualValues(a.t, expected, actual, msgAndArgs...) } +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. +// +// a.EqualValuesf(uint32(123, "error message %s", "formatted"), int32(123)) +func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + EqualValuesf(a.t, expected, actual, msg, args...) +} + +// Equalf asserts that two objects are equal. +// +// a.Equalf(123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + Equalf(a.t, expected, actual, msg, args...) +} // Error asserts that a function returned an error (i.e. not `nil`). -// +// // actualObj, err := SomeFunction() -// if a.Error(err, "An error was expected") { -// assert.Equal(t, err, expectedError) +// if a.Error(err) { +// assert.Equal(t, expectedError, err) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Error(err error, msgAndArgs ...interface{}) { Error(a.t, err, msgAndArgs...) } +// Errorf asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { + Errorf(a.t, err, msg, args...) +} -// Exactly asserts that two objects are equal is value and type. -// -// a.Exactly(int32(123), int64(123), "123 and 123 should NOT be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// Exactly asserts that two objects are equal in value and type. +// +// a.Exactly(int32(123), int64(123)) func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { Exactly(a.t, expected, actual, msgAndArgs...) } +// Exactlyf asserts that two objects are equal in value and type. +// +// a.Exactlyf(int32(123, "error message %s", "formatted"), int64(123)) +func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + Exactlyf(a.t, expected, actual, msg, args...) 
+} // Fail reports a failure through func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) { Fail(a.t, failureMessage, msgAndArgs...) } - // FailNow fails test func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) { FailNow(a.t, failureMessage, msgAndArgs...) } +// FailNowf fails test +func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) { + FailNowf(a.t, failureMessage, msg, args...) +} + +// Failf reports a failure through +func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) { + Failf(a.t, failureMessage, msg, args...) +} // False asserts that the specified value is false. -// -// a.False(myBool, "myBool should be false") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.False(myBool) func (a *Assertions) False(value bool, msgAndArgs ...interface{}) { False(a.t, value, msgAndArgs...) } +// Falsef asserts that the specified value is false. +// +// a.Falsef(myBool, "error message %s", "formatted") +func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) { + Falsef(a.t, value, msg, args...) +} + +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) { + FileExists(a.t, path, msgAndArgs...) +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) { + FileExistsf(a.t, path, msg, args...) +} // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. -// +// // a.HTTPBodyContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) { - HTTPBodyContains(a.t, handler, method, url, values, str) +func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...) } +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// a.HTTPBodyContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...) +} // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. -// +// // a.HTTPBodyNotContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") -// +// // Returns whether the assertion was successful (true) or not (false). 
-func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) { - HTTPBodyNotContains(a.t, handler, method, url, values, str) +func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...) } +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// a.HTTPBodyNotContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...) +} // HTTPError asserts that a specified handler returns an error status code. -// +// // a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values) { - HTTPError(a.t, handler, method, url, values) +func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPError(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPErrorf(a.t, handler, method, url, values, msg, args...) +} // HTTPRedirect asserts that a specified handler returns a redirect status code. -// +// // a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// +// // Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values) { - HTTPRedirect(a.t, handler, method, url, values) +func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPRedirectf(a.t, handler, method, url, values, msg, args...) +} // HTTPSuccess asserts that a specified handler returns a success status code. -// +// // a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) -// +// // Returns whether the assertion was successful (true) or not (false). 
-func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values) { - HTTPSuccess(a.t, handler, method, url, values) +func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...) } +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPSuccessf(a.t, handler, method, url, values, msg, args...) +} // Implements asserts that an object is implemented by the specified interface. -// -// a.Implements((*MyInterface)(nil), new(MyObject), "MyObject") +// +// a.Implements((*MyInterface)(nil), new(MyObject)) func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { Implements(a.t, interfaceObject, object, msgAndArgs...) } +// Implementsf asserts that an object is implemented by the specified interface. +// +// a.Implementsf((*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { + Implementsf(a.t, interfaceObject, object, msg, args...) +} // InDelta asserts that the two numerals are within delta of each other. -// +// // a.InDelta(math.Pi, (22 / 7.0), 0.01) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { InDelta(a.t, expected, actual, delta, msgAndArgs...) } +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...) +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...) +} // InDeltaSlice is the same as InDelta, except it compares two slices. func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...) } +// InDeltaSlicef is the same as InDelta, except it compares two slices. +func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaSlicef(a.t, expected, actual, delta, msg, args...) +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaf(a.t, expected, actual, delta, msg, args...) 
+} // InEpsilon asserts that expected and actual have a relative error less than epsilon -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...) } - -// InEpsilonSlice is the same as InEpsilon, except it compares two slices. -func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - InEpsilonSlice(a.t, expected, actual, delta, msgAndArgs...) +// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...) } +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...) +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + InEpsilonf(a.t, expected, actual, epsilon, msg, args...) +} // IsType asserts that the specified objects are of the same type. func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { IsType(a.t, expectedType, object, msgAndArgs...) } +// IsTypef asserts that the specified objects are of the same type. +func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) { + IsTypef(a.t, expectedType, object, msg, args...) +} // JSONEq asserts that two JSON strings are equivalent. -// +// // a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) { JSONEq(a.t, expected, actual, msgAndArgs...) } +// JSONEqf asserts that two JSON strings are equivalent. +// +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) { + JSONEqf(a.t, expected, actual, msg, args...) +} // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. -// -// a.Len(mySlice, 3, "The size of slice is not 3") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Len(mySlice, 3) func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) { Len(a.t, object, length, msgAndArgs...) } +// Lenf asserts that the specified object has specific length. +// Lenf also fails if the object has a type that len() not accept. +// +// a.Lenf(mySlice, 3, "error message %s", "formatted") +func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) { + Lenf(a.t, object, length, msg, args...) +} // Nil asserts that the specified object is nil. -// -// a.Nil(err, "err should be nothing") -// -// Returns whether the assertion was successful (true) or not (false). 
+// +// a.Nil(err) func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) { Nil(a.t, object, msgAndArgs...) } +// Nilf asserts that the specified object is nil. +// +// a.Nilf(err, "error message %s", "formatted") +func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) { + Nilf(a.t, object, msg, args...) +} // NoError asserts that a function returned no error (i.e. `nil`). -// +// // actualObj, err := SomeFunction() // if a.NoError(err) { -// assert.Equal(t, actualObj, expectedObj) +// assert.Equal(t, expectedObj, actualObj) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) { NoError(a.t, err, msgAndArgs...) } +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) { + NoErrorf(a.t, err, msg, args...) +} // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. -// -// a.NotContains("Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") -// a.NotContains(["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") -// a.NotContains({"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { NotContains(a.t, s, contains, msgAndArgs...) } +// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) { + NotContainsf(a.t, s, contains, msg, args...) +} // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. -// +// // if a.NotEmpty(obj) { // assert.Equal(t, "two", obj[1]) // } -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) { NotEmpty(a.t, object, msgAndArgs...) } +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) { + NotEmptyf(a.t, object, msg, args...) +} // NotEqual asserts that the specified values are NOT equal. -// -// a.NotEqual(obj1, obj2, "two objects shouldn't be equal") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotEqual(obj1, obj2) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). 
func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { NotEqual(a.t, expected, actual, msgAndArgs...) } +// NotEqualf asserts that the specified values are NOT equal. +// +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + NotEqualf(a.t, expected, actual, msg, args...) +} // NotNil asserts that the specified object is not nil. -// -// a.NotNil(err, "err should be something") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotNil(err) func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) { NotNil(a.t, object, msgAndArgs...) } +// NotNilf asserts that the specified object is not nil. +// +// a.NotNilf(err, "error message %s", "formatted") +func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) { + NotNilf(a.t, object, msg, args...) +} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// a.NotPanics(func(){ -// RemainCalm() -// }, "Calling RemainCalm() should NOT panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.NotPanics(func(){ RemainCalm() }) func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { NotPanics(a.t, f, msgAndArgs...) } +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +func (a *Assertions) NotPanicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { + NotPanicsf(a.t, f, msg, args...) +} // NotRegexp asserts that a specified regexp does not match a string. -// +// // a.NotRegexp(regexp.MustCompile("starts"), "it's starting") // a.NotRegexp("^start", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { NotRegexp(a.t, rx, str, msgAndArgs...) } +// NotRegexpf asserts that a specified regexp does not match a string. +// +// a.NotRegexpf(regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { + NotRegexpf(a.t, rx, str, msg, args...) +} -// NotZero asserts that i is not the zero value for its type and returns the truth. +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { + NotSubset(a.t, list, subset, msgAndArgs...) +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { + NotSubsetf(a.t, list, subset, msg, args...) 
+} + +// NotZero asserts that i is not the zero value for its type. func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) { NotZero(a.t, i, msgAndArgs...) } +// NotZerof asserts that i is not the zero value for its type. +func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) { + NotZerof(a.t, i, msg, args...) +} // Panics asserts that the code inside the specified PanicTestFunc panics. -// -// a.Panics(func(){ -// GoCrazy() -// }, "Calling GoCrazy() should panic") -// -// Returns whether the assertion was successful (true) or not (false). +// +// a.Panics(func(){ GoCrazy() }) func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { Panics(a.t, f, msgAndArgs...) } +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + PanicsWithValue(a.t, expected, f, msgAndArgs...) +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) PanicsWithValuef(expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { + PanicsWithValuef(a.t, expected, f, msg, args...) +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) Panicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { + Panicsf(a.t, f, msg, args...) +} // Regexp asserts that a specified regexp matches a string. -// +// // a.Regexp(regexp.MustCompile("start"), "it's starting") // a.Regexp("start...$", "it's not starting") -// -// Returns whether the assertion was successful (true) or not (false). func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { Regexp(a.t, rx, str, msgAndArgs...) } +// Regexpf asserts that a specified regexp matches a string. +// +// a.Regexpf(regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { + Regexpf(a.t, rx, str, msg, args...) +} + +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { + Subset(a.t, list, subset, msgAndArgs...) +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { + Subsetf(a.t, list, subset, msg, args...) +} // True asserts that the specified value is true. -// -// a.True(myBool, "myBool should be true") -// -// Returns whether the assertion was successful (true) or not (false). 
+//
+// a.True(myBool)
 func (a *Assertions) True(value bool, msgAndArgs ...interface{}) {
 	True(a.t, value, msgAndArgs...)
 }
 
+// Truef asserts that the specified value is true.
+//
+// a.Truef(myBool, "error message %s", "formatted")
+func (a *Assertions) Truef(value bool, msg string, args ...interface{}) {
+	Truef(a.t, value, msg, args...)
+}
 
 // WithinDuration asserts that the two times are within duration delta of each other.
-//
-// a.WithinDuration(time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s")
-//
-// Returns whether the assertion was successful (true) or not (false).
+//
+// a.WithinDuration(time.Now(), time.Now(), 10*time.Second)
 func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) {
 	WithinDuration(a.t, expected, actual, delta, msgAndArgs...)
 }
 
+// WithinDurationf asserts that the two times are within duration delta of each other.
+//
+// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted")
+func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) {
+	WithinDurationf(a.t, expected, actual, delta, msg, args...)
+}
 
-// Zero asserts that i is the zero value for its type and returns the truth.
+// Zero asserts that i is the zero value for its type.
 func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) {
 	Zero(a.t, i, msgAndArgs...)
 }
+
+// Zerof asserts that i is the zero value for its type.
+func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) {
+	Zerof(a.t, i, msg, args...)
+}
diff --git a/vendor/github.com/stretchr/testify/require/requirements.go b/vendor/github.com/stretchr/testify/require/requirements.go
index 41147562..e404f016 100644
--- a/vendor/github.com/stretchr/testify/require/requirements.go
+++ b/vendor/github.com/stretchr/testify/require/requirements.go
@@ -6,4 +6,4 @@ type TestingT interface {
 	FailNow()
 }
 
-//go:generate go run ../_codegen/main.go -output-package=require -template=require.go.tmpl
+//go:generate go run ../_codegen/main.go -output-package=require -template=require.go.tmpl -include-format-funcs
diff --git a/vendor/github.com/stretchr/testify/suite/interfaces.go b/vendor/github.com/stretchr/testify/suite/interfaces.go
index 20969472..b37cb040 100644
--- a/vendor/github.com/stretchr/testify/suite/interfaces.go
+++ b/vendor/github.com/stretchr/testify/suite/interfaces.go
@@ -32,3 +32,15 @@ type TearDownAllSuite interface {
 type TearDownTestSuite interface {
 	TearDownTest()
 }
+
+// BeforeTest has a function to be executed right before the test
+// starts and receives the suite and test names as input
+type BeforeTest interface {
+	BeforeTest(suiteName, testName string)
+}
+
+// AfterTest has a function to be executed right after the test
+// finishes and receives the suite and test names as input
+type AfterTest interface {
+	AfterTest(suiteName, testName string)
+}
diff --git a/vendor/github.com/stretchr/testify/suite/suite.go b/vendor/github.com/stretchr/testify/suite/suite.go
index db741300..e20afbc2 100644
--- a/vendor/github.com/stretchr/testify/suite/suite.go
+++ b/vendor/github.com/stretchr/testify/suite/suite.go
@@ -12,6 +12,7 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
+var allTestsFilter = func(_, _ string) (bool, error) { return true, nil }
 var matchMethod = flag.String("testify.m", "", "regular expression to select tests of the testify suite to run")
 
 // Suite is a basic testing suite with methods for storing and
@@ -86,7 +87,13 @@ func Run(t *testing.T, suite TestingSuite) {
 				if setupTestSuite, ok := suite.(SetupTestSuite); ok {
 					setupTestSuite.SetupTest()
 				}
+				if beforeTestSuite, ok := suite.(BeforeTest); ok {
+					beforeTestSuite.BeforeTest(methodFinder.Elem().Name(), method.Name)
+				}
 				defer func() {
+					if afterTestSuite, ok := suite.(AfterTest); ok {
+						afterTestSuite.AfterTest(methodFinder.Elem().Name(), method.Name)
+					}
 					if tearDownTestSuite, ok := suite.(TearDownTestSuite); ok {
 						tearDownTestSuite.TearDownTest()
 					}
@@ -98,10 +105,20 @@ func Run(t *testing.T, suite TestingSuite) {
 			tests = append(tests, test)
 		}
 	}
+	runTests(t, tests)
+}
 
-	if !testing.RunTests(func(_, _ string) (bool, error) { return true, nil },
-		tests) {
-		t.Fail()
+func runTests(t testing.TB, tests []testing.InternalTest) {
+	r, ok := t.(runner)
+	if !ok { // backwards compatibility with Go 1.6 and below
+		if !testing.RunTests(allTestsFilter, tests) {
+			t.Fail()
+		}
+		return
+	}
+
+	for _, test := range tests {
+		r.Run(test.Name, test.F)
 	}
 }
 
@@ -113,3 +130,7 @@ func methodFilter(name string) (bool, error) {
 	}
 	return regexp.MatchString(*matchMethod, name)
 }
+
+type runner interface {
+	Run(name string, f func(t *testing.T)) bool
+}
diff --git a/vendor/github.com/stretchr/testify/suite/suite_test.go b/vendor/github.com/stretchr/testify/suite/suite_test.go
index c7c4e88f..b75fa4ac 100644
--- a/vendor/github.com/stretchr/testify/suite/suite_test.go
+++ b/vendor/github.com/stretchr/testify/suite/suite_test.go
@@ -5,8 +5,10 @@ import (
 	"io/ioutil"
 	"os"
 	"testing"
+	"time"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 // SuiteRequireTwice is intended to test the usage of suite.Require in two
@@ -18,7 +20,7 @@ type SuiteRequireTwice struct{ Suite }
 // A regression would result on these tests panicking rather than failing.
 func TestSuiteRequireTwice(t *testing.T) {
 	ok := testing.RunTests(
-		func(_, _ string) (bool, error) { return true, nil },
+		allTestsFilter,
 		[]testing.InternalTest{{
 			Name: "TestSuiteRequireTwice",
 			F: func(t *testing.T) {
@@ -58,6 +60,15 @@ type SuiteTester struct {
 	TestOneRunCount int
 	TestTwoRunCount int
 	NonTestMethodRunCount int
+
+	SuiteNameBefore []string
+	TestNameBefore []string
+
+	SuiteNameAfter []string
+	TestNameAfter []string
+
+	TimeBefore []time.Time
+	TimeAfter []time.Time
 }
 
 type SuiteSkipTester struct {
@@ -75,6 +86,18 @@ func (suite *SuiteTester) SetupSuite() {
 	suite.SetupSuiteRunCount++
 }
 
+func (suite *SuiteTester) BeforeTest(suiteName, testName string) {
+	suite.SuiteNameBefore = append(suite.SuiteNameBefore, suiteName)
+	suite.TestNameBefore = append(suite.TestNameBefore, testName)
+	suite.TimeBefore = append(suite.TimeBefore, time.Now())
+}
+
+func (suite *SuiteTester) AfterTest(suiteName, testName string) {
+	suite.SuiteNameAfter = append(suite.SuiteNameAfter, suiteName)
+	suite.TestNameAfter = append(suite.TestNameAfter, testName)
+	suite.TimeAfter = append(suite.TimeAfter, time.Now())
+}
+
 func (suite *SuiteSkipTester) SetupSuite() {
 	suite.SetupSuiteRunCount++
 	suite.T().Skip()
@@ -145,6 +168,35 @@ func TestRunSuite(t *testing.T) {
 	assert.Equal(t, suiteTester.SetupSuiteRunCount, 1)
 	assert.Equal(t, suiteTester.TearDownSuiteRunCount, 1)
 
+	assert.Equal(t, len(suiteTester.SuiteNameAfter), 3)
+	assert.Equal(t, len(suiteTester.SuiteNameBefore), 3)
+	assert.Equal(t, len(suiteTester.TestNameAfter), 3)
+	assert.Equal(t, len(suiteTester.TestNameBefore), 3)
+
+	assert.Contains(t, suiteTester.TestNameAfter, "TestOne")
+	assert.Contains(t, suiteTester.TestNameAfter, "TestTwo")
+	assert.Contains(t, suiteTester.TestNameAfter, "TestSkip")
+
+	assert.Contains(t, suiteTester.TestNameBefore, "TestOne")
+	assert.Contains(t, suiteTester.TestNameBefore, "TestTwo")
+	assert.Contains(t, suiteTester.TestNameBefore, "TestSkip")
+
+	for _, suiteName := range suiteTester.SuiteNameAfter {
+		assert.Equal(t, "SuiteTester", suiteName)
+	}
+
+	for _, suiteName := range suiteTester.SuiteNameBefore {
+		assert.Equal(t, "SuiteTester", suiteName)
+	}
+
+	for _, when := range suiteTester.TimeAfter {
+		assert.False(t, when.IsZero())
+	}
+
+	for _, when := range suiteTester.TimeBefore {
+		assert.False(t, when.IsZero())
+	}
+
 	// There are three test methods (TestOne, TestTwo, and TestSkip), so
 	// the SetupTest and TearDownTest methods (which should be run once for
 	// each test) should have been run three times.
@@ -216,16 +268,19 @@ func (sc *StdoutCapture) StopCapture() (string, error) {
 }
 
 func TestSuiteLogging(t *testing.T) {
-	testT := testing.T{}
-
 	suiteLoggingTester := new(SuiteLoggingTester)
-
 	capture := StdoutCapture{}
+	internalTest := testing.InternalTest{
+		Name: "SomeTest",
+		F: func(subT *testing.T) {
+			Run(subT, suiteLoggingTester)
+		},
+	}
 	capture.StartCapture()
-	Run(&testT, suiteLoggingTester)
+	testing.RunTests(allTestsFilter, []testing.InternalTest{internalTest})
 	output, err := capture.StopCapture()
-
-	assert.Nil(t, err, "Got an error trying to capture stdout!")
+	require.NoError(t, err, "Got an error trying to capture stdout and stderr!")
+	require.NotEmpty(t, output, "output content must not be empty")
 
 	// Failed tests' output is always printed
 	assert.Contains(t, output, "TESTLOGFAIL")
diff --git a/vendor/golang.org/x/net/CONTRIBUTING.md b/vendor/golang.org/x/net/CONTRIBUTING.md
index 88dff59b..d0485e88 100644
--- a/vendor/golang.org/x/net/CONTRIBUTING.md
+++ b/vendor/golang.org/x/net/CONTRIBUTING.md
@@ -4,16 +4,15 @@ Go is an open source project. It is the work of hundreds of contributors. We
 appreciate your help!
 
-
 ## Filing issues
 
 When [filing an issue](https://golang.org/issue/new), make sure to answer these five questions:
 
-1. What version of Go are you using (`go version`)?
-2. What operating system and processor architecture are you using?
-3. What did you do?
-4. What did you expect to see?
-5. What did you see instead?
+1. What version of Go are you using (`go version`)?
+2. What operating system and processor architecture are you using?
+3. What did you do?
+4. What did you expect to see?
+5. What did you see instead?
 
 General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker.
 The gophers there will answer or ask you to file an issue if you've tripped over a bug.
@@ -23,9 +22,5 @@ The gophers there will answer or ask you to file an issue if you've tripped over
 Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html)
 before sending patches.
 
-**We do not accept GitHub pull requests**
-(we use [Gerrit](https://code.google.com/p/gerrit/) instead for code review).
-
 Unless otherwise noted, the Go source files are distributed
 under the BSD-style license found in the LICENSE file.
-
diff --git a/vendor/golang.org/x/net/README b/vendor/golang.org/x/net/README
deleted file mode 100644
index 6b13d8e5..00000000
--- a/vendor/golang.org/x/net/README
+++ /dev/null
@@ -1,3 +0,0 @@
-This repository holds supplementary Go networking libraries.
-
-To submit changes to this repository, see http://golang.org/doc/contribute.html.
diff --git a/vendor/golang.org/x/net/README.md b/vendor/golang.org/x/net/README.md
new file mode 100644
index 00000000..00a9b6eb
--- /dev/null
+++ b/vendor/golang.org/x/net/README.md
@@ -0,0 +1,16 @@
+# Go Networking
+
+This repository holds supplementary Go networking libraries.
+
+## Download/Install
+
+The easiest way to install is to run `go get -u golang.org/x/net`. You can
+also manually git clone the repository to `$GOPATH/src/golang.org/x/net`.
+
+## Report Issues / Send Patches
+
+This repository uses Gerrit for code changes. To learn how to submit
+changes to this repository, see https://golang.org/doc/contribute.html.
+The main issue tracker for the net repository is located at
+https://github.com/golang/go/issues. Prefix your issue with "x/net:" in the
+subject line, so it is easy to find.
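The BeforeTest/AfterTest interfaces vendored above (github.com/stretchr/testify/suite) are picked up by suite.Run through the type assertions shown in suite.go, and called around every test method. The following is a minimal sketch of how a consuming test suite might implement those hooks; the TimedSuite type, its timing logic, and the test names are illustrative assumptions, not part of this patch.

// Illustrative only: TimedSuite and its fields are made-up names.
package example

import (
	"testing"
	"time"

	"github.com/stretchr/testify/suite"
)

// TimedSuite records when each test starts so AfterTest can report durations.
type TimedSuite struct {
	suite.Suite
	started time.Time
}

// BeforeTest is called by suite.Run right before each test method runs.
func (s *TimedSuite) BeforeTest(suiteName, testName string) {
	s.started = time.Now()
}

// AfterTest is called by suite.Run right after each test method finishes.
func (s *TimedSuite) AfterTest(suiteName, testName string) {
	s.T().Logf("%s/%s took %s", suiteName, testName, time.Since(s.started))
}

func (s *TimedSuite) TestSomething() {
	s.Equal(1, 1)
}

func TestTimedSuite(t *testing.T) {
	suite.Run(t, new(TimedSuite))
}

Because suite.Run only checks for the BeforeTest/AfterTest interfaces via type assertion, suites that do not implement them are unaffected by this upgrade.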
diff --git a/vendor/golang.org/x/net/bpf/instructions.go b/vendor/golang.org/x/net/bpf/instructions.go
index 3b4fd089..f9dc0e8e 100644
--- a/vendor/golang.org/x/net/bpf/instructions.go
+++ b/vendor/golang.org/x/net/bpf/instructions.go
@@ -198,7 +198,7 @@ func (a LoadConstant) Assemble() (RawInstruction, error) {
 	return assembleLoad(a.Dst, 4, opAddrModeImmediate, a.Val)
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadConstant) String() string {
 	switch a.Dst {
 	case RegA:
@@ -224,7 +224,7 @@ func (a LoadScratch) Assemble() (RawInstruction, error) {
 	return assembleLoad(a.Dst, 4, opAddrModeScratch, uint32(a.N))
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadScratch) String() string {
 	switch a.Dst {
 	case RegA:
@@ -248,7 +248,7 @@ func (a LoadAbsolute) Assemble() (RawInstruction, error) {
 	return assembleLoad(RegA, a.Size, opAddrModeAbsolute, a.Off)
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadAbsolute) String() string {
 	switch a.Size {
 	case 1: // byte
@@ -277,7 +277,7 @@ func (a LoadIndirect) Assemble() (RawInstruction, error) {
 	return assembleLoad(RegA, a.Size, opAddrModeIndirect, a.Off)
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadIndirect) String() string {
 	switch a.Size {
 	case 1: // byte
@@ -306,7 +306,7 @@ func (a LoadMemShift) Assemble() (RawInstruction, error) {
 	return assembleLoad(RegX, 1, opAddrModeMemShift, a.Off)
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadMemShift) String() string {
 	return fmt.Sprintf("ldx 4*([%d]&0xf)", a.Off)
 }
@@ -325,7 +325,7 @@ func (a LoadExtension) Assemble() (RawInstruction, error) {
 	return assembleLoad(RegA, 4, opAddrModeAbsolute, uint32(extOffset+a.Num))
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a LoadExtension) String() string {
 	switch a.Num {
 	case ExtLen:
@@ -392,7 +392,7 @@ func (a StoreScratch) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a StoreScratch) String() string {
 	switch a.Src {
 	case RegA:
@@ -418,7 +418,7 @@ func (a ALUOpConstant) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a ALUOpConstant) String() string {
 	switch a.Op {
 	case ALUOpAdd:
@@ -458,7 +458,7 @@ func (a ALUOpX) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a ALUOpX) String() string {
 	switch a.Op {
 	case ALUOpAdd:
@@ -496,7 +496,7 @@ func (a NegateA) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a NegateA) String() string {
 	return fmt.Sprintf("neg")
 }
@@ -514,7 +514,7 @@ func (a Jump) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a Jump) String() string {
 	return fmt.Sprintf("ja %d", a.Skip)
 }
@@ -566,7 +566,7 @@ func (a JumpIf) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a JumpIf) String() string {
 	switch a.Cond {
 	// K == A
@@ -621,7 +621,7 @@ func (a RetA) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a RetA) String() string {
 	return fmt.Sprintf("ret a")
 }
@@ -639,7 +639,7 @@ func (a RetConstant) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a RetConstant) String() string {
 	return fmt.Sprintf("ret #%d", a.Val)
 }
@@ -654,7 +654,7 @@ func (a TXA) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a TXA) String() string {
 	return fmt.Sprintf("txa")
 }
@@ -669,7 +669,7 @@ func (a TAX) Assemble() (RawInstruction, error) {
 	}, nil
 }
 
-// String returns the the instruction in assembler notation.
+// String returns the instruction in assembler notation.
 func (a TAX) String() string {
 	return fmt.Sprintf("tax")
 }
diff --git a/vendor/golang.org/x/net/bpf/setter.go b/vendor/golang.org/x/net/bpf/setter.go
new file mode 100644
index 00000000..43e35f0a
--- /dev/null
+++ b/vendor/golang.org/x/net/bpf/setter.go
@@ -0,0 +1,10 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bpf
+
+// A Setter is a type which can attach a compiled BPF filter to itself.
+type Setter interface {
+	SetBPF(filter []RawInstruction) error
+}
diff --git a/vendor/golang.org/x/net/context/context.go b/vendor/golang.org/x/net/context/context.go
index f143ed6a..a3c021d3 100644
--- a/vendor/golang.org/x/net/context/context.go
+++ b/vendor/golang.org/x/net/context/context.go
@@ -5,6 +5,8 @@
 // Package context defines the Context type, which carries deadlines,
 // cancelation signals, and other request-scoped values across API boundaries
 // and between processes.
+// As of Go 1.7 this package is available in the standard library under the
+// name context. https://golang.org/pkg/context.
 //
 // Incoming requests to a server should create a Context, and outgoing calls to
 // servers should accept a Context. The chain of function calls between must
@@ -36,103 +38,6 @@
 // Contexts.
 package context // import "golang.org/x/net/context"
 
-import "time"
-
-// A Context carries a deadline, a cancelation signal, and other values across
-// API boundaries.
-//
-// Context's methods may be called by multiple goroutines simultaneously.
-type Context interface {
-	// Deadline returns the time when work done on behalf of this context
-	// should be canceled. Deadline returns ok==false when no deadline is
-	// set. Successive calls to Deadline return the same results.
-	Deadline() (deadline time.Time, ok bool)
-
-	// Done returns a channel that's closed when work done on behalf of this
-	// context should be canceled. Done may return nil if this context can
-	// never be canceled. Successive calls to Done return the same value.
-	//
-	// WithCancel arranges for Done to be closed when cancel is called;
-	// WithDeadline arranges for Done to be closed when the deadline
-	// expires; WithTimeout arranges for Done to be closed when the timeout
-	// elapses.
-	//
-	// Done is provided for use in select statements:
-	//
-	//  // Stream generates values with DoSomething and sends them to out
-	//  // until DoSomething returns an error or ctx.Done is closed.
-	//  func Stream(ctx context.Context, out chan<- Value) error {
-	//  	for {
-	//  		v, err := DoSomething(ctx)
-	//  		if err != nil {
-	//  			return err
-	//  		}
-	//  		select {
-	//  		case <-ctx.Done():
-	//  			return ctx.Err()
-	//  		case out <- v:
-	//  		}
-	//  	}
-	//  }
-	//
-	// See http://blog.golang.org/pipelines for more examples of how to use
-	// a Done channel for cancelation.
-	Done() <-chan struct{}
-
-	// Err returns a non-nil error value after Done is closed. Err returns
-	// Canceled if the context was canceled or DeadlineExceeded if the
-	// context's deadline passed. No other values for Err are defined.
-	// After Done is closed, successive calls to Err return the same value.
-	Err() error
-
-	// Value returns the value associated with this context for key, or nil
-	// if no value is associated with key. Successive calls to Value with
-	// the same key returns the same result.
-	//
-	// Use context values only for request-scoped data that transits
-	// processes and API boundaries, not for passing optional parameters to
-	// functions.
-	//
-	// A key identifies a specific value in a Context. Functions that wish
-	// to store values in Context typically allocate a key in a global
-	// variable then use that key as the argument to context.WithValue and
-	// Context.Value. A key can be any type that supports equality;
-	// packages should define keys as an unexported type to avoid
-	// collisions.
-	//
-	// Packages that define a Context key should provide type-safe accessors
-	// for the values stores using that key:
-	//
-	// 	// Package user defines a User type that's stored in Contexts.
-	// 	package user
-	//
-	// 	import "golang.org/x/net/context"
-	//
-	// 	// User is the type of value stored in the Contexts.
-	// 	type User struct {...}
-	//
-	// 	// key is an unexported type for keys defined in this package.
-	// 	// This prevents collisions with keys defined in other packages.
-	// 	type key int
-	//
-	// 	// userKey is the key for user.User values in Contexts. It is
-	// 	// unexported; clients use user.NewContext and user.FromContext
-	// 	// instead of using this key directly.
-	// 	var userKey key = 0
-	//
-	// 	// NewContext returns a new Context that carries value u.
-	// 	func NewContext(ctx context.Context, u *User) context.Context {
-	// 		return context.WithValue(ctx, userKey, u)
-	// 	}
-	//
-	// 	// FromContext returns the User value stored in ctx, if any.
-	// 	func FromContext(ctx context.Context) (*User, bool) {
-	// 		u, ok := ctx.Value(userKey).(*User)
-	// 		return u, ok
-	// 	}
-	Value(key interface{}) interface{}
-}
-
 // Background returns a non-nil, empty Context. It is never canceled, has no
 // values, and has no deadline. It is typically used by the main function,
 // initialization, and tests, and as the top-level Context for incoming
@@ -149,8 +54,3 @@ func Background() Context {
 	return todo
 }
-
-// A CancelFunc tells an operation to abandon its work.
-// A CancelFunc does not wait for the work to stop.
-// After the first call, subsequent calls to a CancelFunc do nothing.
-type CancelFunc func() diff --git a/vendor/golang.org/x/net/context/go19.go b/vendor/golang.org/x/net/context/go19.go new file mode 100644 index 00000000..d88bd1db --- /dev/null +++ b/vendor/golang.org/x/net/context/go19.go @@ -0,0 +1,20 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +package context + +import "context" // standard library's context, as of Go 1.7 + +// A Context carries a deadline, a cancelation signal, and other values across +// API boundaries. +// +// Context's methods may be called by multiple goroutines simultaneously. +type Context = context.Context + +// A CancelFunc tells an operation to abandon its work. +// A CancelFunc does not wait for the work to stop. +// After the first call, subsequent calls to a CancelFunc do nothing. +type CancelFunc = context.CancelFunc diff --git a/vendor/golang.org/x/net/context/pre_go19.go b/vendor/golang.org/x/net/context/pre_go19.go new file mode 100644 index 00000000..b105f80b --- /dev/null +++ b/vendor/golang.org/x/net/context/pre_go19.go @@ -0,0 +1,109 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.9 + +package context + +import "time" + +// A Context carries a deadline, a cancelation signal, and other values across +// API boundaries. +// +// Context's methods may be called by multiple goroutines simultaneously. +type Context interface { + // Deadline returns the time when work done on behalf of this context + // should be canceled. Deadline returns ok==false when no deadline is + // set. Successive calls to Deadline return the same results. + Deadline() (deadline time.Time, ok bool) + + // Done returns a channel that's closed when work done on behalf of this + // context should be canceled. Done may return nil if this context can + // never be canceled. Successive calls to Done return the same value. + // + // WithCancel arranges for Done to be closed when cancel is called; + // WithDeadline arranges for Done to be closed when the deadline + // expires; WithTimeout arranges for Done to be closed when the timeout + // elapses. + // + // Done is provided for use in select statements: + // + // // Stream generates values with DoSomething and sends them to out + // // until DoSomething returns an error or ctx.Done is closed. + // func Stream(ctx context.Context, out chan<- Value) error { + // for { + // v, err := DoSomething(ctx) + // if err != nil { + // return err + // } + // select { + // case <-ctx.Done(): + // return ctx.Err() + // case out <- v: + // } + // } + // } + // + // See http://blog.golang.org/pipelines for more examples of how to use + // a Done channel for cancelation. + Done() <-chan struct{} + + // Err returns a non-nil error value after Done is closed. Err returns + // Canceled if the context was canceled or DeadlineExceeded if the + // context's deadline passed. No other values for Err are defined. + // After Done is closed, successive calls to Err return the same value. + Err() error + + // Value returns the value associated with this context for key, or nil + // if no value is associated with key. Successive calls to Value with + // the same key returns the same result. + // + // Use context values only for request-scoped data that transits + // processes and API boundaries, not for passing optional parameters to + // functions. 
+ // + // A key identifies a specific value in a Context. Functions that wish + // to store values in Context typically allocate a key in a global + // variable then use that key as the argument to context.WithValue and + // Context.Value. A key can be any type that supports equality; + // packages should define keys as an unexported type to avoid + // collisions. + // + // Packages that define a Context key should provide type-safe accessors + // for the values stores using that key: + // + // // Package user defines a User type that's stored in Contexts. + // package user + // + // import "golang.org/x/net/context" + // + // // User is the type of value stored in the Contexts. + // type User struct {...} + // + // // key is an unexported type for keys defined in this package. + // // This prevents collisions with keys defined in other packages. + // type key int + // + // // userKey is the key for user.User values in Contexts. It is + // // unexported; clients use user.NewContext and user.FromContext + // // instead of using this key directly. + // var userKey key = 0 + // + // // NewContext returns a new Context that carries value u. + // func NewContext(ctx context.Context, u *User) context.Context { + // return context.WithValue(ctx, userKey, u) + // } + // + // // FromContext returns the User value stored in ctx, if any. + // func FromContext(ctx context.Context) (*User, bool) { + // u, ok := ctx.Value(userKey).(*User) + // return u, ok + // } + Value(key interface{}) interface{} +} + +// A CancelFunc tells an operation to abandon its work. +// A CancelFunc does not wait for the work to stop. +// After the first call, subsequent calls to a CancelFunc do nothing. +type CancelFunc func() diff --git a/vendor/golang.org/x/net/context/withtimeout_test.go b/vendor/golang.org/x/net/context/withtimeout_test.go index a6754dc3..e6f56691 100644 --- a/vendor/golang.org/x/net/context/withtimeout_test.go +++ b/vendor/golang.org/x/net/context/withtimeout_test.go @@ -11,16 +11,21 @@ import ( "golang.org/x/net/context" ) +// This example passes a context with a timeout to tell a blocking function that +// it should abandon its work after the timeout elapses. func ExampleWithTimeout() { // Pass a context with a timeout to tell a blocking function that it // should abandon its work after the timeout elapses. - ctx, _ := context.WithTimeout(context.Background(), 100*time.Millisecond) + ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond) + defer cancel() + select { - case <-time.After(200 * time.Millisecond): + case <-time.After(1 * time.Second): fmt.Println("overslept") case <-ctx.Done(): fmt.Println(ctx.Err()) // prints "context deadline exceeded" } + // Output: // context deadline exceeded } diff --git a/vendor/golang.org/x/net/dns/dnsmessage/example_test.go b/vendor/golang.org/x/net/dns/dnsmessage/example_test.go new file mode 100644 index 00000000..8600a6bc --- /dev/null +++ b/vendor/golang.org/x/net/dns/dnsmessage/example_test.go @@ -0,0 +1,132 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package dnsmessage_test + +import ( + "fmt" + "net" + "strings" + + "golang.org/x/net/dns/dnsmessage" +) + +func mustNewName(name string) dnsmessage.Name { + n, err := dnsmessage.NewName(name) + if err != nil { + panic(err) + } + return n +} + +func ExampleParser() { + msg := dnsmessage.Message{ + Header: dnsmessage.Header{Response: true, Authoritative: true}, + Questions: []dnsmessage.Question{ + { + Name: mustNewName("foo.bar.example.com."), + Type: dnsmessage.TypeA, + Class: dnsmessage.ClassINET, + }, + { + Name: mustNewName("bar.example.com."), + Type: dnsmessage.TypeA, + Class: dnsmessage.ClassINET, + }, + }, + Answers: []dnsmessage.Resource{ + { + Header: dnsmessage.ResourceHeader{ + Name: mustNewName("foo.bar.example.com."), + Type: dnsmessage.TypeA, + Class: dnsmessage.ClassINET, + }, + Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 1}}, + }, + { + Header: dnsmessage.ResourceHeader{ + Name: mustNewName("bar.example.com."), + Type: dnsmessage.TypeA, + Class: dnsmessage.ClassINET, + }, + Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 2}}, + }, + }, + } + + buf, err := msg.Pack() + if err != nil { + panic(err) + } + + wantName := "bar.example.com." + + var p dnsmessage.Parser + if _, err := p.Start(buf); err != nil { + panic(err) + } + + for { + q, err := p.Question() + if err == dnsmessage.ErrSectionDone { + break + } + if err != nil { + panic(err) + } + + if q.Name.String() != wantName { + continue + } + + fmt.Println("Found question for name", wantName) + if err := p.SkipAllQuestions(); err != nil { + panic(err) + } + break + } + + var gotIPs []net.IP + for { + h, err := p.AnswerHeader() + if err == dnsmessage.ErrSectionDone { + break + } + if err != nil { + panic(err) + } + + if (h.Type != dnsmessage.TypeA && h.Type != dnsmessage.TypeAAAA) || h.Class != dnsmessage.ClassINET { + continue + } + + if !strings.EqualFold(h.Name.String(), wantName) { + if err := p.SkipAnswer(); err != nil { + panic(err) + } + continue + } + + switch h.Type { + case dnsmessage.TypeA: + r, err := p.AResource() + if err != nil { + panic(err) + } + gotIPs = append(gotIPs, r.A[:]) + case dnsmessage.TypeAAAA: + r, err := p.AAAAResource() + if err != nil { + panic(err) + } + gotIPs = append(gotIPs, r.AAAA[:]) + } + } + + fmt.Printf("Found A/AAAA records for name %s: %v\n", wantName, gotIPs) + + // Output: + // Found question for name bar.example.com. + // Found A/AAAA records for name bar.example.com.: [127.0.0.2] +} diff --git a/vendor/golang.org/x/net/dns/dnsmessage/message.go b/vendor/golang.org/x/net/dns/dnsmessage/message.go index da43b0ba..38f81774 100644 --- a/vendor/golang.org/x/net/dns/dnsmessage/message.go +++ b/vendor/golang.org/x/net/dns/dnsmessage/message.go @@ -5,6 +5,9 @@ // Package dnsmessage provides a mostly RFC 1035 compliant implementation of // DNS message packing and unpacking. // +// The package also supports messages with Extension Mechanisms for DNS +// (EDNS(0)) as defined in RFC 6891. +// // This implementation is designed to minimize heap allocations and avoid // unnecessary packing and unpacking as much as possible. package dnsmessage @@ -13,7 +16,7 @@ import ( "errors" ) -// Packet formats +// Message formats // A Type is a type of DNS request and response. 
type Type uint16 @@ -39,6 +42,7 @@ const ( TypeTXT Type = 16 TypeAAAA Type = 28 TypeSRV Type = 33 + TypeOPT Type = 41 // Question.Type TypeWKS Type = 11 @@ -68,18 +72,19 @@ const ( var ( // ErrNotStarted indicates that the prerequisite information isn't // available yet because the previous records haven't been appropriately - // parsed or skipped. - ErrNotStarted = errors.New("parsing of this type isn't available yet") + // parsed, skipped or finished. + ErrNotStarted = errors.New("parsing/packing of this type isn't available yet") // ErrSectionDone indicated that all records in the section have been - // parsed. - ErrSectionDone = errors.New("parsing of this section has completed") + // parsed or finished. + ErrSectionDone = errors.New("parsing/packing of this section has completed") errBaseLen = errors.New("insufficient data for base length type") errCalcLen = errors.New("insufficient data for calculated length type") errReserved = errors.New("segment prefix is reserved") errTooManyPtr = errors.New("too many pointers (>10)") errInvalidPtr = errors.New("invalid pointer") + errNilResouceBody = errors.New("nil resource body") errResourceLen = errors.New("insufficient data for resource body length") errSegTooLong = errors.New("segment length too long") errZeroSegLen = errors.New("zero length segment") @@ -88,6 +93,30 @@ var ( errTooManyAnswers = errors.New("too many Answers to pack (>65535)") errTooManyAuthorities = errors.New("too many Authorities to pack (>65535)") errTooManyAdditionals = errors.New("too many Additionals to pack (>65535)") + errNonCanonicalName = errors.New("name is not in canonical format (it must end with a .)") + errStringTooLong = errors.New("character string exceeds maximum length (255)") + errCompressedSRV = errors.New("compressed name in SRV resource data") +) + +// Internal constants. +const ( + // packStartingCap is the default initial buffer size allocated during + // packing. + // + // The starting capacity doesn't matter too much, but most DNS responses + // Will be <= 512 bytes as it is the limit for DNS over UDP. + packStartingCap = 512 + + // uint16Len is the length (in bytes) of a uint16. + uint16Len = 2 + + // uint32Len is the length (in bytes) of a uint32. + uint32Len = 4 + + // headerLen is the length (in bytes) of a DNS header. + // + // A header is comprised of 6 uint16s and no padding. + headerLen = 6 * uint16Len ) type nestedError struct { @@ -148,7 +177,8 @@ type Message struct { type section uint8 const ( - sectionHeader section = iota + sectionNotStarted section = iota + sectionHeader sectionQuestions sectionAnswers sectionAuthorities @@ -194,6 +224,7 @@ func (h *header) count(sec section) uint16 { return 0 } +// pack appends the wire format of the header to msg. func (h *header) pack(msg []byte) []byte { msg = packUint16(msg, h.id) msg = packUint16(msg, h.bits) @@ -241,37 +272,40 @@ func (h *header) header() Header { } // A Resource is a DNS resource record. -type Resource interface { - // Header return's the Resource's ResourceHeader. - Header() *ResourceHeader +type Resource struct { + Header ResourceHeader + Body ResourceBody +} +// A ResourceBody is a DNS resource record minus the header. +type ResourceBody interface { // pack packs a Resource except for its header. - pack(msg []byte, compression map[string]int) ([]byte, error) + pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) // realType returns the actual type of the Resource. This is used to // fill in the header Type field. 
realType() Type } -func packResource(msg []byte, resource Resource, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the Resource to msg. +func (r *Resource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + if r.Body == nil { + return msg, errNilResouceBody + } oldMsg := msg - resource.Header().Type = resource.realType() - msg, length, err := resource.Header().pack(msg, compression) + r.Header.Type = r.Body.realType() + msg, length, err := r.Header.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"ResourceHeader", err} } preLen := len(msg) - msg, err = resource.pack(msg, compression) + msg, err = r.Body.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"content", err} } - conLen := len(msg) - preLen - if conLen > int(^uint16(0)) { - return oldMsg, errResTooLong + if err := r.Header.fixLen(msg, length, preLen); err != nil { + return oldMsg, err } - // Fill in the length now that we know how long the content is. - packUint16(length[:0], uint16(conLen)) - resource.Header().Length = uint16(conLen) return msg, nil } @@ -330,14 +364,15 @@ func (p *Parser) checkAdvance(sec section) error { func (p *Parser) resource(sec section) (Resource, error) { var r Resource - hdr, err := p.resourceHeader(sec) + var err error + r.Header, err = p.resourceHeader(sec) if err != nil { return r, err } p.resHeaderValid = false - r, p.off, err = unpackResource(p.msg, p.off, hdr) + r.Body, p.off, err = unpackResourceBody(p.msg, p.off, r.Header) if err != nil { - return nil, &nestedError{"unpacking " + sectionNames[sec], err} + return Resource{}, &nestedError{"unpacking " + sectionNames[sec], err} } p.index++ return r, nil @@ -389,7 +424,8 @@ func (p *Parser) Question() (Question, error) { if err := p.checkAdvance(sectionQuestions); err != nil { return Question{}, err } - name, off, err := unpackName(p.msg, p.off) + var name Name + off, err := name.unpack(p.msg, p.off) if err != nil { return Question{}, &nestedError{"unpacking Question.Name", err} } @@ -408,7 +444,13 @@ func (p *Parser) Question() (Question, error) { // AllQuestions parses all Questions. func (p *Parser) AllQuestions() ([]Question, error) { - qs := make([]Question, 0, p.header.questions) + // Multiple questions are valid according to the spec, + // but servers don't actually support them. There will + // be at most one question here. + // + // Do not pre-allocate based on info in p.header, since + // the data is untrusted. + qs := []Question{} for { q, err := p.Question() if err == ErrSectionDone { @@ -464,7 +506,16 @@ func (p *Parser) Answer() (Resource, error) { // AllAnswers parses all Answer Resources. func (p *Parser) AllAnswers() ([]Resource, error) { - as := make([]Resource, 0, p.header.answers) + // The most common query is for A/AAAA, which usually returns + // a handful of IPs. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. + n := int(p.header.answers) + if n > 20 { + n = 20 + } + as := make([]Resource, 0, n) for { a, err := p.Answer() if err == ErrSectionDone { @@ -505,7 +556,16 @@ func (p *Parser) Authority() (Resource, error) { // AllAuthorities parses all Authority Resources. func (p *Parser) AllAuthorities() ([]Resource, error) { - as := make([]Resource, 0, p.header.authorities) + // Authorities contains SOA in case of NXDOMAIN and friends, + // otherwise it is empty. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. 
+ n := int(p.header.authorities) + if n > 10 { + n = 10 + } + as := make([]Resource, 0, n) for { a, err := p.Authority() if err == ErrSectionDone { @@ -546,7 +606,16 @@ func (p *Parser) Additional() (Resource, error) { // AllAdditionals parses all Additional Resources. func (p *Parser) AllAdditionals() ([]Resource, error) { - as := make([]Resource, 0, p.header.additionals) + // Additionals usually contain OPT, and sometimes A/AAAA + // glue records. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. + n := int(p.header.additionals) + if n > 10 { + n = 10 + } + as := make([]Resource, 0, n) for { a, err := p.Additional() if err == ErrSectionDone { @@ -575,6 +644,186 @@ func (p *Parser) SkipAllAdditionals() error { } } +// CNAMEResource parses a single CNAMEResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) CNAMEResource() (CNAMEResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeCNAME { + return CNAMEResource{}, ErrNotStarted + } + r, err := unpackCNAMEResource(p.msg, p.off) + if err != nil { + return CNAMEResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// MXResource parses a single MXResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) MXResource() (MXResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeMX { + return MXResource{}, ErrNotStarted + } + r, err := unpackMXResource(p.msg, p.off) + if err != nil { + return MXResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// NSResource parses a single NSResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) NSResource() (NSResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeNS { + return NSResource{}, ErrNotStarted + } + r, err := unpackNSResource(p.msg, p.off) + if err != nil { + return NSResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// PTRResource parses a single PTRResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) PTRResource() (PTRResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypePTR { + return PTRResource{}, ErrNotStarted + } + r, err := unpackPTRResource(p.msg, p.off) + if err != nil { + return PTRResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// SOAResource parses a single SOAResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) SOAResource() (SOAResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeSOA { + return SOAResource{}, ErrNotStarted + } + r, err := unpackSOAResource(p.msg, p.off) + if err != nil { + return SOAResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// TXTResource parses a single TXTResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. 
+func (p *Parser) TXTResource() (TXTResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeTXT { + return TXTResource{}, ErrNotStarted + } + r, err := unpackTXTResource(p.msg, p.off, p.resHeader.Length) + if err != nil { + return TXTResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// SRVResource parses a single SRVResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) SRVResource() (SRVResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeSRV { + return SRVResource{}, ErrNotStarted + } + r, err := unpackSRVResource(p.msg, p.off) + if err != nil { + return SRVResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// AResource parses a single AResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) AResource() (AResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeA { + return AResource{}, ErrNotStarted + } + r, err := unpackAResource(p.msg, p.off) + if err != nil { + return AResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// AAAAResource parses a single AAAAResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) AAAAResource() (AAAAResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeAAAA { + return AAAAResource{}, ErrNotStarted + } + r, err := unpackAAAAResource(p.msg, p.off) + if err != nil { + return AAAAResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + +// OPTResource parses a single OPTResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) OPTResource() (OPTResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeOPT { + return OPTResource{}, ErrNotStarted + } + r, err := unpackOPTResource(p.msg, p.off, p.resHeader.Length) + if err != nil { + return OPTResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + // Unpack parses a full Message. func (m *Message) Unpack(msg []byte) error { var p Parser @@ -599,6 +848,12 @@ func (m *Message) Unpack(msg []byte) error { // Pack packs a full Message. func (m *Message) Pack() ([]byte, error) { + return m.AppendPack(make([]byte, 0, packStartingCap)) +} + +// AppendPack is like Pack but appends the full Message to b and returns the +// extended buffer. +func (m *Message) AppendPack(b []byte) ([]byte, error) { // Validate the lengths. It is very unlikely that anyone will try to // pack more than 65535 of any particular type, but it is possible and // we should fail gracefully. @@ -623,47 +878,40 @@ func (m *Message) Pack() ([]byte, error) { h.authorities = uint16(len(m.Authorities)) h.additionals = uint16(len(m.Additionals)) - // The starting capacity doesn't matter too much, but most DNS responses - // Will be <= 512 bytes as it is the limit for DNS over UDP. - msg := make([]byte, 0, 512) - - msg = h.pack(msg) + compressionOff := len(b) + msg := h.pack(b) // RFC 1035 allows (but does not require) compression for packing. RFC // 1035 requires unpacking implementations to support compression, so // unconditionally enabling it is fine. 
// // DNS lookups are typically done over UDP, and RFC 1035 states that UDP - // DNS packets can be a maximum of 512 bytes long. Without compression, - // many DNS response packets are over this limit, so enabling + // DNS messages can be a maximum of 512 bytes long. Without compression, + // many DNS response messages are over this limit, so enabling // compression will help ensure compliance. compression := map[string]int{} - for _, q := range m.Questions { + for i := range m.Questions { var err error - msg, err = q.pack(msg, compression) - if err != nil { + if msg, err = m.Questions[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Question", err} } } - for _, a := range m.Answers { + for i := range m.Answers { var err error - msg, err = packResource(msg, a, compression) - if err != nil { + if msg, err = m.Answers[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Answer", err} } } - for _, a := range m.Authorities { + for i := range m.Authorities { var err error - msg, err = packResource(msg, a, compression) - if err != nil { + if msg, err = m.Authorities[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Authority", err} } } - for _, a := range m.Additionals { + for i := range m.Additionals { var err error - msg, err = packResource(msg, a, compression) - if err != nil { + if msg, err = m.Additionals[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Additional", err} } } @@ -671,11 +919,427 @@ func (m *Message) Pack() ([]byte, error) { return msg, nil } -// An ResourceHeader is the header of a DNS resource record. There are +// A Builder allows incrementally packing a DNS message. +// +// Example usage: +// buf := make([]byte, 2, 514) +// b := NewBuilder(buf, Header{...}) +// b.EnableCompression() +// // Optionally start a section and add things to that section. +// // Repeat adding sections as necessary. +// buf, err := b.Finish() +// // If err is nil, buf[2:] will contain the built bytes. +type Builder struct { + // msg is the storage for the message being built. + msg []byte + + // section keeps track of the current section being built. + section section + + // header keeps track of what should go in the header when Finish is + // called. + header header + + // start is the starting index of the bytes allocated in msg for header. + start int + + // compression is a mapping from name suffixes to their starting index + // in msg. + compression map[string]int +} + +// NewBuilder creates a new builder with compression disabled. +// +// Note: Most users will want to immediately enable compression with the +// EnableCompression method. See that method's comment for why you may or may +// not want to enable compression. +// +// The DNS message is appended to the provided initial buffer buf (which may be +// nil) as it is built. The final message is returned by the (*Builder).Finish +// method, which may return the same underlying array if there was sufficient +// capacity in the slice. +func NewBuilder(buf []byte, h Header) Builder { + if buf == nil { + buf = make([]byte, 0, packStartingCap) + } + b := Builder{msg: buf, start: len(buf)} + b.header.id, b.header.bits = h.pack() + var hb [headerLen]byte + b.msg = append(b.msg, hb[:]...) + b.section = sectionHeader + return b +} + +// EnableCompression enables compression in the Builder. 
+// +// Leaving compression disabled avoids compression related allocations, but can +// result in larger message sizes. Be careful with this mode as it can cause +// messages to exceed the UDP size limit. +// +// According to RFC 1035, section 4.1.4, the use of compression is optional, but +// all implementations must accept both compressed and uncompressed DNS +// messages. +// +// Compression should be enabled before any sections are added for best results. +func (b *Builder) EnableCompression() { + b.compression = map[string]int{} +} + +func (b *Builder) startCheck(s section) error { + if b.section <= sectionNotStarted { + return ErrNotStarted + } + if b.section > s { + return ErrSectionDone + } + return nil +} + +// StartQuestions prepares the builder for packing Questions. +func (b *Builder) StartQuestions() error { + if err := b.startCheck(sectionQuestions); err != nil { + return err + } + b.section = sectionQuestions + return nil +} + +// StartAnswers prepares the builder for packing Answers. +func (b *Builder) StartAnswers() error { + if err := b.startCheck(sectionAnswers); err != nil { + return err + } + b.section = sectionAnswers + return nil +} + +// StartAuthorities prepares the builder for packing Authorities. +func (b *Builder) StartAuthorities() error { + if err := b.startCheck(sectionAuthorities); err != nil { + return err + } + b.section = sectionAuthorities + return nil +} + +// StartAdditionals prepares the builder for packing Additionals. +func (b *Builder) StartAdditionals() error { + if err := b.startCheck(sectionAdditionals); err != nil { + return err + } + b.section = sectionAdditionals + return nil +} + +func (b *Builder) incrementSectionCount() error { + var count *uint16 + var err error + switch b.section { + case sectionQuestions: + count = &b.header.questions + err = errTooManyQuestions + case sectionAnswers: + count = &b.header.answers + err = errTooManyAnswers + case sectionAuthorities: + count = &b.header.authorities + err = errTooManyAuthorities + case sectionAdditionals: + count = &b.header.additionals + err = errTooManyAdditionals + } + if *count == ^uint16(0) { + return err + } + *count++ + return nil +} + +// Question adds a single Question. +func (b *Builder) Question(q Question) error { + if b.section < sectionQuestions { + return ErrNotStarted + } + if b.section > sectionQuestions { + return ErrSectionDone + } + msg, err := q.pack(b.msg, b.compression, b.start) + if err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +func (b *Builder) checkResourceSection() error { + if b.section < sectionAnswers { + return ErrNotStarted + } + if b.section > sectionAdditionals { + return ErrSectionDone + } + return nil +} + +// CNAMEResource adds a single CNAMEResource. +func (b *Builder) CNAMEResource(h ResourceHeader, r CNAMEResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"CNAMEResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// MXResource adds a single MXResource. 
+func (b *Builder) MXResource(h ResourceHeader, r MXResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"MXResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// NSResource adds a single NSResource. +func (b *Builder) NSResource(h ResourceHeader, r NSResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"NSResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// PTRResource adds a single PTRResource. +func (b *Builder) PTRResource(h ResourceHeader, r PTRResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"PTRResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// SOAResource adds a single SOAResource. +func (b *Builder) SOAResource(h ResourceHeader, r SOAResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"SOAResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// TXTResource adds a single TXTResource. +func (b *Builder) TXTResource(h ResourceHeader, r TXTResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"TXTResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// SRVResource adds a single SRVResource. 
+func (b *Builder) SRVResource(h ResourceHeader, r SRVResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"SRVResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// AResource adds a single AResource. +func (b *Builder) AResource(h ResourceHeader, r AResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"AResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// AAAAResource adds a single AAAAResource. +func (b *Builder) AAAAResource(h ResourceHeader, r AAAAResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"AAAAResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// OPTResource adds a single OPTResource. +func (b *Builder) OPTResource(h ResourceHeader, r OPTResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, length, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"OPTResource body", err} + } + if err := h.fixLen(msg, length, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// Finish ends message building and generates a binary message. +func (b *Builder) Finish() ([]byte, error) { + if b.section < sectionHeader { + return nil, ErrNotStarted + } + b.section = sectionDone + // Space for the header was allocated in NewBuilder. + b.header.pack(b.msg[b.start:b.start]) + return b.msg, nil +} + +// A ResourceHeader is the header of a DNS resource record. There are // many types of DNS resource records, but they all share the same header. type ResourceHeader struct { // Name is the domain name for which this resource record pertains. - Name string + Name Name // Type is the type of DNS resource record. // @@ -697,17 +1361,13 @@ type ResourceHeader struct { Length uint16 } -// Header implements Resource.Header. -func (h *ResourceHeader) Header() *ResourceHeader { - return h -} - -// pack packs all of the fields in a ResourceHeader except for the length. The -// length bytes are returned as a slice so they can be filled in after the rest -// of the Resource has been packed. 
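[Editor's note, not part of the patch] The Builder introduced above documents only a short "Example usage" skeleton. A slightly fuller sketch of packing a one-question, one-answer response with it could look like the following; every identifier comes from the dnsmessage API added in this file, and only the concrete domain name and IP address are made up for illustration.

package main

import (
	"fmt"

	"golang.org/x/net/dns/dnsmessage"
)

func main() {
	// Names must be in canonical form (trailing dot), otherwise packing
	// fails with errNonCanonicalName.
	name, err := dnsmessage.NewName("example.com.")
	if err != nil {
		panic(err)
	}

	b := dnsmessage.NewBuilder(nil, dnsmessage.Header{Response: true, Authoritative: true})
	b.EnableCompression() // enable before adding sections so name suffixes can be reused

	if err := b.StartQuestions(); err != nil {
		panic(err)
	}
	if err := b.Question(dnsmessage.Question{Name: name, Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET}); err != nil {
		panic(err)
	}

	if err := b.StartAnswers(); err != nil {
		panic(err)
	}
	// AResource fills in the header Type itself via realType().
	hdr := dnsmessage.ResourceHeader{Name: name, Class: dnsmessage.ClassINET, TTL: 300}
	if err := b.AResource(hdr, dnsmessage.AResource{A: [4]byte{192, 0, 2, 1}}); err != nil {
		panic(err)
	}

	buf, err := b.Finish()
	if err != nil {
		panic(err)
	}
	fmt.Printf("packed %d bytes\n", len(buf))
}
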
-func (h *ResourceHeader) pack(oldMsg []byte, compression map[string]int) (msg []byte, length []byte, err error) { +// pack appends the wire format of the ResourceHeader to oldMsg. +// +// The bytes where length was packed are returned as a slice so they can be +// updated after the rest of the Resource has been packed. +func (h *ResourceHeader) pack(oldMsg []byte, compression map[string]int, compressionOff int) (msg []byte, length []byte, err error) { msg = oldMsg - if msg, err = packName(msg, h.Name, compression); err != nil { + if msg, err = h.Name.pack(msg, compression, compressionOff); err != nil { return oldMsg, nil, &nestedError{"Name", err} } msg = packType(msg, h.Type) @@ -715,13 +1375,13 @@ func (h *ResourceHeader) pack(oldMsg []byte, compression map[string]int) (msg [] msg = packUint32(msg, h.TTL) lenBegin := len(msg) msg = packUint16(msg, h.Length) - return msg, msg[lenBegin:], nil + return msg, msg[lenBegin : lenBegin+uint16Len], nil } func (h *ResourceHeader) unpack(msg []byte, off int) (int, error) { newOff := off var err error - if h.Name, newOff, err = unpackName(msg, newOff); err != nil { + if newOff, err = h.Name.unpack(msg, newOff); err != nil { return off, &nestedError{"Name", err} } if h.Type, newOff, err = unpackType(msg, newOff); err != nil { @@ -739,6 +1399,57 @@ func (h *ResourceHeader) unpack(msg []byte, off int) (int, error) { return newOff, nil } +func (h *ResourceHeader) fixLen(msg []byte, length []byte, preLen int) error { + conLen := len(msg) - preLen + if conLen > int(^uint16(0)) { + return errResTooLong + } + + // Fill in the length now that we know how long the content is. + packUint16(length[:0], uint16(conLen)) + h.Length = uint16(conLen) + + return nil +} + +// EDNS(0) wire costants. +const ( + edns0Version = 0 + + edns0DNSSECOK = 0x00008000 + ednsVersionMask = 0x00ff0000 + edns0DNSSECOKMask = 0x00ff8000 +) + +// SetEDNS0 configures h for EDNS(0). +// +// The provided extRCode must be an extedned RCode. +func (h *ResourceHeader) SetEDNS0(udpPayloadLen int, extRCode RCode, dnssecOK bool) error { + h.Name = Name{Data: [nameLen]byte{'.'}, Length: 1} // RFC 6891 section 6.1.2 + h.Type = TypeOPT + h.Class = Class(udpPayloadLen) + h.TTL = uint32(extRCode) >> 4 << 24 + if dnssecOK { + h.TTL |= edns0DNSSECOK + } + return nil +} + +// DNSSECAllowed reports whether the DNSSEC OK bit is set. +func (h *ResourceHeader) DNSSECAllowed() bool { + return h.TTL&edns0DNSSECOKMask == edns0DNSSECOK // RFC 6891 section 6.1.3 +} + +// ExtendedRCode returns an extended RCode. +// +// The provided rcode must be the RCode in DNS message header. +func (h *ResourceHeader) ExtendedRCode(rcode RCode) RCode { + if h.TTL&ednsVersionMask == edns0Version { // RFC 6891 section 6.1.3 + return RCode(h.TTL>>24<<4) | rcode + } + return rcode +} + func skipResource(msg []byte, off int) (int, error) { newOff, err := skipName(msg, off) if err != nil { @@ -763,24 +1474,26 @@ func skipResource(msg []byte, off int) (int, error) { return newOff, nil } +// packUint16 appends the wire format of field to msg. 
func packUint16(msg []byte, field uint16) []byte { return append(msg, byte(field>>8), byte(field)) } func unpackUint16(msg []byte, off int) (uint16, int, error) { - if off+2 > len(msg) { + if off+uint16Len > len(msg) { return 0, off, errBaseLen } - return uint16(msg[off])<<8 | uint16(msg[off+1]), off + 2, nil + return uint16(msg[off])<<8 | uint16(msg[off+1]), off + uint16Len, nil } func skipUint16(msg []byte, off int) (int, error) { - if off+2 > len(msg) { + if off+uint16Len > len(msg) { return off, errBaseLen } - return off + 2, nil + return off + uint16Len, nil } +// packType appends the wire format of field to msg. func packType(msg []byte, field Type) []byte { return packUint16(msg, uint16(field)) } @@ -794,6 +1507,7 @@ func skipType(msg []byte, off int) (int, error) { return skipUint16(msg, off) } +// packClass appends the wire format of field to msg. func packClass(msg []byte, field Class) []byte { return packUint16(msg, uint16(field)) } @@ -807,6 +1521,7 @@ func skipClass(msg []byte, off int) (int, error) { return skipUint16(msg, off) } +// packUint32 appends the wire format of field to msg. func packUint32(msg []byte, field uint32) []byte { return append( msg, @@ -818,31 +1533,30 @@ func packUint32(msg []byte, field uint32) []byte { } func unpackUint32(msg []byte, off int) (uint32, int, error) { - if off+4 > len(msg) { + if off+uint32Len > len(msg) { return 0, off, errBaseLen } v := uint32(msg[off])<<24 | uint32(msg[off+1])<<16 | uint32(msg[off+2])<<8 | uint32(msg[off+3]) - return v, off + 4, nil + return v, off + uint32Len, nil } func skipUint32(msg []byte, off int) (int, error) { - if off+4 > len(msg) { + if off+uint32Len > len(msg) { return off, errBaseLen } - return off + 4, nil + return off + uint32Len, nil } -func packText(msg []byte, field string) []byte { - for len(field) > 0 { - l := len(field) - if l > 255 { - l = 255 - } - msg = append(msg, byte(l)) - msg = append(msg, field[:l]...) - field = field[l:] +// packText appends the wire format of field to msg. +func packText(msg []byte, field string) ([]byte, error) { + l := len(field) + if l > 255 { + return nil, errStringTooLong } - return msg + msg = append(msg, byte(l)) + msg = append(msg, field...) + + return msg, nil } func unpackText(msg []byte, off int) (string, int, error) { @@ -868,6 +1582,7 @@ func skipText(msg []byte, off int) (int, error) { return endOff, nil } +// packBytes appends the wire format of field to msg. func packBytes(msg []byte, field []byte) []byte { return append(msg, field...) } @@ -889,30 +1604,53 @@ func skipBytes(msg []byte, off int, field []byte) (int, error) { return newOff, nil } -// packName packs a domain name. +const nameLen = 255 + +// A Name is a non-encoded domain name. It is used instead of strings to avoid +// allocations. +type Name struct { + Data [nameLen]byte + Length uint8 +} + +// NewName creates a new Name from a string. +func NewName(name string) (Name, error) { + if len([]byte(name)) > nameLen { + return Name{}, errCalcLen + } + n := Name{Length: uint8(len(name))} + copy(n.Data[:], []byte(name)) + return n, nil +} + +func (n Name) String() string { + return string(n.Data[:n.Length]) +} + +// pack appends the wire format of the Name to msg. // // Domain names are a sequence of counted strings split at the dots. They end // with a zero-length string. Compression can be used to reuse domain suffixes. // // The compression map will be updated with new domain suffixes. If compression // is nil, compression will not be used. 
-func packName(msg []byte, name string, compression map[string]int) ([]byte, error) { +func (n *Name) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg // Add a trailing dot to canonicalize name. - if n := len(name); n == 0 || name[n-1] != '.' { - name += "." + if n.Length == 0 || n.Data[n.Length-1] != '.' { + return oldMsg, errNonCanonicalName } // Allow root domain. - if name == "." { + if n.Data[0] == '.' && n.Length == 1 { return append(msg, 0), nil } // Emit sequence of counted strings, chopping at dots. - for i, begin := 0, 0; i < len(name); i++ { + for i, begin := 0, 0; i < int(n.Length); i++ { // Check for the end of the segment. - if name[i] == '.' { + if n.Data[i] == '.' { // The two most significant bits have special meaning. // It isn't allowed for segments to be long enough to // need them. @@ -928,7 +1666,7 @@ func packName(msg []byte, name string, compression map[string]int) ([]byte, erro msg = append(msg, byte(i-begin)) for j := begin; j < i; j++ { - msg = append(msg, name[j]) + msg = append(msg, n.Data[j]) } begin = i + 1 @@ -938,8 +1676,8 @@ func packName(msg []byte, name string, compression map[string]int) ([]byte, erro // We can only compress domain suffixes starting with a new // segment. A pointer is two bytes with the two most significant // bits set to 1 to indicate that it is a pointer. - if (i == 0 || name[i-1] == '.') && compression != nil { - if ptr, ok := compression[name[i:]]; ok { + if (i == 0 || n.Data[i-1] == '.') && compression != nil { + if ptr, ok := compression[string(n.Data[i:])]; ok { // Hit. Emit a pointer instead of the rest of // the domain. return append(msg, byte(ptr>>8|0xC0), byte(ptr)), nil @@ -948,15 +1686,19 @@ func packName(msg []byte, name string, compression map[string]int) ([]byte, erro // Miss. Add the suffix to the compression table if the // offset can be stored in the available 14 bytes. if len(msg) <= int(^uint16(0)>>2) { - compression[name[i:]] = len(msg) + compression[string(n.Data[i:])] = len(msg) - compressionOff } } } return append(msg, 0), nil } -// unpackName unpacks a domain name. -func unpackName(msg []byte, off int) (string, int, error) { +// unpack unpacks a domain name. +func (n *Name) unpack(msg []byte, off int) (int, error) { + return n.unpackCompressed(msg, off, true /* allowCompression */) +} + +func (n *Name) unpackCompressed(msg []byte, off int, allowCompression bool) (int, error) { // currOff is the current working offset. currOff := off @@ -965,15 +1707,16 @@ func unpackName(msg []byte, off int) (string, int, error) { // the usage of this name. newOff := off - // name is the domain name being unpacked. - name := make([]byte, 0, 255) - // ptr is the number of pointers followed. var ptr int + + // Name is a slice representation of the name data. + name := n.Data[:0] + Loop: for { if currOff >= len(msg) { - return "", off, errBaseLen + return off, errBaseLen } c := int(msg[currOff]) currOff++ @@ -985,14 +1728,17 @@ Loop: } endOff := currOff + c if endOff > len(msg) { - return "", off, errCalcLen + return off, errCalcLen } name = append(name, msg[currOff:endOff]...) name = append(name, '.') currOff = endOff case 0xC0: // Pointer + if !allowCompression { + return off, errCompressedSRV + } if currOff >= len(msg) { - return "", off, errInvalidPtr + return off, errInvalidPtr } c1 := msg[currOff] currOff++ @@ -1001,21 +1747,25 @@ Loop: } // Don't follow too many pointers, maybe there's a loop. 
if ptr++; ptr > 10 { - return "", off, errTooManyPtr + return off, errTooManyPtr } currOff = (c^0xC0)<<8 | int(c1) default: // Prefixes 0x80 and 0x40 are reserved. - return "", off, errReserved + return off, errReserved } } if len(name) == 0 { name = append(name, '.') } + if len(name) > len(n.Data) { + return off, errCalcLen + } + n.Length = uint8(len(name)) if ptr == 0 { newOff = currOff } - return string(name), newOff, nil + return newOff, nil } func skipName(msg []byte, off int) (int, error) { @@ -1061,13 +1811,14 @@ Loop: // A Question is a DNS query. type Question struct { - Name string + Name Name Type Type Class Class } -func (q *Question) pack(msg []byte, compression map[string]int) ([]byte, error) { - msg, err := packName(msg, q.Name, compression) +// pack appends the wire format of the Question to msg. +func (q *Question) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + msg, err := q.Name.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"Name", err} } @@ -1075,159 +1826,176 @@ func (q *Question) pack(msg []byte, compression map[string]int) ([]byte, error) return packClass(msg, q.Class), nil } -func unpackResource(msg []byte, off int, hdr ResourceHeader) (Resource, int, error) { +func unpackResourceBody(msg []byte, off int, hdr ResourceHeader) (ResourceBody, int, error) { var ( - r Resource + r ResourceBody err error name string ) switch hdr.Type { case TypeA: - r, err = unpackAResource(hdr, msg, off) + var rb AResource + rb, err = unpackAResource(msg, off) + r = &rb name = "A" case TypeNS: - r, err = unpackNSResource(hdr, msg, off) + var rb NSResource + rb, err = unpackNSResource(msg, off) + r = &rb name = "NS" case TypeCNAME: - r, err = unpackCNAMEResource(hdr, msg, off) + var rb CNAMEResource + rb, err = unpackCNAMEResource(msg, off) + r = &rb name = "CNAME" case TypeSOA: - r, err = unpackSOAResource(hdr, msg, off) + var rb SOAResource + rb, err = unpackSOAResource(msg, off) + r = &rb name = "SOA" case TypePTR: - r, err = unpackPTRResource(hdr, msg, off) + var rb PTRResource + rb, err = unpackPTRResource(msg, off) + r = &rb name = "PTR" case TypeMX: - r, err = unpackMXResource(hdr, msg, off) + var rb MXResource + rb, err = unpackMXResource(msg, off) + r = &rb name = "MX" case TypeTXT: - r, err = unpackTXTResource(hdr, msg, off) + var rb TXTResource + rb, err = unpackTXTResource(msg, off, hdr.Length) + r = &rb name = "TXT" case TypeAAAA: - r, err = unpackAAAAResource(hdr, msg, off) + var rb AAAAResource + rb, err = unpackAAAAResource(msg, off) + r = &rb name = "AAAA" case TypeSRV: - r, err = unpackSRVResource(hdr, msg, off) + var rb SRVResource + rb, err = unpackSRVResource(msg, off) + r = &rb name = "SRV" + case TypeOPT: + var rb OPTResource + rb, err = unpackOPTResource(msg, off, hdr.Length) + r = &rb + name = "OPT" } if err != nil { return nil, off, &nestedError{name + " record", err} } - if r != nil { - return r, off + int(hdr.Length), nil + if r == nil { + return nil, off, errors.New("invalid resource type: " + string(hdr.Type+'0')) } - return nil, off, errors.New("invalid resource type: " + string(hdr.Type+'0')) + return r, off + int(hdr.Length), nil } // A CNAMEResource is a CNAME Resource record. 
type CNAMEResource struct { - ResourceHeader - - CNAME string + CNAME Name } func (r *CNAMEResource) realType() Type { return TypeCNAME } -func (r *CNAMEResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return packName(msg, r.CNAME, compression) +// pack appends the wire format of the CNAMEResource to msg. +func (r *CNAMEResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.CNAME.pack(msg, compression, compressionOff) } -func unpackCNAMEResource(hdr ResourceHeader, msg []byte, off int) (*CNAMEResource, error) { - cname, _, err := unpackName(msg, off) - if err != nil { - return nil, err +func unpackCNAMEResource(msg []byte, off int) (CNAMEResource, error) { + var cname Name + if _, err := cname.unpack(msg, off); err != nil { + return CNAMEResource{}, err } - return &CNAMEResource{hdr, cname}, nil + return CNAMEResource{cname}, nil } // An MXResource is an MX Resource record. type MXResource struct { - ResourceHeader - Pref uint16 - MX string + MX Name } func (r *MXResource) realType() Type { return TypeMX } -func (r *MXResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the MXResource to msg. +func (r *MXResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg msg = packUint16(msg, r.Pref) - msg, err := packName(msg, r.MX, compression) + msg, err := r.MX.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"MXResource.MX", err} } return msg, nil } -func unpackMXResource(hdr ResourceHeader, msg []byte, off int) (*MXResource, error) { +func unpackMXResource(msg []byte, off int) (MXResource, error) { pref, off, err := unpackUint16(msg, off) if err != nil { - return nil, &nestedError{"Pref", err} + return MXResource{}, &nestedError{"Pref", err} } - mx, _, err := unpackName(msg, off) - if err != nil { - return nil, &nestedError{"MX", err} + var mx Name + if _, err := mx.unpack(msg, off); err != nil { + return MXResource{}, &nestedError{"MX", err} } - return &MXResource{hdr, pref, mx}, nil + return MXResource{pref, mx}, nil } // An NSResource is an NS Resource record. type NSResource struct { - ResourceHeader - - NS string + NS Name } func (r *NSResource) realType() Type { return TypeNS } -func (r *NSResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return packName(msg, r.NS, compression) +// pack appends the wire format of the NSResource to msg. +func (r *NSResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.NS.pack(msg, compression, compressionOff) } -func unpackNSResource(hdr ResourceHeader, msg []byte, off int) (*NSResource, error) { - ns, _, err := unpackName(msg, off) - if err != nil { - return nil, err +func unpackNSResource(msg []byte, off int) (NSResource, error) { + var ns Name + if _, err := ns.unpack(msg, off); err != nil { + return NSResource{}, err } - return &NSResource{hdr, ns}, nil + return NSResource{ns}, nil } // A PTRResource is a PTR Resource record. type PTRResource struct { - ResourceHeader - - PTR string + PTR Name } func (r *PTRResource) realType() Type { return TypePTR } -func (r *PTRResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return packName(msg, r.PTR, compression) +// pack appends the wire format of the PTRResource to msg. 
+func (r *PTRResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.PTR.pack(msg, compression, compressionOff) } -func unpackPTRResource(hdr ResourceHeader, msg []byte, off int) (*PTRResource, error) { - ptr, _, err := unpackName(msg, off) - if err != nil { - return nil, err +func unpackPTRResource(msg []byte, off int) (PTRResource, error) { + var ptr Name + if _, err := ptr.unpack(msg, off); err != nil { + return PTRResource{}, err } - return &PTRResource{hdr, ptr}, nil + return PTRResource{ptr}, nil } // An SOAResource is an SOA Resource record. type SOAResource struct { - ResourceHeader - - NS string - MBox string + NS Name + MBox Name Serial uint32 Refresh uint32 Retry uint32 @@ -1243,13 +2011,14 @@ func (r *SOAResource) realType() Type { return TypeSOA } -func (r *SOAResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the SOAResource to msg. +func (r *SOAResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg - msg, err := packName(msg, r.NS, compression) + msg, err := r.NS.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"SOAResource.NS", err} } - msg, err = packName(msg, r.MBox, compression) + msg, err = r.MBox.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"SOAResource.MBox", err} } @@ -1260,121 +2029,126 @@ func (r *SOAResource) pack(msg []byte, compression map[string]int) ([]byte, erro return packUint32(msg, r.MinTTL), nil } -func unpackSOAResource(hdr ResourceHeader, msg []byte, off int) (*SOAResource, error) { - ns, off, err := unpackName(msg, off) +func unpackSOAResource(msg []byte, off int) (SOAResource, error) { + var ns Name + off, err := ns.unpack(msg, off) if err != nil { - return nil, &nestedError{"NS", err} + return SOAResource{}, &nestedError{"NS", err} } - mbox, off, err := unpackName(msg, off) - if err != nil { - return nil, &nestedError{"MBox", err} + var mbox Name + if off, err = mbox.unpack(msg, off); err != nil { + return SOAResource{}, &nestedError{"MBox", err} } serial, off, err := unpackUint32(msg, off) if err != nil { - return nil, &nestedError{"Serial", err} + return SOAResource{}, &nestedError{"Serial", err} } refresh, off, err := unpackUint32(msg, off) if err != nil { - return nil, &nestedError{"Refresh", err} + return SOAResource{}, &nestedError{"Refresh", err} } retry, off, err := unpackUint32(msg, off) if err != nil { - return nil, &nestedError{"Retry", err} + return SOAResource{}, &nestedError{"Retry", err} } expire, off, err := unpackUint32(msg, off) if err != nil { - return nil, &nestedError{"Expire", err} + return SOAResource{}, &nestedError{"Expire", err} } minTTL, _, err := unpackUint32(msg, off) if err != nil { - return nil, &nestedError{"MinTTL", err} + return SOAResource{}, &nestedError{"MinTTL", err} } - return &SOAResource{hdr, ns, mbox, serial, refresh, retry, expire, minTTL}, nil + return SOAResource{ns, mbox, serial, refresh, retry, expire, minTTL}, nil } // A TXTResource is a TXT Resource record. type TXTResource struct { - ResourceHeader - - Txt string // Not a domain name. + TXT []string } func (r *TXTResource) realType() Type { return TypeTXT } -func (r *TXTResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return packText(msg, r.Txt), nil +// pack appends the wire format of the TXTResource to msg. 
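The TXTResource change above (Txt string becomes TXT []string) reflects the wire format: TXT RDATA is a sequence of <character-string>s, each a single length octet followed by at most 255 bytes, which the pack/unpack code that follows enforces (and which is why a 256-byte string is rejected in the tests). A rough self-contained sketch of that framing, assuming nothing beyond the RFC 1035 layout; appendCharacterString is an illustrative helper, not the package's packText:

package main

import (
	"errors"
	"fmt"
)

// appendCharacterString appends one DNS <character-string>:
// a length octet followed by the string's bytes.
func appendCharacterString(msg []byte, s string) ([]byte, error) {
	if len(s) > 255 {
		return msg, errors.New("string too long for a single length octet")
	}
	msg = append(msg, byte(len(s)))
	return append(msg, s...), nil
}

func main() {
	var msg []byte
	for _, s := range []string{"foo bar", "www.example.com"} {
		var err error
		if msg, err = appendCharacterString(msg, s); err != nil {
			panic(err)
		}
	}
	fmt.Printf("%d bytes of TXT RDATA: % x\n", len(msg), msg)
}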
+func (r *TXTResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + oldMsg := msg + for _, s := range r.TXT { + var err error + msg, err = packText(msg, s) + if err != nil { + return oldMsg, err + } + } + return msg, nil } -func unpackTXTResource(hdr ResourceHeader, msg []byte, off int) (*TXTResource, error) { - var txt string - for n := uint16(0); n < hdr.Length; { +func unpackTXTResource(msg []byte, off int, length uint16) (TXTResource, error) { + txts := make([]string, 0, 1) + for n := uint16(0); n < length; { var t string var err error if t, off, err = unpackText(msg, off); err != nil { - return nil, &nestedError{"text", err} + return TXTResource{}, &nestedError{"text", err} } // Check if we got too many bytes. - if hdr.Length-n < uint16(len(t))+1 { - return nil, errCalcLen + if length-n < uint16(len(t))+1 { + return TXTResource{}, errCalcLen } n += uint16(len(t)) + 1 - txt += t + txts = append(txts, t) } - return &TXTResource{hdr, txt}, nil + return TXTResource{txts}, nil } // An SRVResource is an SRV Resource record. type SRVResource struct { - ResourceHeader - Priority uint16 Weight uint16 Port uint16 - Target string // Not compressed as per RFC 2782. + Target Name // Not compressed as per RFC 2782. } func (r *SRVResource) realType() Type { return TypeSRV } -func (r *SRVResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the SRVResource to msg. +func (r *SRVResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg msg = packUint16(msg, r.Priority) msg = packUint16(msg, r.Weight) msg = packUint16(msg, r.Port) - msg, err := packName(msg, r.Target, nil) + msg, err := r.Target.pack(msg, nil, compressionOff) if err != nil { return oldMsg, &nestedError{"SRVResource.Target", err} } return msg, nil } -func unpackSRVResource(hdr ResourceHeader, msg []byte, off int) (*SRVResource, error) { +func unpackSRVResource(msg []byte, off int) (SRVResource, error) { priority, off, err := unpackUint16(msg, off) if err != nil { - return nil, &nestedError{"Priority", err} + return SRVResource{}, &nestedError{"Priority", err} } weight, off, err := unpackUint16(msg, off) if err != nil { - return nil, &nestedError{"Weight", err} + return SRVResource{}, &nestedError{"Weight", err} } port, off, err := unpackUint16(msg, off) if err != nil { - return nil, &nestedError{"Port", err} + return SRVResource{}, &nestedError{"Port", err} } - target, _, err := unpackName(msg, off) - if err != nil { - return nil, &nestedError{"Target", err} + var target Name + if _, err := target.unpackCompressed(msg, off, false /* allowCompression */); err != nil { + return SRVResource{}, &nestedError{"Target", err} } - return &SRVResource{hdr, priority, weight, port, target}, nil + return SRVResource{priority, weight, port, target}, nil } // An AResource is an A Resource record. type AResource struct { - ResourceHeader - A [4]byte } @@ -1382,22 +2156,21 @@ func (r *AResource) realType() Type { return TypeA } -func (r *AResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the AResource to msg. 
+func (r *AResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { return packBytes(msg, r.A[:]), nil } -func unpackAResource(hdr ResourceHeader, msg []byte, off int) (*AResource, error) { +func unpackAResource(msg []byte, off int) (AResource, error) { var a [4]byte if _, err := unpackBytes(msg, off, a[:]); err != nil { - return nil, err + return AResource{}, err } - return &AResource{hdr, a}, nil + return AResource{a}, nil } // An AAAAResource is an AAAA Resource record. type AAAAResource struct { - ResourceHeader - AAAA [16]byte } @@ -1405,14 +2178,70 @@ func (r *AAAAResource) realType() Type { return TypeAAAA } -func (r *AAAAResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the AAAAResource to msg. +func (r *AAAAResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { return packBytes(msg, r.AAAA[:]), nil } -func unpackAAAAResource(hdr ResourceHeader, msg []byte, off int) (*AAAAResource, error) { +func unpackAAAAResource(msg []byte, off int) (AAAAResource, error) { var aaaa [16]byte if _, err := unpackBytes(msg, off, aaaa[:]); err != nil { - return nil, err + return AAAAResource{}, err } - return &AAAAResource{hdr, aaaa}, nil + return AAAAResource{aaaa}, nil +} + +// An OPTResource is an OPT pseudo Resource record. +// +// The pseudo resource record is part of the extension mechanisms for DNS +// as defined in RFC 6891. +type OPTResource struct { + Options []Option +} + +// An Option represents a DNS message option within OPTResource. +// +// The message option is part of the extension mechanisms for DNS as +// defined in RFC 6891. +type Option struct { + Code uint16 // option code + Data []byte +} + +func (r *OPTResource) realType() Type { + return TypeOPT +} + +func (r *OPTResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + for _, opt := range r.Options { + msg = packUint16(msg, opt.Code) + l := uint16(len(opt.Data)) + msg = packUint16(msg, l) + msg = packBytes(msg, opt.Data) + } + return msg, nil +} + +func unpackOPTResource(msg []byte, off int, length uint16) (OPTResource, error) { + var opts []Option + for oldOff := off; off < oldOff+int(length); { + var err error + var o Option + o.Code, off, err = unpackUint16(msg, off) + if err != nil { + return OPTResource{}, &nestedError{"Code", err} + } + var l uint16 + l, off, err = unpackUint16(msg, off) + if err != nil { + return OPTResource{}, &nestedError{"Data", err} + } + o.Data = make([]byte, l) + if copy(o.Data, msg[off:]) != int(l) { + return OPTResource{}, &nestedError{"Data", errCalcLen} + } + off += int(l) + opts = append(opts, o) + } + return OPTResource{opts}, nil } diff --git a/vendor/golang.org/x/net/dns/dnsmessage/message_test.go b/vendor/golang.org/x/net/dns/dnsmessage/message_test.go index 46edd724..7e4e4bd4 100644 --- a/vendor/golang.org/x/net/dns/dnsmessage/message_test.go +++ b/vendor/golang.org/x/net/dns/dnsmessage/message_test.go @@ -5,13 +5,29 @@ package dnsmessage import ( + "bytes" "fmt" - "net" "reflect" "strings" "testing" ) +func mustNewName(name string) Name { + n, err := NewName(name) + if err != nil { + panic(err) + } + return n +} + +func mustEDNS0ResourceHeader(l int, extrc RCode, do bool) ResourceHeader { + h := ResourceHeader{Class: ClassINET} + if err := h.SetEDNS0(l, extrc, do); err != nil { + panic(err) + } + return h +} + func (m *Message) String() string { s := fmt.Sprintf("Message: %#v\n", &m.Header) if len(m.Questions) > 0 { 
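The OPTResource added above carries its EDNS(0) options as a flat run of (code, length, data) triples, and its pack/unpack methods simply walk that run until the RDATA length is exhausted. A minimal standalone sketch of that RFC 6891 option layout; appendOption is an illustrative helper and the option codes/payloads below are placeholders, not tied to any registered option:

package main

import "fmt"

// appendOption appends one EDNS(0) option in RFC 6891 wire form:
// a 16-bit option code, a 16-bit length, then the option data.
func appendOption(msg []byte, code uint16, data []byte) []byte {
	msg = append(msg, byte(code>>8), byte(code))
	l := uint16(len(data))
	msg = append(msg, byte(l>>8), byte(l))
	return append(msg, data...)
}

func main() {
	var rdata []byte
	// Hypothetical option code/payload pairs, for illustration only.
	rdata = appendOption(rdata, 12, []byte{0x00, 0x00})
	rdata = appendOption(rdata, 11, []byte{0x12, 0x34})
	fmt.Printf("OPT RDATA: % x\n", rdata)
}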
@@ -41,15 +57,23 @@ func (m *Message) String() string { return s } +func TestNameString(t *testing.T) { + want := "foo" + name := mustNewName(want) + if got := fmt.Sprint(name); got != want { + t.Errorf("got fmt.Sprint(%#v) = %s, want = %s", name, got, want) + } +} + func TestQuestionPackUnpack(t *testing.T) { want := Question{ - Name: ".", + Name: mustNewName("."), Type: TypeA, Class: ClassINET, } - buf, err := want.pack(make([]byte, 1, 50), map[string]int{}) + buf, err := want.pack(make([]byte, 1, 50), map[string]int{}, 1) if err != nil { - t.Fatal("Packing failed:", err) + t.Fatal("Question.pack() =", err) } var p Parser p.msg = buf @@ -58,13 +82,39 @@ func TestQuestionPackUnpack(t *testing.T) { p.off = 1 got, err := p.Question() if err != nil { - t.Fatalf("Unpacking failed: %v\n%s", err, string(buf[1:])) + t.Fatalf("Parser{%q}.Question() = %v", string(buf[1:]), err) } if p.off != len(buf) { - t.Errorf("Unpacked different amount than packed: got n = %d, want = %d", p.off, len(buf)) + t.Errorf("unpacked different amount than packed: got = %d, want = %d", p.off, len(buf)) } if !reflect.DeepEqual(got, want) { - t.Errorf("Got = %+v, want = %+v", got, want) + t.Errorf("got from Parser.Question() = %+v, want = %+v", got, want) + } +} + +func TestName(t *testing.T) { + tests := []string{ + "", + ".", + "google..com", + "google.com", + "google..com.", + "google.com.", + ".google.com.", + "www..google.com.", + "www.google.com.", + } + + for _, test := range tests { + n, err := NewName(test) + if err != nil { + t.Errorf("NewName(%q) = %v", test, err) + continue + } + if ns := n.String(); ns != test { + t.Errorf("got %#v.String() = %q, want = %q", n, ns, test) + continue + } } } @@ -74,10 +124,10 @@ func TestNamePackUnpack(t *testing.T) { want string err error }{ - {"", ".", nil}, + {"", "", errNonCanonicalName}, {".", ".", nil}, - {"google..com", "", errZeroSegLen}, - {"google.com", "google.com.", nil}, + {"google..com", "", errNonCanonicalName}, + {"google.com", "", errNonCanonicalName}, {"google..com.", "", errZeroSegLen}, {"google.com.", "google.com.", nil}, {".google.com.", "", errZeroSegLen}, @@ -86,29 +136,113 @@ func TestNamePackUnpack(t *testing.T) { } for _, test := range tests { - buf, err := packName(make([]byte, 0, 30), test.in, map[string]int{}) + in := mustNewName(test.in) + want := mustNewName(test.want) + buf, err := in.pack(make([]byte, 0, 30), map[string]int{}, 0) if err != test.err { - t.Errorf("Packing of %s: got err = %v, want err = %v", test.in, err, test.err) + t.Errorf("got %q.pack() = %v, want = %v", test.in, err, test.err) continue } if test.err != nil { continue } - got, n, err := unpackName(buf, 0) + var got Name + n, err := got.unpack(buf, 0) if err != nil { - t.Errorf("Unpacking for %s failed: %v", test.in, err) + t.Errorf("%q.unpack() = %v", test.in, err) continue } if n != len(buf) { t.Errorf( - "Unpacked different amount than packed for %s: got n = %d, want = %d", + "unpacked different amount than packed for %q: got = %d, want = %d", test.in, n, len(buf), ) } - if got != test.want { - t.Errorf("Unpacking packing of %s: got = %s, want = %s", test.in, got, test.want) + if got != want { + t.Errorf("unpacking packing of %q: got = %#v, want = %#v", test.in, got, want) + } + } +} + +func TestIncompressibleName(t *testing.T) { + name := mustNewName("example.com.") + compression := map[string]int{} + buf, err := name.pack(make([]byte, 0, 100), compression, 0) + if err != nil { + t.Fatal("first Name.pack() =", err) + } + buf, err = name.pack(buf, compression, 0) + if err 
!= nil { + t.Fatal("second Name.pack() =", err) + } + var n1 Name + off, err := n1.unpackCompressed(buf, 0, false /* allowCompression */) + if err != nil { + t.Fatal("unpacking incompressible name without pointers failed:", err) + } + var n2 Name + if _, err := n2.unpackCompressed(buf, off, false /* allowCompression */); err != errCompressedSRV { + t.Errorf("unpacking compressed incompressible name with pointers: got %v, want = %v", err, errCompressedSRV) + } +} + +func checkErrorPrefix(err error, prefix string) bool { + e, ok := err.(*nestedError) + return ok && e.s == prefix +} + +func TestHeaderUnpackError(t *testing.T) { + wants := []string{ + "id", + "bits", + "questions", + "answers", + "authorities", + "additionals", + } + var buf []byte + var h header + for _, want := range wants { + n, err := h.unpack(buf, 0) + if n != 0 || !checkErrorPrefix(err, want) { + t.Errorf("got header.unpack([%d]byte, 0) = %d, %v, want = 0, %s", len(buf), n, err, want) + } + buf = append(buf, 0, 0) + } +} + +func TestParserStart(t *testing.T) { + const want = "unpacking header" + var p Parser + for i := 0; i <= 1; i++ { + _, err := p.Start([]byte{}) + if !checkErrorPrefix(err, want) { + t.Errorf("got Parser.Start(nil) = _, %v, want = _, %s", err, want) + } + } +} + +func TestResourceNotStarted(t *testing.T) { + tests := []struct { + name string + fn func(*Parser) error + }{ + {"CNAMEResource", func(p *Parser) error { _, err := p.CNAMEResource(); return err }}, + {"MXResource", func(p *Parser) error { _, err := p.MXResource(); return err }}, + {"NSResource", func(p *Parser) error { _, err := p.NSResource(); return err }}, + {"PTRResource", func(p *Parser) error { _, err := p.PTRResource(); return err }}, + {"SOAResource", func(p *Parser) error { _, err := p.SOAResource(); return err }}, + {"TXTResource", func(p *Parser) error { _, err := p.TXTResource(); return err }}, + {"SRVResource", func(p *Parser) error { _, err := p.SRVResource(); return err }}, + {"AResource", func(p *Parser) error { _, err := p.AResource(); return err }}, + {"AAAAResource", func(p *Parser) error { _, err := p.AAAAResource(); return err }}, + } + + for _, test := range tests { + if err := test.fn(&Parser{}); err != ErrNotStarted { + t.Errorf("got Parser.%s() = _ , %v, want = _, %v", test.name, err, ErrNotStarted) } } } @@ -118,7 +252,7 @@ func TestDNSPackUnpack(t *testing.T) { { Questions: []Question{ { - Name: ".", + Name: mustNewName("."), Type: TypeAAAA, Class: ClassINET, }, @@ -132,15 +266,49 @@ func TestDNSPackUnpack(t *testing.T) { for i, want := range wants { b, err := want.Pack() if err != nil { - t.Fatalf("%d: packing failed: %v", i, err) + t.Fatalf("%d: Message.Pack() = %v", i, err) } var got Message err = got.Unpack(b) if err != nil { - t.Fatalf("%d: unpacking failed: %v", i, err) + t.Fatalf("%d: Message.Unapck() = %v", i, err) } if !reflect.DeepEqual(got, want) { - t.Errorf("%d: got = %+v, want = %+v", i, &got, &want) + t.Errorf("%d: Message.Pack/Unpack() roundtrip: got = %+v, want = %+v", i, &got, &want) + } + } +} + +func TestDNSAppendPackUnpack(t *testing.T) { + wants := []Message{ + { + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Answers: []Resource{}, + Authorities: []Resource{}, + Additionals: []Resource{}, + }, + largeTestMsg(), + } + for i, want := range wants { + b := make([]byte, 2, 514) + b, err := want.AppendPack(b) + if err != nil { + t.Fatalf("%d: Message.AppendPack() = %v", i, err) + } + b = b[2:] + var got Message + err = got.Unpack(b) + if err != 
nil { + t.Fatalf("%d: Message.Unapck() = %v", i, err) + } + if !reflect.DeepEqual(got, want) { + t.Errorf("%d: Message.AppendPack/Unpack() roundtrip: got = %+v, want = %+v", i, &got, &want) } } } @@ -149,11 +317,11 @@ func TestSkipAll(t *testing.T) { msg := largeTestMsg() buf, err := msg.Pack() if err != nil { - t.Fatal("Packing large test message:", err) + t.Fatal("Message.Pack() =", err) } var p Parser if _, err := p.Start(buf); err != nil { - t.Fatal(err) + t.Fatal("Parser.Start(non-nil) =", err) } tests := []struct { @@ -168,12 +336,75 @@ func TestSkipAll(t *testing.T) { for _, test := range tests { for i := 1; i <= 3; i++ { if err := test.f(); err != nil { - t.Errorf("Call #%d to %s(): %v", i, test.name, err) + t.Errorf("%d: Parser.%s() = %v", i, test.name, err) } } } } +func TestSkipEach(t *testing.T) { + msg := smallTestMsg() + + buf, err := msg.Pack() + if err != nil { + t.Fatal("Message.Pack() =", err) + } + var p Parser + if _, err := p.Start(buf); err != nil { + t.Fatal("Parser.Start(non-nil) =", err) + } + + tests := []struct { + name string + f func() error + }{ + {"SkipQuestion", p.SkipQuestion}, + {"SkipAnswer", p.SkipAnswer}, + {"SkipAuthority", p.SkipAuthority}, + {"SkipAdditional", p.SkipAdditional}, + } + for _, test := range tests { + if err := test.f(); err != nil { + t.Errorf("first Parser.%s() = %v, want = nil", test.name, err) + } + if err := test.f(); err != ErrSectionDone { + t.Errorf("second Parser.%s() = %v, want = %v", test.name, err, ErrSectionDone) + } + } +} + +func TestSkipAfterRead(t *testing.T) { + msg := smallTestMsg() + + buf, err := msg.Pack() + if err != nil { + t.Fatal("Message.Pack() =", err) + } + var p Parser + if _, err := p.Start(buf); err != nil { + t.Fatal("Parser.Srart(non-nil) =", err) + } + + tests := []struct { + name string + skip func() error + read func() error + }{ + {"Question", p.SkipQuestion, func() error { _, err := p.Question(); return err }}, + {"Answer", p.SkipAnswer, func() error { _, err := p.Answer(); return err }}, + {"Authority", p.SkipAuthority, func() error { _, err := p.Authority(); return err }}, + {"Additional", p.SkipAdditional, func() error { _, err := p.Additional(); return err }}, + } + for _, test := range tests { + if err := test.read(); err != nil { + t.Errorf("got Parser.%s() = _, %v, want = _, nil", test.name, err) + } + if err := test.skip(); err != ErrSectionDone { + t.Errorf("got Parser.Skip%s() = %v, want = %v", test.name, err, ErrSectionDone) + } + } +} + func TestSkipNotStarted(t *testing.T) { var p Parser @@ -188,7 +419,7 @@ func TestSkipNotStarted(t *testing.T) { } for _, test := range tests { if err := test.f(); err != ErrNotStarted { - t.Errorf("Got %s() = %v, want = %v", test.name, err, ErrNotStarted) + t.Errorf("got Parser.%s() = %v, want = %v", test.name, err, ErrNotStarted) } } } @@ -232,344 +463,854 @@ func TestTooManyRecords(t *testing.T) { for _, test := range tests { if _, got := test.msg.Pack(); got != test.want { - t.Errorf("Packing %d %s: got = %v, want = %v", recs, test.name, got, test.want) + t.Errorf("got Message.Pack() for %d %s = %v, want = %v", recs, test.name, got, test.want) } } } func TestVeryLongTxt(t *testing.T) { - want := &TXTResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", + want := Resource{ + ResourceHeader{ + Name: mustNewName("foo.bar.example.com."), Type: TypeTXT, Class: ClassINET, }, - Txt: loremIpsum, + &TXTResource{[]string{ + "", + "", + "foo bar", + "", + "www.example.com", + "www.example.com.", + strings.Repeat(".", 255), + }}, } - buf, 
err := packResource(make([]byte, 0, 8000), want, map[string]int{}) + buf, err := want.pack(make([]byte, 0, 8000), map[string]int{}, 0) if err != nil { - t.Fatal("Packing failed:", err) + t.Fatal("Resource.pack() =", err) } - var hdr ResourceHeader - off, err := hdr.unpack(buf, 0) + var got Resource + off, err := got.Header.unpack(buf, 0) if err != nil { - t.Fatal("Unpacking ResourceHeader failed:", err) + t.Fatal("ResourceHeader.unpack() =", err) } - got, n, err := unpackResource(buf, off, hdr) + body, n, err := unpackResourceBody(buf, off, got.Header) if err != nil { - t.Fatal("Unpacking failed:", err) + t.Fatal("unpackResourceBody() =", err) } + got.Body = body if n != len(buf) { - t.Errorf("Unpacked different amount than packed: got n = %d, want = %d", n, len(buf)) + t.Errorf("unpacked different amount than packed: got = %d, want = %d", n, len(buf)) } if !reflect.DeepEqual(got, want) { - t.Errorf("Got = %+v, want = %+v", got, want) + t.Errorf("Resource.pack/unpack() roundtrip: got = %#v, want = %#v", got, want) } } -func ExampleHeaderSearch() { +func TestTooLongTxt(t *testing.T) { + rb := TXTResource{[]string{strings.Repeat(".", 256)}} + if _, err := rb.pack(make([]byte, 0, 8000), map[string]int{}, 0); err != errStringTooLong { + t.Errorf("packing TXTResource with 256 character string: got err = %v, want = %v", err, errStringTooLong) + } +} + +func TestStartAppends(t *testing.T) { + buf := make([]byte, 2, 514) + wantBuf := []byte{4, 44} + copy(buf, wantBuf) + + b := NewBuilder(buf, Header{}) + b.EnableCompression() + + buf, err := b.Finish() + if err != nil { + t.Fatal("Builder.Finish() =", err) + } + if got, want := len(buf), headerLen+2; got != want { + t.Errorf("got len(buf) = %d, want = %d", got, want) + } + if string(buf[:2]) != string(wantBuf) { + t.Errorf("original data not preserved, got = %#v, want = %#v", buf[:2], wantBuf) + } +} + +func TestStartError(t *testing.T) { + tests := []struct { + name string + fn func(*Builder) error + }{ + {"Questions", func(b *Builder) error { return b.StartQuestions() }}, + {"Answers", func(b *Builder) error { return b.StartAnswers() }}, + {"Authorities", func(b *Builder) error { return b.StartAuthorities() }}, + {"Additionals", func(b *Builder) error { return b.StartAdditionals() }}, + } + + envs := []struct { + name string + fn func() *Builder + want error + }{ + {"sectionNotStarted", func() *Builder { return &Builder{section: sectionNotStarted} }, ErrNotStarted}, + {"sectionDone", func() *Builder { return &Builder{section: sectionDone} }, ErrSectionDone}, + } + + for _, env := range envs { + for _, test := range tests { + if got := test.fn(env.fn()); got != env.want { + t.Errorf("got Builder{%s}.Start%s() = %v, want = %v", env.name, test.name, got, env.want) + } + } + } +} + +func TestBuilderResourceError(t *testing.T) { + tests := []struct { + name string + fn func(*Builder) error + }{ + {"CNAMEResource", func(b *Builder) error { return b.CNAMEResource(ResourceHeader{}, CNAMEResource{}) }}, + {"MXResource", func(b *Builder) error { return b.MXResource(ResourceHeader{}, MXResource{}) }}, + {"NSResource", func(b *Builder) error { return b.NSResource(ResourceHeader{}, NSResource{}) }}, + {"PTRResource", func(b *Builder) error { return b.PTRResource(ResourceHeader{}, PTRResource{}) }}, + {"SOAResource", func(b *Builder) error { return b.SOAResource(ResourceHeader{}, SOAResource{}) }}, + {"TXTResource", func(b *Builder) error { return b.TXTResource(ResourceHeader{}, TXTResource{}) }}, + {"SRVResource", func(b *Builder) error { return 
b.SRVResource(ResourceHeader{}, SRVResource{}) }}, + {"AResource", func(b *Builder) error { return b.AResource(ResourceHeader{}, AResource{}) }}, + {"AAAAResource", func(b *Builder) error { return b.AAAAResource(ResourceHeader{}, AAAAResource{}) }}, + {"OPTResource", func(b *Builder) error { return b.OPTResource(ResourceHeader{}, OPTResource{}) }}, + } + + envs := []struct { + name string + fn func() *Builder + want error + }{ + {"sectionNotStarted", func() *Builder { return &Builder{section: sectionNotStarted} }, ErrNotStarted}, + {"sectionHeader", func() *Builder { return &Builder{section: sectionHeader} }, ErrNotStarted}, + {"sectionQuestions", func() *Builder { return &Builder{section: sectionQuestions} }, ErrNotStarted}, + {"sectionDone", func() *Builder { return &Builder{section: sectionDone} }, ErrSectionDone}, + } + + for _, env := range envs { + for _, test := range tests { + if got := test.fn(env.fn()); got != env.want { + t.Errorf("got Builder{%s}.%s() = %v, want = %v", env.name, test.name, got, env.want) + } + } + } +} + +func TestFinishError(t *testing.T) { + var b Builder + want := ErrNotStarted + if _, got := b.Finish(); got != want { + t.Errorf("got Builder.Finish() = %v, want = %v", got, want) + } +} + +func TestBuilder(t *testing.T) { + msg := largeTestMsg() + want, err := msg.Pack() + if err != nil { + t.Fatal("Message.Pack() =", err) + } + + b := NewBuilder(nil, msg.Header) + b.EnableCompression() + + if err := b.StartQuestions(); err != nil { + t.Fatal("Builder.StartQuestions() =", err) + } + for _, q := range msg.Questions { + if err := b.Question(q); err != nil { + t.Fatalf("Builder.Question(%#v) = %v", q, err) + } + } + + if err := b.StartAnswers(); err != nil { + t.Fatal("Builder.StartAnswers() =", err) + } + for _, a := range msg.Answers { + switch a.Header.Type { + case TypeA: + if err := b.AResource(a.Header, *a.Body.(*AResource)); err != nil { + t.Fatalf("Builder.AResource(%#v) = %v", a, err) + } + case TypeNS: + if err := b.NSResource(a.Header, *a.Body.(*NSResource)); err != nil { + t.Fatalf("Builder.NSResource(%#v) = %v", a, err) + } + case TypeCNAME: + if err := b.CNAMEResource(a.Header, *a.Body.(*CNAMEResource)); err != nil { + t.Fatalf("Builder.CNAMEResource(%#v) = %v", a, err) + } + case TypeSOA: + if err := b.SOAResource(a.Header, *a.Body.(*SOAResource)); err != nil { + t.Fatalf("Builder.SOAResource(%#v) = %v", a, err) + } + case TypePTR: + if err := b.PTRResource(a.Header, *a.Body.(*PTRResource)); err != nil { + t.Fatalf("Builder.PTRResource(%#v) = %v", a, err) + } + case TypeMX: + if err := b.MXResource(a.Header, *a.Body.(*MXResource)); err != nil { + t.Fatalf("Builder.MXResource(%#v) = %v", a, err) + } + case TypeTXT: + if err := b.TXTResource(a.Header, *a.Body.(*TXTResource)); err != nil { + t.Fatalf("Builder.TXTResource(%#v) = %v", a, err) + } + case TypeAAAA: + if err := b.AAAAResource(a.Header, *a.Body.(*AAAAResource)); err != nil { + t.Fatalf("Builder.AAAAResource(%#v) = %v", a, err) + } + case TypeSRV: + if err := b.SRVResource(a.Header, *a.Body.(*SRVResource)); err != nil { + t.Fatalf("Builder.SRVResource(%#v) = %v", a, err) + } + } + } + + if err := b.StartAuthorities(); err != nil { + t.Fatal("Builder.StartAuthorities() =", err) + } + for _, a := range msg.Authorities { + if err := b.NSResource(a.Header, *a.Body.(*NSResource)); err != nil { + t.Fatalf("Builder.NSResource(%#v) = %v", a, err) + } + } + + if err := b.StartAdditionals(); err != nil { + t.Fatal("Builder.StartAdditionals() =", err) + } + for _, a := range msg.Additionals { + switch 
a.Body.(type) { + case *TXTResource: + if err := b.TXTResource(a.Header, *a.Body.(*TXTResource)); err != nil { + t.Fatalf("Builder.TXTResource(%#v) = %v", a, err) + } + case *OPTResource: + if err := b.OPTResource(a.Header, *a.Body.(*OPTResource)); err != nil { + t.Fatalf("Builder.OPTResource(%#v) = %v", a, err) + } + } + } + + got, err := b.Finish() + if err != nil { + t.Fatal("Builder.Finish() =", err) + } + if !bytes.Equal(got, want) { + t.Fatalf("got from Builder.Finish() = %#v\nwant = %#v", got, want) + } +} + +func TestResourcePack(t *testing.T) { + for _, tt := range []struct { + m Message + err error + }{ + { + Message{ + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Answers: []Resource{{ResourceHeader{}, nil}}, + }, + &nestedError{"packing Answer", errNilResouceBody}, + }, + { + Message{ + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Authorities: []Resource{{ResourceHeader{}, (*NSResource)(nil)}}, + }, + &nestedError{"packing Authority", + &nestedError{"ResourceHeader", + &nestedError{"Name", errNonCanonicalName}, + }, + }, + }, + { + Message{ + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeA, + Class: ClassINET, + }, + }, + Additionals: []Resource{{ResourceHeader{}, nil}}, + }, + &nestedError{"packing Additional", errNilResouceBody}, + }, + } { + _, err := tt.m.Pack() + if !reflect.DeepEqual(err, tt.err) { + t.Errorf("got Message{%v}.Pack() = %v, want %v", tt.m, err, tt.err) + } + } +} + +func TestOptionPackUnpack(t *testing.T) { + for _, tt := range []struct { + name string + w []byte // wire format of m.Additionals + m Message + dnssecOK bool + extRCode RCode + }{ + { + name: "without EDNS(0) options", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xfe, 0x00, 0x80, + 0x00, 0x00, 0x00, + }, + m: Message{ + Header: Header{RCode: RCodeFormatError}, + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xfe0|RCodeFormatError, true), + &OPTResource{}, + }, + }, + }, + dnssecOK: true, + extRCode: 0xfe0 | RCodeFormatError, + }, + { + name: "with EDNS(0) options", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, 0x00, + 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x02, 0x00, + 0x00, 0x00, 0x0b, 0x00, 0x02, 0x12, 0x34, + }, + m: Message{ + Header: Header{RCode: RCodeServerFailure}, + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeServerFailure, false), + &OPTResource{ + Options: []Option{ + { + Code: 12, // see RFC 7828 + Data: []byte{0x00, 0x00}, + }, + { + Code: 11, // see RFC 7830 + Data: []byte{0x12, 0x34}, + }, + }, + }, + }, + }, + }, + dnssecOK: false, + extRCode: 0xff0 | RCodeServerFailure, + }, + { + // Containing multiple OPT resources in a + // message is invalid, but it's necessary for + // protocol conformance testing. 
+ name: "with multiple OPT resources", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, 0x00, + 0x00, 0x00, 0x06, 0x00, 0x0b, 0x00, 0x02, 0x12, + 0x34, 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, + 0x00, 0x00, 0x00, 0x06, 0x00, 0x0c, 0x00, 0x02, + 0x00, 0x00, + }, + m: Message{ + Header: Header{RCode: RCodeNameError}, + Questions: []Question{ + { + Name: mustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeNameError, false), + &OPTResource{ + Options: []Option{ + { + Code: 11, // see RFC 7830 + Data: []byte{0x12, 0x34}, + }, + }, + }, + }, + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeNameError, false), + &OPTResource{ + Options: []Option{ + { + Code: 12, // see RFC 7828 + Data: []byte{0x00, 0x00}, + }, + }, + }, + }, + }, + }, + }, + } { + w, err := tt.m.Pack() + if err != nil { + t.Errorf("Message.Pack() for %s = %v", tt.name, err) + continue + } + if !bytes.Equal(w[len(w)-len(tt.w):], tt.w) { + t.Errorf("got Message.Pack() for %s = %#v, want %#v", tt.name, w[len(w)-len(tt.w):], tt.w) + continue + } + var m Message + if err := m.Unpack(w); err != nil { + t.Errorf("Message.Unpack() for %s = %v", tt.name, err) + continue + } + if !reflect.DeepEqual(m.Additionals, tt.m.Additionals) { + t.Errorf("got Message.Pack/Unpack() roundtrip for %s = %+v, want %+v", tt.name, m, tt.m) + continue + } + } +} + +func benchmarkParsingSetup() ([]byte, error) { + name := mustNewName("foo.bar.example.com.") msg := Message{ Header: Header{Response: true, Authoritative: true}, Questions: []Question{ { - Name: "foo.bar.example.com.", - Type: TypeA, - Class: ClassINET, - }, - { - Name: "bar.example.com.", + Name: name, Type: TypeA, Class: ClassINET, }, }, Answers: []Resource{ - &AResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeA, + { + ResourceHeader{ + Name: name, Class: ClassINET, }, - A: [4]byte{127, 0, 0, 1}, + &AResource{[4]byte{}}, }, - &AResource{ - ResourceHeader: ResourceHeader{ - Name: "bar.example.com.", - Type: TypeA, + { + ResourceHeader{ + Name: name, Class: ClassINET, }, - A: [4]byte{127, 0, 0, 2}, + &AAAAResource{[16]byte{}}, + }, + { + ResourceHeader{ + Name: name, + Class: ClassINET, + }, + &CNAMEResource{name}, + }, + { + ResourceHeader{ + Name: name, + Class: ClassINET, + }, + &NSResource{name}, }, }, } buf, err := msg.Pack() if err != nil { - panic(err) + return nil, fmt.Errorf("Message.Pack() = %v", err) } + return buf, nil +} - wantName := "bar.example.com." 
- +func benchmarkParsing(tb testing.TB, buf []byte) { var p Parser if _, err := p.Start(buf); err != nil { - panic(err) + tb.Fatal("Parser.Start(non-nil) =", err) } for { - q, err := p.Question() + _, err := p.Question() if err == ErrSectionDone { break } if err != nil { - panic(err) + tb.Fatal("Parser.Question() =", err) } - - if q.Name != wantName { - continue - } - - fmt.Println("Found question for name", wantName) - if err := p.SkipAllQuestions(); err != nil { - panic(err) - } - break } - var gotIPs []net.IP for { h, err := p.AnswerHeader() if err == ErrSectionDone { break } if err != nil { - panic(err) + tb.Fatal("Parser.AnswerHeader() =", err) } - if (h.Type != TypeA && h.Type != TypeAAAA) || h.Class != ClassINET { - continue - } - - if !strings.EqualFold(h.Name, wantName) { - if err := p.SkipAnswer(); err != nil { - panic(err) + switch h.Type { + case TypeA: + if _, err := p.AResource(); err != nil { + tb.Fatal("Parser.AResource() =", err) + } + case TypeAAAA: + if _, err := p.AAAAResource(); err != nil { + tb.Fatal("Parser.AAAAResource() =", err) + } + case TypeCNAME: + if _, err := p.CNAMEResource(); err != nil { + tb.Fatal("Parser.CNAMEResource() =", err) + } + case TypeNS: + if _, err := p.NSResource(); err != nil { + tb.Fatal("Parser.NSResource() =", err) + } + case TypeOPT: + if _, err := p.OPTResource(); err != nil { + tb.Fatal("Parser.OPTResource() =", err) } - continue - } - a, err := p.Answer() - if err != nil { - panic(err) - } - - switch r := a.(type) { default: - panic(fmt.Sprintf("unknown type: %T", r)) - case *AResource: - gotIPs = append(gotIPs, r.A[:]) - case *AAAAResource: - gotIPs = append(gotIPs, r.AAAA[:]) + tb.Fatalf("got unknown type: %T", h) } } - - fmt.Printf("Found A/AAAA records for name %s: %v\n", wantName, gotIPs) - - // Output: - // Found question for name bar.example.com. 
- // Found A/AAAA records for name bar.example.com.: [127.0.0.2] } -func largeTestMsg() Message { +func BenchmarkParsing(b *testing.B) { + buf, err := benchmarkParsingSetup() + if err != nil { + b.Fatal(err) + } + + b.ReportAllocs() + for i := 0; i < b.N; i++ { + benchmarkParsing(b, buf) + } +} + +func TestParsingAllocs(t *testing.T) { + buf, err := benchmarkParsingSetup() + if err != nil { + t.Fatal(err) + } + + if allocs := testing.AllocsPerRun(100, func() { benchmarkParsing(t, buf) }); allocs > 0.5 { + t.Errorf("allocations during parsing: got = %f, want ~0", allocs) + } +} + +func benchmarkBuildingSetup() (Name, []byte) { + name := mustNewName("foo.bar.example.com.") + buf := make([]byte, 0, packStartingCap) + return name, buf +} + +func benchmarkBuilding(tb testing.TB, name Name, buf []byte) { + bld := NewBuilder(buf, Header{Response: true, Authoritative: true}) + + if err := bld.StartQuestions(); err != nil { + tb.Fatal("Builder.StartQuestions() =", err) + } + q := Question{ + Name: name, + Type: TypeA, + Class: ClassINET, + } + if err := bld.Question(q); err != nil { + tb.Fatalf("Builder.Question(%+v) = %v", q, err) + } + + hdr := ResourceHeader{ + Name: name, + Class: ClassINET, + } + if err := bld.StartAnswers(); err != nil { + tb.Fatal("Builder.StartQuestions() =", err) + } + + ar := AResource{[4]byte{}} + if err := bld.AResource(hdr, ar); err != nil { + tb.Fatalf("Builder.AResource(%+v, %+v) = %v", hdr, ar, err) + } + + aaar := AAAAResource{[16]byte{}} + if err := bld.AAAAResource(hdr, aaar); err != nil { + tb.Fatalf("Builder.AAAAResource(%+v, %+v) = %v", hdr, aaar, err) + } + + cnr := CNAMEResource{name} + if err := bld.CNAMEResource(hdr, cnr); err != nil { + tb.Fatalf("Builder.CNAMEResource(%+v, %+v) = %v", hdr, cnr, err) + } + + nsr := NSResource{name} + if err := bld.NSResource(hdr, nsr); err != nil { + tb.Fatalf("Builder.NSResource(%+v, %+v) = %v", hdr, nsr, err) + } + + extrc := 0xfe0 | RCodeNotImplemented + if err := (&hdr).SetEDNS0(4096, extrc, true); err != nil { + tb.Fatalf("ResourceHeader.SetEDNS0(4096, %#x, true) = %v", extrc, err) + } + optr := OPTResource{} + if err := bld.OPTResource(hdr, optr); err != nil { + tb.Fatalf("Builder.OPTResource(%+v, %+v) = %v", hdr, optr, err) + } + + if _, err := bld.Finish(); err != nil { + tb.Fatal("Builder.Finish() =", err) + } +} + +func BenchmarkBuilding(b *testing.B) { + name, buf := benchmarkBuildingSetup() + b.ReportAllocs() + for i := 0; i < b.N; i++ { + benchmarkBuilding(b, name, buf) + } +} + +func TestBuildingAllocs(t *testing.T) { + name, buf := benchmarkBuildingSetup() + if allocs := testing.AllocsPerRun(100, func() { benchmarkBuilding(t, name, buf) }); allocs > 0.5 { + t.Errorf("allocations during building: got = %f, want ~0", allocs) + } +} + +func smallTestMsg() Message { + name := mustNewName("example.com.") return Message{ Header: Header{Response: true, Authoritative: true}, Questions: []Question{ { - Name: "foo.bar.example.com.", + Name: name, Type: TypeA, Class: ClassINET, }, }, Answers: []Resource{ - &AResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", + { + ResourceHeader{ + Name: name, Type: TypeA, Class: ClassINET, }, - A: [4]byte{127, 0, 0, 1}, - }, - &AResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeA, - Class: ClassINET, - }, - A: [4]byte{127, 0, 0, 2}, + &AResource{[4]byte{127, 0, 0, 1}}, }, }, Authorities: []Resource{ - &NSResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeNS, + { + ResourceHeader{ + 
Name: name, + Type: TypeA, Class: ClassINET, }, - NS: "ns1.example.com.", - }, - &NSResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeNS, - Class: ClassINET, - }, - NS: "ns2.example.com.", + &AResource{[4]byte{127, 0, 0, 1}}, }, }, Additionals: []Resource{ - &TXTResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeTXT, + { + ResourceHeader{ + Name: name, + Type: TypeA, Class: ClassINET, }, - Txt: "So Long, and Thanks for All the Fish", - }, - &TXTResource{ - ResourceHeader: ResourceHeader{ - Name: "foo.bar.example.com.", - Type: TypeTXT, - Class: ClassINET, - }, - Txt: "Hamster Huey and the Gooey Kablooie", + &AResource{[4]byte{127, 0, 0, 1}}, }, }, } } -const loremIpsum = ` -Lorem ipsum dolor sit amet, nec enim antiopam id, an ullum choro -nonumes qui, pro eu debet honestatis mediocritatem. No alia enim eos, -magna signiferumque ex vis. Mei no aperiri dissentias, cu vel quas -regione. Malorum quaeque vim ut, eum cu semper aliquid invidunt, ei -nam ipsum assentior. +func BenchmarkPack(b *testing.B) { + msg := largeTestMsg() -Nostrum appellantur usu no, vis ex probatus adipiscing. Cu usu illum -facilis eleifend. Iusto conceptam complectitur vim id. Tale omnesque -no usu, ei oblique sadipscing vim. At nullam voluptua usu, mei laudem -reformidans et. Qui ei eros porro reformidans, ius suas veritus -torquatos ex. Mea te facer alterum consequat. + b.ReportAllocs() -Soleat torquatos democritum sed et, no mea congue appareat, facer -aliquam nec in. Has te ipsum tritani. At justo dicta option nec, movet -phaedrum ad nam. Ea detracto verterem liberavisse has, delectus -suscipiantur in mei. Ex nam meliore complectitur. Ut nam omnis -honestatis quaerendum, ea mea nihil affert detracto, ad vix rebum -mollis. + for i := 0; i < b.N; i++ { + if _, err := msg.Pack(); err != nil { + b.Fatal("Message.Pack() =", err) + } + } +} -Ut epicurei praesent neglegentur pri, prima fuisset intellegebat ad -vim. An habemus comprehensam usu, at enim dignissim pro. Eam reque -vivendum adipisci ea. Vel ne odio choro minimum. Sea admodum -dissentiet ex. Mundi tamquam evertitur ius cu. Homero postea iisque ut -pro, vel ne saepe senserit consetetur. +func BenchmarkAppendPack(b *testing.B) { + msg := largeTestMsg() + buf := make([]byte, 0, packStartingCap) -Nulla utamur facilisis ius ea, in viderer diceret pertinax eum. Mei no -enim quodsi facilisi, ex sed aeterno appareat mediocritatem, eum -sententiae deterruisset ut. At suas timeam euismod cum, offendit -appareat interpretaris ne vix. Vel ea civibus albucius, ex vim quidam -accusata intellegebat, noluisse instructior sea id. Nec te nonumes -habemus appellantur, quis dignissim vituperata eu nam. + b.ReportAllocs() -At vix apeirian patrioque vituperatoribus, an usu agam assum. Debet -iisque an mea. Per eu dicant ponderum accommodare. Pri alienum -placerat senserit an, ne eum ferri abhorreant vituperatoribus. Ut mea -eligendi disputationi. Ius no tation everti impedit, ei magna quidam -mediocritatem pri. + for i := 0; i < b.N; i++ { + if _, err := msg.AppendPack(buf[:0]); err != nil { + b.Fatal("Message.AppendPack() = ", err) + } + } +} -Legendos perpetua iracundia ne usu, no ius ullum epicurei intellegam, -ad modus epicuri lucilius eam. In unum quaerendum usu. Ne diam paulo -has, ea veri virtute sed. Alia honestatis conclusionemque mea eu, ut -iudico albucius his. - -Usu essent probatus eu, sed omnis dolor delicatissimi ex. No qui augue -dissentias dissentiet. 
Laudem recteque no usu, vel an velit noluisse, -an sed utinam eirmod appetere. Ne mea fuisset inimicus ocurreret. At -vis dicant abhorreant, utinam forensibus nec ne, mei te docendi -consequat. Brute inermis persecuti cum id. Ut ipsum munere propriae -usu, dicit graeco disputando id has. - -Eros dolore quaerendum nam ei. Timeam ornatus inciderint pro id. Nec -torquatos sadipscing ei, ancillae molestie per in. Malis principes duo -ea, usu liber postulant ei. - -Graece timeam voluptatibus eu eam. Alia probatus quo no, ea scripta -feugiat duo. Congue option meliore ex qui, noster invenire appellantur -ea vel. Eu exerci legendos vel. Consetetur repudiandae vim ut. Vix an -probo minimum, et nam illud falli tempor. - -Cum dico signiferumque eu. Sed ut regione maiorum, id veritus insolens -tacimates vix. Eu mel sint tamquam lucilius, duo no oporteat -tacimates. Atqui augue concludaturque vix ei, id mel utroque menandri. - -Ad oratio blandit aliquando pro. Vis et dolorum rationibus -philosophia, ad cum nulla molestie. Hinc fuisset adversarium eum et, -ne qui nisl verear saperet, vel te quaestio forensibus. Per odio -option delenit an. Alii placerat has no, in pri nihil platonem -cotidieque. Est ut elit copiosae scaevola, debet tollit maluisset sea -an. - -Te sea hinc debet pericula, liber ridens fabulas cu sed, quem mutat -accusam mea et. Elitr labitur albucius et pri, an labore feugait mel. -Velit zril melius usu ea. Ad stet putent interpretaris qui. Mel no -error volumus scripserit. In pro paulo iudico, quo ei dolorem -verterem, affert fabellas dissentiet ea vix. - -Vis quot deserunt te. Error aliquid detraxit eu usu, vis alia eruditi -salutatus cu. Est nostrud bonorum an, ei usu alii salutatus. Vel at -nisl primis, eum ex aperiri noluisse reformidans. Ad veri velit -utroque vis, ex equidem detraxit temporibus has. - -Inermis appareat usu ne. Eros placerat periculis mea ad, in dictas -pericula pro. Errem postulant at usu, ea nec amet ornatus mentitum. Ad -mazim graeco eum, vel ex percipit volutpat iudicabit, sit ne delicata -interesset. Mel sapientem prodesset abhorreant et, oblique suscipit -eam id. - -An maluisset disputando mea, vidit mnesarchum pri et. Malis insolens -inciderint no sea. Ea persius maluisset vix, ne vim appellantur -instructior, consul quidam definiebas pri id. Cum integre feugiat -pericula in, ex sed persius similique, mel ne natum dicit percipitur. - -Primis discere ne pri, errem putent definitionem at vis. Ei mel dolore -neglegentur, mei tincidunt percipitur ei. Pro ad simul integre -rationibus. Eu vel alii honestatis definitiones, mea no nonumy -reprehendunt. - -Dicta appareat legendos est cu. Eu vel congue dicunt omittam, no vix -adhuc minimum constituam, quot noluisse id mel. Eu quot sale mutat -duo, ex nisl munere invenire duo. Ne nec ullum utamur. Pro alterum -debitis nostrum no, ut vel aliquid vivendo. - -Aliquip fierent praesent quo ne, id sit audiam recusabo delicatissimi. -Usu postulant incorrupte cu. At pro dicit tibique intellegam, cibo -dolore impedit id eam, et aeque feugait assentior has. Quando sensibus -nec ex. Possit sensibus pri ad, unum mutat periculis cu vix. - -Mundi tibique vix te, duo simul partiendo qualisque id, est at vidit -sonet tempor. No per solet aeterno deseruisse. Petentium salutandi -definiebas pri cu. Munere vivendum est in. Ei justo congue eligendi -vis, modus offendit omittantur te mel. - -Integre voluptaria in qui, sit habemus tractatos constituam no. 
Utinam -melius conceptam est ne, quo in minimum apeirian delicata, ut ius -porro recusabo. Dicant expetenda vix no, ludus scripserit sed ex, eu -his modo nostro. Ut etiam sonet his, quodsi inciderint philosophia te -per. Nullam lobortis eu cum, vix an sonet efficiendi repudiandae. Vis -ad idque fabellas intellegebat. - -Eum commodo senserit conclusionemque ex. Sed forensibus sadipscing ut, -mei in facer delicata periculis, sea ne hinc putent cetero. Nec ne -alia corpora invenire, alia prima soleat te cum. Eleifend posidonium -nam at. - -Dolorum indoctum cu quo, ex dolor legendos recteque eam, cu pri zril -discere. Nec civibus officiis dissentiunt ex, est te liber ludus -elaboraret. Cum ea fabellas invenire. Ex vim nostrud eripuit -comprehensam, nam te inermis delectus, saepe inermis senserit. -` +func largeTestMsg() Message { + name := mustNewName("foo.bar.example.com.") + return Message{ + Header: Header{Response: true, Authoritative: true}, + Questions: []Question{ + { + Name: name, + Type: TypeA, + Class: ClassINET, + }, + }, + Answers: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 1}}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 2}}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeAAAA, + Class: ClassINET, + }, + &AAAAResource{[16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeCNAME, + Class: ClassINET, + }, + &CNAMEResource{mustNewName("alias.example.com.")}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeSOA, + Class: ClassINET, + }, + &SOAResource{ + NS: mustNewName("ns1.example.com."), + MBox: mustNewName("mb.example.com."), + Serial: 1, + Refresh: 2, + Retry: 3, + Expire: 4, + MinTTL: 5, + }, + }, + { + ResourceHeader{ + Name: name, + Type: TypePTR, + Class: ClassINET, + }, + &PTRResource{mustNewName("ptr.example.com.")}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeMX, + Class: ClassINET, + }, + &MXResource{ + 7, + mustNewName("mx.example.com."), + }, + }, + { + ResourceHeader{ + Name: name, + Type: TypeSRV, + Class: ClassINET, + }, + &SRVResource{ + 8, + 9, + 11, + mustNewName("srv.example.com."), + }, + }, + }, + Authorities: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeNS, + Class: ClassINET, + }, + &NSResource{mustNewName("ns1.example.com.")}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeNS, + Class: ClassINET, + }, + &NSResource{mustNewName("ns2.example.com.")}, + }, + }, + Additionals: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeTXT, + Class: ClassINET, + }, + &TXTResource{[]string{"So Long, and Thanks for All the Fish"}}, + }, + { + ResourceHeader{ + Name: name, + Type: TypeTXT, + Class: ClassINET, + }, + &TXTResource{[]string{"Hamster Huey and the Gooey Kablooie"}}, + }, + { + mustEDNS0ResourceHeader(4096, 0xfe0|RCodeSuccess, false), + &OPTResource{ + Options: []Option{ + { + Code: 10, // see RFC 7873 + Data: []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef}, + }, + }, + }, + }, + }, + } +} diff --git a/vendor/golang.org/x/net/html/atom/gen.go b/vendor/golang.org/x/net/html/atom/gen.go index 6bfa8660..5d052781 100644 --- a/vendor/golang.org/x/net/html/atom/gen.go +++ b/vendor/golang.org/x/net/html/atom/gen.go @@ -4,17 +4,17 @@ // +build ignore +//go:generate go run gen.go +//go:generate go run gen.go -test + package main -// This program generates table.go and table_test.go. 
-// Invoke as -// -// go run gen.go |gofmt >table.go -// go run gen.go -test |gofmt >table_test.go - import ( + "bytes" "flag" "fmt" + "go/format" + "io/ioutil" "math/rand" "os" "sort" @@ -42,6 +42,18 @@ func identifier(s string) string { var test = flag.Bool("test", false, "generate table_test.go") +func genFile(name string, buf *bytes.Buffer) { + b, err := format.Source(buf.Bytes()) + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + if err := ioutil.WriteFile(name, b, 0644); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} + func main() { flag.Parse() @@ -52,32 +64,31 @@ func main() { all = append(all, extra...) sort.Strings(all) - if *test { - fmt.Printf("// generated by go run gen.go -test; DO NOT EDIT\n\n") - fmt.Printf("package atom\n\n") - fmt.Printf("var testAtomList = []string{\n") - for _, s := range all { - fmt.Printf("\t%q,\n", s) - } - fmt.Printf("}\n") - return - } - // uniq - lists have dups - // compute max len too - maxLen := 0 w := 0 for _, s := range all { if w == 0 || all[w-1] != s { - if maxLen < len(s) { - maxLen = len(s) - } all[w] = s w++ } } all = all[:w] + if *test { + var buf bytes.Buffer + fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n") + fmt.Fprintln(&buf, "//go:generate go run gen.go -test\n") + fmt.Fprintln(&buf, "package atom\n") + fmt.Fprintln(&buf, "var testAtomList = []string{") + for _, s := range all { + fmt.Fprintf(&buf, "\t%q,\n", s) + } + fmt.Fprintln(&buf, "}") + + genFile("table_test.go", &buf) + return + } + // Find hash that minimizes table size. var best *table for i := 0; i < 1000000; i++ { @@ -163,36 +174,46 @@ func main() { atom[s] = uint32(off<<8 | len(s)) } + var buf bytes.Buffer // Generate the Go code. - fmt.Printf("// generated by go run gen.go; DO NOT EDIT\n\n") - fmt.Printf("package atom\n\nconst (\n") + fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n") + fmt.Fprintln(&buf, "//go:generate go run gen.go\n") + fmt.Fprintln(&buf, "package atom\n\nconst (") + + // compute max len + maxLen := 0 for _, s := range all { - fmt.Printf("\t%s Atom = %#x\n", identifier(s), atom[s]) + if maxLen < len(s) { + maxLen = len(s) + } + fmt.Fprintf(&buf, "\t%s Atom = %#x\n", identifier(s), atom[s]) } - fmt.Printf(")\n\n") + fmt.Fprintln(&buf, ")\n") - fmt.Printf("const hash0 = %#x\n\n", best.h0) - fmt.Printf("const maxAtomLen = %d\n\n", maxLen) + fmt.Fprintf(&buf, "const hash0 = %#x\n\n", best.h0) + fmt.Fprintf(&buf, "const maxAtomLen = %d\n\n", maxLen) - fmt.Printf("var table = [1<<%d]Atom{\n", best.k) + fmt.Fprintf(&buf, "var table = [1<<%d]Atom{\n", best.k) for i, s := range best.tab { if s == "" { continue } - fmt.Printf("\t%#x: %#x, // %s\n", i, atom[s], s) + fmt.Fprintf(&buf, "\t%#x: %#x, // %s\n", i, atom[s], s) } - fmt.Printf("}\n") + fmt.Fprintf(&buf, "}\n") datasize := (1 << best.k) * 4 - fmt.Printf("const atomText =\n") + fmt.Fprintln(&buf, "const atomText =") textsize := len(text) for len(text) > 60 { - fmt.Printf("\t%q +\n", text[:60]) + fmt.Fprintf(&buf, "\t%q +\n", text[:60]) text = text[60:] } - fmt.Printf("\t%q\n\n", text) + fmt.Fprintf(&buf, "\t%q\n\n", text) - fmt.Fprintf(os.Stderr, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize) + genFile("table.go", &buf) + + fmt.Fprintf(os.Stdout, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize) } type byLen []string @@ -285,8 +306,10 @@ func (t *table) push(i uint32, depth int) bool { // The 
lists of element names and attribute keys were taken from // https://html.spec.whatwg.org/multipage/indices.html#index -// as of the "HTML Living Standard - Last Updated 21 February 2015" version. +// as of the "HTML Living Standard - Last Updated 16 April 2018" version. +// "command", "keygen" and "menuitem" have been removed from the spec, +// but are kept here for backwards compatibility. var elements = []string{ "a", "abbr", @@ -349,6 +372,7 @@ var elements = []string{ "legend", "li", "link", + "main", "map", "mark", "menu", @@ -364,6 +388,7 @@ var elements = []string{ "output", "p", "param", + "picture", "pre", "progress", "q", @@ -375,6 +400,7 @@ var elements = []string{ "script", "section", "select", + "slot", "small", "source", "span", @@ -403,14 +429,21 @@ var elements = []string{ } // https://html.spec.whatwg.org/multipage/indices.html#attributes-3 - +// +// "challenge", "command", "contextmenu", "dropzone", "icon", "keytype", "mediagroup", +// "radiogroup", "spellcheck", "scoped", "seamless", "sortable" and "sorted" have been removed from the spec, +// but are kept here for backwards compatibility. var attributes = []string{ "abbr", "accept", "accept-charset", "accesskey", "action", + "allowfullscreen", + "allowpaymentrequest", + "allowusermedia", "alt", + "as", "async", "autocomplete", "autofocus", @@ -420,6 +453,7 @@ var attributes = []string{ "checked", "cite", "class", + "color", "cols", "colspan", "command", @@ -457,6 +491,8 @@ var attributes = []string{ "icon", "id", "inputmode", + "integrity", + "is", "ismap", "itemid", "itemprop", @@ -481,16 +517,20 @@ var attributes = []string{ "multiple", "muted", "name", + "nomodule", + "nonce", "novalidate", "open", "optimum", "pattern", "ping", "placeholder", + "playsinline", "poster", "preload", "radiogroup", "readonly", + "referrerpolicy", "rel", "required", "reversed", @@ -507,10 +547,13 @@ var attributes = []string{ "sizes", "sortable", "sorted", + "slot", "span", + "spellcheck", "src", "srcdoc", "srclang", + "srcset", "start", "step", "style", @@ -520,16 +563,22 @@ var attributes = []string{ "translate", "type", "typemustmatch", + "updateviacache", "usemap", "value", "width", + "workertype", "wrap", } +// "onautocomplete", "onautocompleteerror", "onmousewheel", +// "onshow" and "onsort" have been removed from the spec, +// but are kept here for backwards compatibility. 
var eventHandlers = []string{ "onabort", "onautocomplete", "onautocompleteerror", + "onauxclick", "onafterprint", "onbeforeprint", "onbeforeunload", @@ -541,11 +590,14 @@ var eventHandlers = []string{ "onclick", "onclose", "oncontextmenu", + "oncopy", "oncuechange", + "oncut", "ondblclick", "ondrag", "ondragend", "ondragenter", + "ondragexit", "ondragleave", "ondragover", "ondragstart", @@ -565,18 +617,24 @@ var eventHandlers = []string{ "onload", "onloadeddata", "onloadedmetadata", + "onloadend", "onloadstart", "onmessage", + "onmessageerror", "onmousedown", + "onmouseenter", + "onmouseleave", "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onmousewheel", + "onwheel", "onoffline", "ononline", "onpagehide", "onpageshow", + "onpaste", "onpause", "onplay", "onplaying", @@ -585,7 +643,9 @@ var eventHandlers = []string{ "onratechange", "onreset", "onresize", + "onrejectionhandled", "onscroll", + "onsecuritypolicyviolation", "onseeked", "onseeking", "onselect", @@ -597,6 +657,7 @@ var eventHandlers = []string{ "onsuspend", "ontimeupdate", "ontoggle", + "onunhandledrejection", "onunload", "onvolumechange", "onwaiting", @@ -604,6 +665,7 @@ var eventHandlers = []string{ // extra are ad-hoc values not covered by any of the lists above. var extra = []string{ + "acronym", "align", "annotation", "annotation-xml", @@ -639,6 +701,8 @@ var extra = []string{ "plaintext", "prompt", "public", + "rb", + "rtc", "spacer", "strike", "svg", diff --git a/vendor/golang.org/x/net/html/atom/table.go b/vendor/golang.org/x/net/html/atom/table.go index 2605ba31..2a938864 100644 --- a/vendor/golang.org/x/net/html/atom/table.go +++ b/vendor/golang.org/x/net/html/atom/table.go @@ -1,713 +1,783 @@ -// generated by go run gen.go; DO NOT EDIT +// Code generated by go generate gen.go; DO NOT EDIT. 
+ +//go:generate go run gen.go package atom const ( - A Atom = 0x1 - Abbr Atom = 0x4 - Accept Atom = 0x2106 - AcceptCharset Atom = 0x210e - Accesskey Atom = 0x3309 - Action Atom = 0x1f606 - Address Atom = 0x4f307 - Align Atom = 0x1105 - Alt Atom = 0x4503 - Annotation Atom = 0x1670a - AnnotationXml Atom = 0x1670e - Applet Atom = 0x2b306 - Area Atom = 0x2fa04 - Article Atom = 0x38807 - Aside Atom = 0x8305 - Async Atom = 0x7b05 - Audio Atom = 0xa605 - Autocomplete Atom = 0x1fc0c - Autofocus Atom = 0xb309 - Autoplay Atom = 0xce08 - B Atom = 0x101 - Base Atom = 0xd604 - Basefont Atom = 0xd608 - Bdi Atom = 0x1a03 - Bdo Atom = 0xe703 - Bgsound Atom = 0x11807 - Big Atom = 0x12403 - Blink Atom = 0x12705 - Blockquote Atom = 0x12c0a - Body Atom = 0x2f04 - Br Atom = 0x202 - Button Atom = 0x13606 - Canvas Atom = 0x7f06 - Caption Atom = 0x1bb07 - Center Atom = 0x5b506 - Challenge Atom = 0x21f09 - Charset Atom = 0x2807 - Checked Atom = 0x32807 - Cite Atom = 0x3c804 - Class Atom = 0x4de05 - Code Atom = 0x14904 - Col Atom = 0x15003 - Colgroup Atom = 0x15008 - Color Atom = 0x15d05 - Cols Atom = 0x16204 - Colspan Atom = 0x16207 - Command Atom = 0x17507 - Content Atom = 0x42307 - Contenteditable Atom = 0x4230f - Contextmenu Atom = 0x3310b - Controls Atom = 0x18808 - Coords Atom = 0x19406 - Crossorigin Atom = 0x19f0b - Data Atom = 0x44a04 - Datalist Atom = 0x44a08 - Datetime Atom = 0x23c08 - Dd Atom = 0x26702 - Default Atom = 0x8607 - Defer Atom = 0x14b05 - Del Atom = 0x3ef03 - Desc Atom = 0x4db04 - Details Atom = 0x4807 - Dfn Atom = 0x6103 - Dialog Atom = 0x1b06 - Dir Atom = 0x6903 - Dirname Atom = 0x6907 - Disabled Atom = 0x10c08 - Div Atom = 0x11303 - Dl Atom = 0x11e02 - Download Atom = 0x40008 - Draggable Atom = 0x17b09 - Dropzone Atom = 0x39108 - Dt Atom = 0x50902 - Em Atom = 0x6502 - Embed Atom = 0x6505 - Enctype Atom = 0x21107 - Face Atom = 0x5b304 - Fieldset Atom = 0x1b008 - Figcaption Atom = 0x1b80a - Figure Atom = 0x1cc06 - Font Atom = 0xda04 - Footer Atom = 0x8d06 - For Atom = 0x1d803 - ForeignObject Atom = 0x1d80d - Foreignobject Atom = 0x1e50d - Form Atom = 0x1f204 - Formaction Atom = 0x1f20a - Formenctype Atom = 0x20d0b - Formmethod Atom = 0x2280a - Formnovalidate Atom = 0x2320e - Formtarget Atom = 0x2470a - Frame Atom = 0x9a05 - Frameset Atom = 0x9a08 - H1 Atom = 0x26e02 - H2 Atom = 0x29402 - H3 Atom = 0x2a702 - H4 Atom = 0x2e902 - H5 Atom = 0x2f302 - H6 Atom = 0x50b02 - Head Atom = 0x2d504 - Header Atom = 0x2d506 - Headers Atom = 0x2d507 - Height Atom = 0x25106 - Hgroup Atom = 0x25906 - Hidden Atom = 0x26506 - High Atom = 0x26b04 - Hr Atom = 0x27002 - Href Atom = 0x27004 - Hreflang Atom = 0x27008 - Html Atom = 0x25504 - HttpEquiv Atom = 0x2780a - I Atom = 0x601 - Icon Atom = 0x42204 - Id Atom = 0x8502 - Iframe Atom = 0x29606 - Image Atom = 0x29c05 - Img Atom = 0x2a103 - Input Atom = 0x3e805 - Inputmode Atom = 0x3e809 - Ins Atom = 0x1a803 - Isindex Atom = 0x2a907 - Ismap Atom = 0x2b005 - Itemid Atom = 0x33c06 - Itemprop Atom = 0x3c908 - Itemref Atom = 0x5ad07 - Itemscope Atom = 0x2b909 - Itemtype Atom = 0x2c308 - Kbd Atom = 0x1903 - Keygen Atom = 0x3906 - Keytype Atom = 0x53707 - Kind Atom = 0x10904 - Label Atom = 0xf005 - Lang Atom = 0x27404 - Legend Atom = 0x18206 - Li Atom = 0x1202 - Link Atom = 0x12804 - List Atom = 0x44e04 - Listing Atom = 0x44e07 - Loop Atom = 0xf404 - Low Atom = 0x11f03 - Malignmark Atom = 0x100a - Manifest Atom = 0x5f108 - Map Atom = 0x2b203 - Mark Atom = 0x1604 - Marquee Atom = 0x2cb07 - Math Atom = 0x2d204 - Max Atom = 0x2e103 - Maxlength Atom = 0x2e109 - Media Atom = 
0x6e05 - Mediagroup Atom = 0x6e0a - Menu Atom = 0x33804 - Menuitem Atom = 0x33808 - Meta Atom = 0x45d04 - Meter Atom = 0x24205 - Method Atom = 0x22c06 - Mglyph Atom = 0x2a206 - Mi Atom = 0x2eb02 - Min Atom = 0x2eb03 - Minlength Atom = 0x2eb09 - Mn Atom = 0x23502 - Mo Atom = 0x3ed02 - Ms Atom = 0x2bc02 - Mtext Atom = 0x2f505 - Multiple Atom = 0x30308 - Muted Atom = 0x30b05 - Name Atom = 0x6c04 - Nav Atom = 0x3e03 - Nobr Atom = 0x5704 - Noembed Atom = 0x6307 - Noframes Atom = 0x9808 - Noscript Atom = 0x3d208 - Novalidate Atom = 0x2360a - Object Atom = 0x1ec06 - Ol Atom = 0xc902 - Onabort Atom = 0x13a07 - Onafterprint Atom = 0x1c00c - Onautocomplete Atom = 0x1fa0e - Onautocompleteerror Atom = 0x1fa13 - Onbeforeprint Atom = 0x6040d - Onbeforeunload Atom = 0x4e70e - Onblur Atom = 0xaa06 - Oncancel Atom = 0xe908 - Oncanplay Atom = 0x28509 - Oncanplaythrough Atom = 0x28510 - Onchange Atom = 0x3a708 - Onclick Atom = 0x31007 - Onclose Atom = 0x31707 - Oncontextmenu Atom = 0x32f0d - Oncuechange Atom = 0x3420b - Ondblclick Atom = 0x34d0a - Ondrag Atom = 0x35706 - Ondragend Atom = 0x35709 - Ondragenter Atom = 0x3600b - Ondragleave Atom = 0x36b0b - Ondragover Atom = 0x3760a - Ondragstart Atom = 0x3800b - Ondrop Atom = 0x38f06 - Ondurationchange Atom = 0x39f10 - Onemptied Atom = 0x39609 - Onended Atom = 0x3af07 - Onerror Atom = 0x3b607 - Onfocus Atom = 0x3bd07 - Onhashchange Atom = 0x3da0c - Oninput Atom = 0x3e607 - Oninvalid Atom = 0x3f209 - Onkeydown Atom = 0x3fb09 - Onkeypress Atom = 0x4080a - Onkeyup Atom = 0x41807 - Onlanguagechange Atom = 0x43210 - Onload Atom = 0x44206 - Onloadeddata Atom = 0x4420c - Onloadedmetadata Atom = 0x45510 - Onloadstart Atom = 0x46b0b - Onmessage Atom = 0x47609 - Onmousedown Atom = 0x47f0b - Onmousemove Atom = 0x48a0b - Onmouseout Atom = 0x4950a - Onmouseover Atom = 0x4a20b - Onmouseup Atom = 0x4ad09 - Onmousewheel Atom = 0x4b60c - Onoffline Atom = 0x4c209 - Ononline Atom = 0x4cb08 - Onpagehide Atom = 0x4d30a - Onpageshow Atom = 0x4fe0a - Onpause Atom = 0x50d07 - Onplay Atom = 0x51706 - Onplaying Atom = 0x51709 - Onpopstate Atom = 0x5200a - Onprogress Atom = 0x52a0a - Onratechange Atom = 0x53e0c - Onreset Atom = 0x54a07 - Onresize Atom = 0x55108 - Onscroll Atom = 0x55f08 - Onseeked Atom = 0x56708 - Onseeking Atom = 0x56f09 - Onselect Atom = 0x57808 - Onshow Atom = 0x58206 - Onsort Atom = 0x58b06 - Onstalled Atom = 0x59509 - Onstorage Atom = 0x59e09 - Onsubmit Atom = 0x5a708 - Onsuspend Atom = 0x5bb09 - Ontimeupdate Atom = 0xdb0c - Ontoggle Atom = 0x5c408 - Onunload Atom = 0x5cc08 - Onvolumechange Atom = 0x5d40e - Onwaiting Atom = 0x5e209 - Open Atom = 0x3cf04 - Optgroup Atom = 0xf608 - Optimum Atom = 0x5eb07 - Option Atom = 0x60006 - Output Atom = 0x49c06 - P Atom = 0xc01 - Param Atom = 0xc05 - Pattern Atom = 0x5107 - Ping Atom = 0x7704 - Placeholder Atom = 0xc30b - Plaintext Atom = 0xfd09 - Poster Atom = 0x15706 - Pre Atom = 0x25e03 - Preload Atom = 0x25e07 - Progress Atom = 0x52c08 - Prompt Atom = 0x5fa06 - Public Atom = 0x41e06 - Q Atom = 0x13101 - Radiogroup Atom = 0x30a - Readonly Atom = 0x2fb08 - Rel Atom = 0x25f03 - Required Atom = 0x1d008 - Reversed Atom = 0x5a08 - Rows Atom = 0x9204 - Rowspan Atom = 0x9207 - Rp Atom = 0x1c602 - Rt Atom = 0x13f02 - Ruby Atom = 0xaf04 - S Atom = 0x2c01 - Samp Atom = 0x4e04 - Sandbox Atom = 0xbb07 - Scope Atom = 0x2bd05 - Scoped Atom = 0x2bd06 - Script Atom = 0x3d406 - Seamless Atom = 0x31c08 - Section Atom = 0x4e207 - Select Atom = 0x57a06 - Selected Atom = 0x57a08 - Shape Atom = 0x4f905 - Size Atom = 0x55504 - Sizes Atom = 
0x55505 - Small Atom = 0x18f05 - Sortable Atom = 0x58d08 - Sorted Atom = 0x19906 - Source Atom = 0x1aa06 - Spacer Atom = 0x2db06 - Span Atom = 0x9504 - Spellcheck Atom = 0x3230a - Src Atom = 0x3c303 - Srcdoc Atom = 0x3c306 - Srclang Atom = 0x41107 - Start Atom = 0x38605 - Step Atom = 0x5f704 - Strike Atom = 0x53306 - Strong Atom = 0x55906 - Style Atom = 0x61105 - Sub Atom = 0x5a903 - Summary Atom = 0x61607 - Sup Atom = 0x61d03 - Svg Atom = 0x62003 - System Atom = 0x62306 - Tabindex Atom = 0x46308 - Table Atom = 0x42d05 - Target Atom = 0x24b06 - Tbody Atom = 0x2e05 - Td Atom = 0x4702 - Template Atom = 0x62608 - Textarea Atom = 0x2f608 - Tfoot Atom = 0x8c05 - Th Atom = 0x22e02 - Thead Atom = 0x2d405 - Time Atom = 0xdd04 - Title Atom = 0xa105 - Tr Atom = 0x10502 - Track Atom = 0x10505 - Translate Atom = 0x14009 - Tt Atom = 0x5302 - Type Atom = 0x21404 - Typemustmatch Atom = 0x2140d - U Atom = 0xb01 - Ul Atom = 0x8a02 - Usemap Atom = 0x51106 - Value Atom = 0x4005 - Var Atom = 0x11503 - Video Atom = 0x28105 - Wbr Atom = 0x12103 - Width Atom = 0x50705 - Wrap Atom = 0x58704 - Xmp Atom = 0xc103 + A Atom = 0x1 + Abbr Atom = 0x4 + Accept Atom = 0x1a06 + AcceptCharset Atom = 0x1a0e + Accesskey Atom = 0x2c09 + Acronym Atom = 0xaa07 + Action Atom = 0x27206 + Address Atom = 0x6f307 + Align Atom = 0xb105 + Allowfullscreen Atom = 0x2080f + Allowpaymentrequest Atom = 0xc113 + Allowusermedia Atom = 0xdd0e + Alt Atom = 0xf303 + Annotation Atom = 0x1c90a + AnnotationXml Atom = 0x1c90e + Applet Atom = 0x31906 + Area Atom = 0x35604 + Article Atom = 0x3fc07 + As Atom = 0x3c02 + Aside Atom = 0x10705 + Async Atom = 0xff05 + Audio Atom = 0x11505 + Autocomplete Atom = 0x2780c + Autofocus Atom = 0x12109 + Autoplay Atom = 0x13c08 + B Atom = 0x101 + Base Atom = 0x3b04 + Basefont Atom = 0x3b08 + Bdi Atom = 0xba03 + Bdo Atom = 0x14b03 + Bgsound Atom = 0x15e07 + Big Atom = 0x17003 + Blink Atom = 0x17305 + Blockquote Atom = 0x1870a + Body Atom = 0x2804 + Br Atom = 0x202 + Button Atom = 0x19106 + Canvas Atom = 0x10306 + Caption Atom = 0x23107 + Center Atom = 0x22006 + Challenge Atom = 0x29b09 + Charset Atom = 0x2107 + Checked Atom = 0x47907 + Cite Atom = 0x19c04 + Class Atom = 0x56405 + Code Atom = 0x5c504 + Col Atom = 0x1ab03 + Colgroup Atom = 0x1ab08 + Color Atom = 0x1bf05 + Cols Atom = 0x1c404 + Colspan Atom = 0x1c407 + Command Atom = 0x1d707 + Content Atom = 0x58b07 + Contenteditable Atom = 0x58b0f + Contextmenu Atom = 0x3800b + Controls Atom = 0x1de08 + Coords Atom = 0x1ea06 + Crossorigin Atom = 0x1fb0b + Data Atom = 0x4a504 + Datalist Atom = 0x4a508 + Datetime Atom = 0x2b808 + Dd Atom = 0x2d702 + Default Atom = 0x10a07 + Defer Atom = 0x5c705 + Del Atom = 0x45203 + Desc Atom = 0x56104 + Details Atom = 0x7207 + Dfn Atom = 0x8703 + Dialog Atom = 0xbb06 + Dir Atom = 0x9303 + Dirname Atom = 0x9307 + Disabled Atom = 0x16408 + Div Atom = 0x16b03 + Dl Atom = 0x5e602 + Download Atom = 0x46308 + Draggable Atom = 0x17a09 + Dropzone Atom = 0x40508 + Dt Atom = 0x64b02 + Em Atom = 0x6e02 + Embed Atom = 0x6e05 + Enctype Atom = 0x28d07 + Face Atom = 0x21e04 + Fieldset Atom = 0x22608 + Figcaption Atom = 0x22e0a + Figure Atom = 0x24806 + Font Atom = 0x3f04 + Footer Atom = 0xf606 + For Atom = 0x25403 + ForeignObject Atom = 0x2540d + Foreignobject Atom = 0x2610d + Form Atom = 0x26e04 + Formaction Atom = 0x26e0a + Formenctype Atom = 0x2890b + Formmethod Atom = 0x2a40a + Formnovalidate Atom = 0x2ae0e + Formtarget Atom = 0x2c00a + Frame Atom = 0x8b05 + Frameset Atom = 0x8b08 + H1 Atom = 0x15c02 + H2 Atom = 0x2de02 + H3 Atom = 0x30d02 + H4 
Atom = 0x34502 + H5 Atom = 0x34f02 + H6 Atom = 0x64d02 + Head Atom = 0x33104 + Header Atom = 0x33106 + Headers Atom = 0x33107 + Height Atom = 0x5206 + Hgroup Atom = 0x2ca06 + Hidden Atom = 0x2d506 + High Atom = 0x2db04 + Hr Atom = 0x15702 + Href Atom = 0x2e004 + Hreflang Atom = 0x2e008 + Html Atom = 0x5604 + HttpEquiv Atom = 0x2e80a + I Atom = 0x601 + Icon Atom = 0x58a04 + Id Atom = 0x10902 + Iframe Atom = 0x2fc06 + Image Atom = 0x30205 + Img Atom = 0x30703 + Input Atom = 0x44b05 + Inputmode Atom = 0x44b09 + Ins Atom = 0x20403 + Integrity Atom = 0x23f09 + Is Atom = 0x16502 + Isindex Atom = 0x30f07 + Ismap Atom = 0x31605 + Itemid Atom = 0x38b06 + Itemprop Atom = 0x19d08 + Itemref Atom = 0x3cd07 + Itemscope Atom = 0x67109 + Itemtype Atom = 0x31f08 + Kbd Atom = 0xb903 + Keygen Atom = 0x3206 + Keytype Atom = 0xd607 + Kind Atom = 0x17704 + Label Atom = 0x5905 + Lang Atom = 0x2e404 + Legend Atom = 0x18106 + Li Atom = 0xb202 + Link Atom = 0x17404 + List Atom = 0x4a904 + Listing Atom = 0x4a907 + Loop Atom = 0x5d04 + Low Atom = 0xc303 + Main Atom = 0x1004 + Malignmark Atom = 0xb00a + Manifest Atom = 0x6d708 + Map Atom = 0x31803 + Mark Atom = 0xb604 + Marquee Atom = 0x32707 + Math Atom = 0x32e04 + Max Atom = 0x33d03 + Maxlength Atom = 0x33d09 + Media Atom = 0xe605 + Mediagroup Atom = 0xe60a + Menu Atom = 0x38704 + Menuitem Atom = 0x38708 + Meta Atom = 0x4b804 + Meter Atom = 0x9805 + Method Atom = 0x2a806 + Mglyph Atom = 0x30806 + Mi Atom = 0x34702 + Min Atom = 0x34703 + Minlength Atom = 0x34709 + Mn Atom = 0x2b102 + Mo Atom = 0xa402 + Ms Atom = 0x67402 + Mtext Atom = 0x35105 + Multiple Atom = 0x35f08 + Muted Atom = 0x36705 + Name Atom = 0x9604 + Nav Atom = 0x1303 + Nobr Atom = 0x3704 + Noembed Atom = 0x6c07 + Noframes Atom = 0x8908 + Nomodule Atom = 0xa208 + Nonce Atom = 0x1a605 + Noscript Atom = 0x21608 + Novalidate Atom = 0x2b20a + Object Atom = 0x26806 + Ol Atom = 0x13702 + Onabort Atom = 0x19507 + Onafterprint Atom = 0x2360c + Onautocomplete Atom = 0x2760e + Onautocompleteerror Atom = 0x27613 + Onauxclick Atom = 0x61f0a + Onbeforeprint Atom = 0x69e0d + Onbeforeunload Atom = 0x6e70e + Onblur Atom = 0x56d06 + Oncancel Atom = 0x11908 + Oncanplay Atom = 0x14d09 + Oncanplaythrough Atom = 0x14d10 + Onchange Atom = 0x41b08 + Onclick Atom = 0x2f507 + Onclose Atom = 0x36c07 + Oncontextmenu Atom = 0x37e0d + Oncopy Atom = 0x39106 + Oncuechange Atom = 0x3970b + Oncut Atom = 0x3a205 + Ondblclick Atom = 0x3a70a + Ondrag Atom = 0x3b106 + Ondragend Atom = 0x3b109 + Ondragenter Atom = 0x3ba0b + Ondragexit Atom = 0x3c50a + Ondragleave Atom = 0x3df0b + Ondragover Atom = 0x3ea0a + Ondragstart Atom = 0x3f40b + Ondrop Atom = 0x40306 + Ondurationchange Atom = 0x41310 + Onemptied Atom = 0x40a09 + Onended Atom = 0x42307 + Onerror Atom = 0x42a07 + Onfocus Atom = 0x43107 + Onhashchange Atom = 0x43d0c + Oninput Atom = 0x44907 + Oninvalid Atom = 0x45509 + Onkeydown Atom = 0x45e09 + Onkeypress Atom = 0x46b0a + Onkeyup Atom = 0x48007 + Onlanguagechange Atom = 0x48d10 + Onload Atom = 0x49d06 + Onloadeddata Atom = 0x49d0c + Onloadedmetadata Atom = 0x4b010 + Onloadend Atom = 0x4c609 + Onloadstart Atom = 0x4cf0b + Onmessage Atom = 0x4da09 + Onmessageerror Atom = 0x4da0e + Onmousedown Atom = 0x4e80b + Onmouseenter Atom = 0x4f30c + Onmouseleave Atom = 0x4ff0c + Onmousemove Atom = 0x50b0b + Onmouseout Atom = 0x5160a + Onmouseover Atom = 0x5230b + Onmouseup Atom = 0x52e09 + Onmousewheel Atom = 0x53c0c + Onoffline Atom = 0x54809 + Ononline Atom = 0x55108 + Onpagehide Atom = 0x5590a + Onpageshow Atom = 0x5730a + Onpaste Atom = 0x57f07 
+ Onpause Atom = 0x59a07 + Onplay Atom = 0x5a406 + Onplaying Atom = 0x5a409 + Onpopstate Atom = 0x5ad0a + Onprogress Atom = 0x5b70a + Onratechange Atom = 0x5cc0c + Onrejectionhandled Atom = 0x5d812 + Onreset Atom = 0x5ea07 + Onresize Atom = 0x5f108 + Onscroll Atom = 0x60008 + Onsecuritypolicyviolation Atom = 0x60819 + Onseeked Atom = 0x62908 + Onseeking Atom = 0x63109 + Onselect Atom = 0x63a08 + Onshow Atom = 0x64406 + Onsort Atom = 0x64f06 + Onstalled Atom = 0x65909 + Onstorage Atom = 0x66209 + Onsubmit Atom = 0x66b08 + Onsuspend Atom = 0x67b09 + Ontimeupdate Atom = 0x400c + Ontoggle Atom = 0x68408 + Onunhandledrejection Atom = 0x68c14 + Onunload Atom = 0x6ab08 + Onvolumechange Atom = 0x6b30e + Onwaiting Atom = 0x6c109 + Onwheel Atom = 0x6ca07 + Open Atom = 0x1a304 + Optgroup Atom = 0x5f08 + Optimum Atom = 0x6d107 + Option Atom = 0x6e306 + Output Atom = 0x51d06 + P Atom = 0xc01 + Param Atom = 0xc05 + Pattern Atom = 0x6607 + Picture Atom = 0x7b07 + Ping Atom = 0xef04 + Placeholder Atom = 0x1310b + Plaintext Atom = 0x1b209 + Playsinline Atom = 0x1400b + Poster Atom = 0x2cf06 + Pre Atom = 0x47003 + Preload Atom = 0x48607 + Progress Atom = 0x5b908 + Prompt Atom = 0x53606 + Public Atom = 0x58606 + Q Atom = 0xcf01 + Radiogroup Atom = 0x30a + Rb Atom = 0x3a02 + Readonly Atom = 0x35708 + Referrerpolicy Atom = 0x3d10e + Rel Atom = 0x48703 + Required Atom = 0x24c08 + Reversed Atom = 0x8008 + Rows Atom = 0x9c04 + Rowspan Atom = 0x9c07 + Rp Atom = 0x23c02 + Rt Atom = 0x19a02 + Rtc Atom = 0x19a03 + Ruby Atom = 0xfb04 + S Atom = 0x2501 + Samp Atom = 0x7804 + Sandbox Atom = 0x12907 + Scope Atom = 0x67505 + Scoped Atom = 0x67506 + Script Atom = 0x21806 + Seamless Atom = 0x37108 + Section Atom = 0x56807 + Select Atom = 0x63c06 + Selected Atom = 0x63c08 + Shape Atom = 0x1e505 + Size Atom = 0x5f504 + Sizes Atom = 0x5f505 + Slot Atom = 0x1ef04 + Small Atom = 0x20605 + Sortable Atom = 0x65108 + Sorted Atom = 0x33706 + Source Atom = 0x37806 + Spacer Atom = 0x43706 + Span Atom = 0x9f04 + Spellcheck Atom = 0x4740a + Src Atom = 0x5c003 + Srcdoc Atom = 0x5c006 + Srclang Atom = 0x5f907 + Srcset Atom = 0x6f906 + Start Atom = 0x3fa05 + Step Atom = 0x58304 + Strike Atom = 0xd206 + Strong Atom = 0x6dd06 + Style Atom = 0x6ff05 + Sub Atom = 0x66d03 + Summary Atom = 0x70407 + Sup Atom = 0x70b03 + Svg Atom = 0x70e03 + System Atom = 0x71106 + Tabindex Atom = 0x4be08 + Table Atom = 0x59505 + Target Atom = 0x2c406 + Tbody Atom = 0x2705 + Td Atom = 0x9202 + Template Atom = 0x71408 + Textarea Atom = 0x35208 + Tfoot Atom = 0xf505 + Th Atom = 0x15602 + Thead Atom = 0x33005 + Time Atom = 0x4204 + Title Atom = 0x11005 + Tr Atom = 0xcc02 + Track Atom = 0x1ba05 + Translate Atom = 0x1f209 + Tt Atom = 0x6802 + Type Atom = 0xd904 + Typemustmatch Atom = 0x2900d + U Atom = 0xb01 + Ul Atom = 0xa702 + Updateviacache Atom = 0x460e + Usemap Atom = 0x59e06 + Value Atom = 0x1505 + Var Atom = 0x16d03 + Video Atom = 0x2f105 + Wbr Atom = 0x57c03 + Width Atom = 0x64905 + Workertype Atom = 0x71c0a + Wrap Atom = 0x72604 + Xmp Atom = 0x12f03 ) -const hash0 = 0xc17da63e +const hash0 = 0x81cdf10e -const maxAtomLen = 19 +const maxAtomLen = 25 var table = [1 << 9]Atom{ - 0x1: 0x48a0b, // onmousemove - 0x2: 0x5e209, // onwaiting - 0x3: 0x1fa13, // onautocompleteerror - 0x4: 0x5fa06, // prompt - 0x7: 0x5eb07, // optimum - 0x8: 0x1604, // mark - 0xa: 0x5ad07, // itemref - 0xb: 0x4fe0a, // onpageshow - 0xc: 0x57a06, // select - 0xd: 0x17b09, // draggable - 0xe: 0x3e03, // nav - 0xf: 0x17507, // command - 0x11: 0xb01, // u - 0x14: 0x2d507, // headers - 0x15: 
0x44a08, // datalist - 0x17: 0x4e04, // samp - 0x1a: 0x3fb09, // onkeydown - 0x1b: 0x55f08, // onscroll - 0x1c: 0x15003, // col - 0x20: 0x3c908, // itemprop - 0x21: 0x2780a, // http-equiv - 0x22: 0x61d03, // sup - 0x24: 0x1d008, // required - 0x2b: 0x25e07, // preload - 0x2c: 0x6040d, // onbeforeprint - 0x2d: 0x3600b, // ondragenter - 0x2e: 0x50902, // dt - 0x2f: 0x5a708, // onsubmit - 0x30: 0x27002, // hr - 0x31: 0x32f0d, // oncontextmenu - 0x33: 0x29c05, // image - 0x34: 0x50d07, // onpause - 0x35: 0x25906, // hgroup - 0x36: 0x7704, // ping - 0x37: 0x57808, // onselect - 0x3a: 0x11303, // div - 0x3b: 0x1fa0e, // onautocomplete - 0x40: 0x2eb02, // mi - 0x41: 0x31c08, // seamless - 0x42: 0x2807, // charset - 0x43: 0x8502, // id - 0x44: 0x5200a, // onpopstate - 0x45: 0x3ef03, // del - 0x46: 0x2cb07, // marquee - 0x47: 0x3309, // accesskey - 0x49: 0x8d06, // footer - 0x4a: 0x44e04, // list - 0x4b: 0x2b005, // ismap - 0x51: 0x33804, // menu - 0x52: 0x2f04, // body - 0x55: 0x9a08, // frameset - 0x56: 0x54a07, // onreset - 0x57: 0x12705, // blink - 0x58: 0xa105, // title - 0x59: 0x38807, // article - 0x5b: 0x22e02, // th - 0x5d: 0x13101, // q - 0x5e: 0x3cf04, // open - 0x5f: 0x2fa04, // area - 0x61: 0x44206, // onload - 0x62: 0xda04, // font - 0x63: 0xd604, // base - 0x64: 0x16207, // colspan - 0x65: 0x53707, // keytype - 0x66: 0x11e02, // dl - 0x68: 0x1b008, // fieldset - 0x6a: 0x2eb03, // min - 0x6b: 0x11503, // var - 0x6f: 0x2d506, // header - 0x70: 0x13f02, // rt - 0x71: 0x15008, // colgroup - 0x72: 0x23502, // mn - 0x74: 0x13a07, // onabort - 0x75: 0x3906, // keygen - 0x76: 0x4c209, // onoffline - 0x77: 0x21f09, // challenge - 0x78: 0x2b203, // map - 0x7a: 0x2e902, // h4 - 0x7b: 0x3b607, // onerror - 0x7c: 0x2e109, // maxlength - 0x7d: 0x2f505, // mtext - 0x7e: 0xbb07, // sandbox - 0x7f: 0x58b06, // onsort - 0x80: 0x100a, // malignmark - 0x81: 0x45d04, // meta - 0x82: 0x7b05, // async - 0x83: 0x2a702, // h3 - 0x84: 0x26702, // dd - 0x85: 0x27004, // href - 0x86: 0x6e0a, // mediagroup - 0x87: 0x19406, // coords - 0x88: 0x41107, // srclang - 0x89: 0x34d0a, // ondblclick - 0x8a: 0x4005, // value - 0x8c: 0xe908, // oncancel - 0x8e: 0x3230a, // spellcheck - 0x8f: 0x9a05, // frame - 0x91: 0x12403, // big - 0x94: 0x1f606, // action - 0x95: 0x6903, // dir - 0x97: 0x2fb08, // readonly - 0x99: 0x42d05, // table - 0x9a: 0x61607, // summary - 0x9b: 0x12103, // wbr - 0x9c: 0x30a, // radiogroup - 0x9d: 0x6c04, // name - 0x9f: 0x62306, // system - 0xa1: 0x15d05, // color - 0xa2: 0x7f06, // canvas - 0xa3: 0x25504, // html - 0xa5: 0x56f09, // onseeking - 0xac: 0x4f905, // shape - 0xad: 0x25f03, // rel - 0xae: 0x28510, // oncanplaythrough - 0xaf: 0x3760a, // ondragover - 0xb0: 0x62608, // template - 0xb1: 0x1d80d, // foreignObject - 0xb3: 0x9204, // rows - 0xb6: 0x44e07, // listing - 0xb7: 0x49c06, // output - 0xb9: 0x3310b, // contextmenu - 0xbb: 0x11f03, // low - 0xbc: 0x1c602, // rp - 0xbd: 0x5bb09, // onsuspend - 0xbe: 0x13606, // button - 0xbf: 0x4db04, // desc - 0xc1: 0x4e207, // section - 0xc2: 0x52a0a, // onprogress - 0xc3: 0x59e09, // onstorage - 0xc4: 0x2d204, // math - 0xc5: 0x4503, // alt - 0xc7: 0x8a02, // ul - 0xc8: 0x5107, // pattern - 0xc9: 0x4b60c, // onmousewheel - 0xca: 0x35709, // ondragend - 0xcb: 0xaf04, // ruby - 0xcc: 0xc01, // p - 0xcd: 0x31707, // onclose - 0xce: 0x24205, // meter - 0xcf: 0x11807, // bgsound - 0xd2: 0x25106, // height - 0xd4: 0x101, // b - 0xd5: 0x2c308, // itemtype - 0xd8: 0x1bb07, // caption - 0xd9: 0x10c08, // disabled - 0xdb: 0x33808, // menuitem - 0xdc: 
0x62003, // svg - 0xdd: 0x18f05, // small - 0xde: 0x44a04, // data - 0xe0: 0x4cb08, // ononline - 0xe1: 0x2a206, // mglyph - 0xe3: 0x6505, // embed - 0xe4: 0x10502, // tr - 0xe5: 0x46b0b, // onloadstart - 0xe7: 0x3c306, // srcdoc - 0xeb: 0x5c408, // ontoggle - 0xed: 0xe703, // bdo - 0xee: 0x4702, // td - 0xef: 0x8305, // aside - 0xf0: 0x29402, // h2 - 0xf1: 0x52c08, // progress - 0xf2: 0x12c0a, // blockquote - 0xf4: 0xf005, // label - 0xf5: 0x601, // i - 0xf7: 0x9207, // rowspan - 0xfb: 0x51709, // onplaying - 0xfd: 0x2a103, // img - 0xfe: 0xf608, // optgroup - 0xff: 0x42307, // content - 0x101: 0x53e0c, // onratechange - 0x103: 0x3da0c, // onhashchange - 0x104: 0x4807, // details - 0x106: 0x40008, // download - 0x109: 0x14009, // translate - 0x10b: 0x4230f, // contenteditable - 0x10d: 0x36b0b, // ondragleave - 0x10e: 0x2106, // accept - 0x10f: 0x57a08, // selected - 0x112: 0x1f20a, // formaction - 0x113: 0x5b506, // center - 0x115: 0x45510, // onloadedmetadata - 0x116: 0x12804, // link - 0x117: 0xdd04, // time - 0x118: 0x19f0b, // crossorigin - 0x119: 0x3bd07, // onfocus - 0x11a: 0x58704, // wrap - 0x11b: 0x42204, // icon - 0x11d: 0x28105, // video - 0x11e: 0x4de05, // class - 0x121: 0x5d40e, // onvolumechange - 0x122: 0xaa06, // onblur - 0x123: 0x2b909, // itemscope - 0x124: 0x61105, // style - 0x127: 0x41e06, // public - 0x129: 0x2320e, // formnovalidate - 0x12a: 0x58206, // onshow - 0x12c: 0x51706, // onplay - 0x12d: 0x3c804, // cite - 0x12e: 0x2bc02, // ms - 0x12f: 0xdb0c, // ontimeupdate - 0x130: 0x10904, // kind - 0x131: 0x2470a, // formtarget - 0x135: 0x3af07, // onended - 0x136: 0x26506, // hidden - 0x137: 0x2c01, // s - 0x139: 0x2280a, // formmethod - 0x13a: 0x3e805, // input - 0x13c: 0x50b02, // h6 - 0x13d: 0xc902, // ol - 0x13e: 0x3420b, // oncuechange - 0x13f: 0x1e50d, // foreignobject - 0x143: 0x4e70e, // onbeforeunload - 0x144: 0x2bd05, // scope - 0x145: 0x39609, // onemptied - 0x146: 0x14b05, // defer - 0x147: 0xc103, // xmp - 0x148: 0x39f10, // ondurationchange - 0x149: 0x1903, // kbd - 0x14c: 0x47609, // onmessage - 0x14d: 0x60006, // option - 0x14e: 0x2eb09, // minlength - 0x14f: 0x32807, // checked - 0x150: 0xce08, // autoplay - 0x152: 0x202, // br - 0x153: 0x2360a, // novalidate - 0x156: 0x6307, // noembed - 0x159: 0x31007, // onclick - 0x15a: 0x47f0b, // onmousedown - 0x15b: 0x3a708, // onchange - 0x15e: 0x3f209, // oninvalid - 0x15f: 0x2bd06, // scoped - 0x160: 0x18808, // controls - 0x161: 0x30b05, // muted - 0x162: 0x58d08, // sortable - 0x163: 0x51106, // usemap - 0x164: 0x1b80a, // figcaption - 0x165: 0x35706, // ondrag - 0x166: 0x26b04, // high - 0x168: 0x3c303, // src - 0x169: 0x15706, // poster - 0x16b: 0x1670e, // annotation-xml - 0x16c: 0x5f704, // step - 0x16d: 0x4, // abbr - 0x16e: 0x1b06, // dialog - 0x170: 0x1202, // li - 0x172: 0x3ed02, // mo - 0x175: 0x1d803, // for - 0x176: 0x1a803, // ins - 0x178: 0x55504, // size - 0x179: 0x43210, // onlanguagechange - 0x17a: 0x8607, // default - 0x17b: 0x1a03, // bdi - 0x17c: 0x4d30a, // onpagehide - 0x17d: 0x6907, // dirname - 0x17e: 0x21404, // type - 0x17f: 0x1f204, // form - 0x181: 0x28509, // oncanplay - 0x182: 0x6103, // dfn - 0x183: 0x46308, // tabindex - 0x186: 0x6502, // em - 0x187: 0x27404, // lang - 0x189: 0x39108, // dropzone - 0x18a: 0x4080a, // onkeypress - 0x18b: 0x23c08, // datetime - 0x18c: 0x16204, // cols - 0x18d: 0x1, // a - 0x18e: 0x4420c, // onloadeddata - 0x190: 0xa605, // audio - 0x192: 0x2e05, // tbody - 0x193: 0x22c06, // method - 0x195: 0xf404, // loop - 0x196: 0x29606, // iframe - 0x198: 
0x2d504, // head - 0x19e: 0x5f108, // manifest - 0x19f: 0xb309, // autofocus - 0x1a0: 0x14904, // code - 0x1a1: 0x55906, // strong - 0x1a2: 0x30308, // multiple - 0x1a3: 0xc05, // param - 0x1a6: 0x21107, // enctype - 0x1a7: 0x5b304, // face - 0x1a8: 0xfd09, // plaintext - 0x1a9: 0x26e02, // h1 - 0x1aa: 0x59509, // onstalled - 0x1ad: 0x3d406, // script - 0x1ae: 0x2db06, // spacer - 0x1af: 0x55108, // onresize - 0x1b0: 0x4a20b, // onmouseover - 0x1b1: 0x5cc08, // onunload - 0x1b2: 0x56708, // onseeked - 0x1b4: 0x2140d, // typemustmatch - 0x1b5: 0x1cc06, // figure - 0x1b6: 0x4950a, // onmouseout - 0x1b7: 0x25e03, // pre - 0x1b8: 0x50705, // width - 0x1b9: 0x19906, // sorted - 0x1bb: 0x5704, // nobr - 0x1be: 0x5302, // tt - 0x1bf: 0x1105, // align - 0x1c0: 0x3e607, // oninput - 0x1c3: 0x41807, // onkeyup - 0x1c6: 0x1c00c, // onafterprint - 0x1c7: 0x210e, // accept-charset - 0x1c8: 0x33c06, // itemid - 0x1c9: 0x3e809, // inputmode - 0x1cb: 0x53306, // strike - 0x1cc: 0x5a903, // sub - 0x1cd: 0x10505, // track - 0x1ce: 0x38605, // start - 0x1d0: 0xd608, // basefont - 0x1d6: 0x1aa06, // source - 0x1d7: 0x18206, // legend - 0x1d8: 0x2d405, // thead - 0x1da: 0x8c05, // tfoot - 0x1dd: 0x1ec06, // object - 0x1de: 0x6e05, // media - 0x1df: 0x1670a, // annotation - 0x1e0: 0x20d0b, // formenctype - 0x1e2: 0x3d208, // noscript - 0x1e4: 0x55505, // sizes - 0x1e5: 0x1fc0c, // autocomplete - 0x1e6: 0x9504, // span - 0x1e7: 0x9808, // noframes - 0x1e8: 0x24b06, // target - 0x1e9: 0x38f06, // ondrop - 0x1ea: 0x2b306, // applet - 0x1ec: 0x5a08, // reversed - 0x1f0: 0x2a907, // isindex - 0x1f3: 0x27008, // hreflang - 0x1f5: 0x2f302, // h5 - 0x1f6: 0x4f307, // address - 0x1fa: 0x2e103, // max - 0x1fb: 0xc30b, // placeholder - 0x1fc: 0x2f608, // textarea - 0x1fe: 0x4ad09, // onmouseup - 0x1ff: 0x3800b, // ondragstart + 0x1: 0xe60a, // mediagroup + 0x2: 0x2e404, // lang + 0x4: 0x2c09, // accesskey + 0x5: 0x8b08, // frameset + 0x7: 0x63a08, // onselect + 0x8: 0x71106, // system + 0xa: 0x64905, // width + 0xc: 0x2890b, // formenctype + 0xd: 0x13702, // ol + 0xe: 0x3970b, // oncuechange + 0x10: 0x14b03, // bdo + 0x11: 0x11505, // audio + 0x12: 0x17a09, // draggable + 0x14: 0x2f105, // video + 0x15: 0x2b102, // mn + 0x16: 0x38704, // menu + 0x17: 0x2cf06, // poster + 0x19: 0xf606, // footer + 0x1a: 0x2a806, // method + 0x1b: 0x2b808, // datetime + 0x1c: 0x19507, // onabort + 0x1d: 0x460e, // updateviacache + 0x1e: 0xff05, // async + 0x1f: 0x49d06, // onload + 0x21: 0x11908, // oncancel + 0x22: 0x62908, // onseeked + 0x23: 0x30205, // image + 0x24: 0x5d812, // onrejectionhandled + 0x26: 0x17404, // link + 0x27: 0x51d06, // output + 0x28: 0x33104, // head + 0x29: 0x4ff0c, // onmouseleave + 0x2a: 0x57f07, // onpaste + 0x2b: 0x5a409, // onplaying + 0x2c: 0x1c407, // colspan + 0x2f: 0x1bf05, // color + 0x30: 0x5f504, // size + 0x31: 0x2e80a, // http-equiv + 0x33: 0x601, // i + 0x34: 0x5590a, // onpagehide + 0x35: 0x68c14, // onunhandledrejection + 0x37: 0x42a07, // onerror + 0x3a: 0x3b08, // basefont + 0x3f: 0x1303, // nav + 0x40: 0x17704, // kind + 0x41: 0x35708, // readonly + 0x42: 0x30806, // mglyph + 0x44: 0xb202, // li + 0x46: 0x2d506, // hidden + 0x47: 0x70e03, // svg + 0x48: 0x58304, // step + 0x49: 0x23f09, // integrity + 0x4a: 0x58606, // public + 0x4c: 0x1ab03, // col + 0x4d: 0x1870a, // blockquote + 0x4e: 0x34f02, // h5 + 0x50: 0x5b908, // progress + 0x51: 0x5f505, // sizes + 0x52: 0x34502, // h4 + 0x56: 0x33005, // thead + 0x57: 0xd607, // keytype + 0x58: 0x5b70a, // onprogress + 0x59: 0x44b09, // inputmode + 
0x5a: 0x3b109, // ondragend + 0x5d: 0x3a205, // oncut + 0x5e: 0x43706, // spacer + 0x5f: 0x1ab08, // colgroup + 0x62: 0x16502, // is + 0x65: 0x3c02, // as + 0x66: 0x54809, // onoffline + 0x67: 0x33706, // sorted + 0x69: 0x48d10, // onlanguagechange + 0x6c: 0x43d0c, // onhashchange + 0x6d: 0x9604, // name + 0x6e: 0xf505, // tfoot + 0x6f: 0x56104, // desc + 0x70: 0x33d03, // max + 0x72: 0x1ea06, // coords + 0x73: 0x30d02, // h3 + 0x74: 0x6e70e, // onbeforeunload + 0x75: 0x9c04, // rows + 0x76: 0x63c06, // select + 0x77: 0x9805, // meter + 0x78: 0x38b06, // itemid + 0x79: 0x53c0c, // onmousewheel + 0x7a: 0x5c006, // srcdoc + 0x7d: 0x1ba05, // track + 0x7f: 0x31f08, // itemtype + 0x82: 0xa402, // mo + 0x83: 0x41b08, // onchange + 0x84: 0x33107, // headers + 0x85: 0x5cc0c, // onratechange + 0x86: 0x60819, // onsecuritypolicyviolation + 0x88: 0x4a508, // datalist + 0x89: 0x4e80b, // onmousedown + 0x8a: 0x1ef04, // slot + 0x8b: 0x4b010, // onloadedmetadata + 0x8c: 0x1a06, // accept + 0x8d: 0x26806, // object + 0x91: 0x6b30e, // onvolumechange + 0x92: 0x2107, // charset + 0x93: 0x27613, // onautocompleteerror + 0x94: 0xc113, // allowpaymentrequest + 0x95: 0x2804, // body + 0x96: 0x10a07, // default + 0x97: 0x63c08, // selected + 0x98: 0x21e04, // face + 0x99: 0x1e505, // shape + 0x9b: 0x68408, // ontoggle + 0x9e: 0x64b02, // dt + 0x9f: 0xb604, // mark + 0xa1: 0xb01, // u + 0xa4: 0x6ab08, // onunload + 0xa5: 0x5d04, // loop + 0xa6: 0x16408, // disabled + 0xaa: 0x42307, // onended + 0xab: 0xb00a, // malignmark + 0xad: 0x67b09, // onsuspend + 0xae: 0x35105, // mtext + 0xaf: 0x64f06, // onsort + 0xb0: 0x19d08, // itemprop + 0xb3: 0x67109, // itemscope + 0xb4: 0x17305, // blink + 0xb6: 0x3b106, // ondrag + 0xb7: 0xa702, // ul + 0xb8: 0x26e04, // form + 0xb9: 0x12907, // sandbox + 0xba: 0x8b05, // frame + 0xbb: 0x1505, // value + 0xbc: 0x66209, // onstorage + 0xbf: 0xaa07, // acronym + 0xc0: 0x19a02, // rt + 0xc2: 0x202, // br + 0xc3: 0x22608, // fieldset + 0xc4: 0x2900d, // typemustmatch + 0xc5: 0xa208, // nomodule + 0xc6: 0x6c07, // noembed + 0xc7: 0x69e0d, // onbeforeprint + 0xc8: 0x19106, // button + 0xc9: 0x2f507, // onclick + 0xca: 0x70407, // summary + 0xcd: 0xfb04, // ruby + 0xce: 0x56405, // class + 0xcf: 0x3f40b, // ondragstart + 0xd0: 0x23107, // caption + 0xd4: 0xdd0e, // allowusermedia + 0xd5: 0x4cf0b, // onloadstart + 0xd9: 0x16b03, // div + 0xda: 0x4a904, // list + 0xdb: 0x32e04, // math + 0xdc: 0x44b05, // input + 0xdf: 0x3ea0a, // ondragover + 0xe0: 0x2de02, // h2 + 0xe2: 0x1b209, // plaintext + 0xe4: 0x4f30c, // onmouseenter + 0xe7: 0x47907, // checked + 0xe8: 0x47003, // pre + 0xea: 0x35f08, // multiple + 0xeb: 0xba03, // bdi + 0xec: 0x33d09, // maxlength + 0xed: 0xcf01, // q + 0xee: 0x61f0a, // onauxclick + 0xf0: 0x57c03, // wbr + 0xf2: 0x3b04, // base + 0xf3: 0x6e306, // option + 0xf5: 0x41310, // ondurationchange + 0xf7: 0x8908, // noframes + 0xf9: 0x40508, // dropzone + 0xfb: 0x67505, // scope + 0xfc: 0x8008, // reversed + 0xfd: 0x3ba0b, // ondragenter + 0xfe: 0x3fa05, // start + 0xff: 0x12f03, // xmp + 0x100: 0x5f907, // srclang + 0x101: 0x30703, // img + 0x104: 0x101, // b + 0x105: 0x25403, // for + 0x106: 0x10705, // aside + 0x107: 0x44907, // oninput + 0x108: 0x35604, // area + 0x109: 0x2a40a, // formmethod + 0x10a: 0x72604, // wrap + 0x10c: 0x23c02, // rp + 0x10d: 0x46b0a, // onkeypress + 0x10e: 0x6802, // tt + 0x110: 0x34702, // mi + 0x111: 0x36705, // muted + 0x112: 0xf303, // alt + 0x113: 0x5c504, // code + 0x114: 0x6e02, // em + 0x115: 0x3c50a, // ondragexit + 0x117: 
0x9f04, // span + 0x119: 0x6d708, // manifest + 0x11a: 0x38708, // menuitem + 0x11b: 0x58b07, // content + 0x11d: 0x6c109, // onwaiting + 0x11f: 0x4c609, // onloadend + 0x121: 0x37e0d, // oncontextmenu + 0x123: 0x56d06, // onblur + 0x124: 0x3fc07, // article + 0x125: 0x9303, // dir + 0x126: 0xef04, // ping + 0x127: 0x24c08, // required + 0x128: 0x45509, // oninvalid + 0x129: 0xb105, // align + 0x12b: 0x58a04, // icon + 0x12c: 0x64d02, // h6 + 0x12d: 0x1c404, // cols + 0x12e: 0x22e0a, // figcaption + 0x12f: 0x45e09, // onkeydown + 0x130: 0x66b08, // onsubmit + 0x131: 0x14d09, // oncanplay + 0x132: 0x70b03, // sup + 0x133: 0xc01, // p + 0x135: 0x40a09, // onemptied + 0x136: 0x39106, // oncopy + 0x137: 0x19c04, // cite + 0x138: 0x3a70a, // ondblclick + 0x13a: 0x50b0b, // onmousemove + 0x13c: 0x66d03, // sub + 0x13d: 0x48703, // rel + 0x13e: 0x5f08, // optgroup + 0x142: 0x9c07, // rowspan + 0x143: 0x37806, // source + 0x144: 0x21608, // noscript + 0x145: 0x1a304, // open + 0x146: 0x20403, // ins + 0x147: 0x2540d, // foreignObject + 0x148: 0x5ad0a, // onpopstate + 0x14a: 0x28d07, // enctype + 0x14b: 0x2760e, // onautocomplete + 0x14c: 0x35208, // textarea + 0x14e: 0x2780c, // autocomplete + 0x14f: 0x15702, // hr + 0x150: 0x1de08, // controls + 0x151: 0x10902, // id + 0x153: 0x2360c, // onafterprint + 0x155: 0x2610d, // foreignobject + 0x156: 0x32707, // marquee + 0x157: 0x59a07, // onpause + 0x158: 0x5e602, // dl + 0x159: 0x5206, // height + 0x15a: 0x34703, // min + 0x15b: 0x9307, // dirname + 0x15c: 0x1f209, // translate + 0x15d: 0x5604, // html + 0x15e: 0x34709, // minlength + 0x15f: 0x48607, // preload + 0x160: 0x71408, // template + 0x161: 0x3df0b, // ondragleave + 0x162: 0x3a02, // rb + 0x164: 0x5c003, // src + 0x165: 0x6dd06, // strong + 0x167: 0x7804, // samp + 0x168: 0x6f307, // address + 0x169: 0x55108, // ononline + 0x16b: 0x1310b, // placeholder + 0x16c: 0x2c406, // target + 0x16d: 0x20605, // small + 0x16e: 0x6ca07, // onwheel + 0x16f: 0x1c90a, // annotation + 0x170: 0x4740a, // spellcheck + 0x171: 0x7207, // details + 0x172: 0x10306, // canvas + 0x173: 0x12109, // autofocus + 0x174: 0xc05, // param + 0x176: 0x46308, // download + 0x177: 0x45203, // del + 0x178: 0x36c07, // onclose + 0x179: 0xb903, // kbd + 0x17a: 0x31906, // applet + 0x17b: 0x2e004, // href + 0x17c: 0x5f108, // onresize + 0x17e: 0x49d0c, // onloadeddata + 0x180: 0xcc02, // tr + 0x181: 0x2c00a, // formtarget + 0x182: 0x11005, // title + 0x183: 0x6ff05, // style + 0x184: 0xd206, // strike + 0x185: 0x59e06, // usemap + 0x186: 0x2fc06, // iframe + 0x187: 0x1004, // main + 0x189: 0x7b07, // picture + 0x18c: 0x31605, // ismap + 0x18e: 0x4a504, // data + 0x18f: 0x5905, // label + 0x191: 0x3d10e, // referrerpolicy + 0x192: 0x15602, // th + 0x194: 0x53606, // prompt + 0x195: 0x56807, // section + 0x197: 0x6d107, // optimum + 0x198: 0x2db04, // high + 0x199: 0x15c02, // h1 + 0x19a: 0x65909, // onstalled + 0x19b: 0x16d03, // var + 0x19c: 0x4204, // time + 0x19e: 0x67402, // ms + 0x19f: 0x33106, // header + 0x1a0: 0x4da09, // onmessage + 0x1a1: 0x1a605, // nonce + 0x1a2: 0x26e0a, // formaction + 0x1a3: 0x22006, // center + 0x1a4: 0x3704, // nobr + 0x1a5: 0x59505, // table + 0x1a6: 0x4a907, // listing + 0x1a7: 0x18106, // legend + 0x1a9: 0x29b09, // challenge + 0x1aa: 0x24806, // figure + 0x1ab: 0xe605, // media + 0x1ae: 0xd904, // type + 0x1af: 0x3f04, // font + 0x1b0: 0x4da0e, // onmessageerror + 0x1b1: 0x37108, // seamless + 0x1b2: 0x8703, // dfn + 0x1b3: 0x5c705, // defer + 0x1b4: 0xc303, // low + 0x1b5: 0x19a03, // rtc + 
0x1b6: 0x5230b, // onmouseover + 0x1b7: 0x2b20a, // novalidate + 0x1b8: 0x71c0a, // workertype + 0x1ba: 0x3cd07, // itemref + 0x1bd: 0x1, // a + 0x1be: 0x31803, // map + 0x1bf: 0x400c, // ontimeupdate + 0x1c0: 0x15e07, // bgsound + 0x1c1: 0x3206, // keygen + 0x1c2: 0x2705, // tbody + 0x1c5: 0x64406, // onshow + 0x1c7: 0x2501, // s + 0x1c8: 0x6607, // pattern + 0x1cc: 0x14d10, // oncanplaythrough + 0x1ce: 0x2d702, // dd + 0x1cf: 0x6f906, // srcset + 0x1d0: 0x17003, // big + 0x1d2: 0x65108, // sortable + 0x1d3: 0x48007, // onkeyup + 0x1d5: 0x5a406, // onplay + 0x1d7: 0x4b804, // meta + 0x1d8: 0x40306, // ondrop + 0x1da: 0x60008, // onscroll + 0x1db: 0x1fb0b, // crossorigin + 0x1dc: 0x5730a, // onpageshow + 0x1dd: 0x4, // abbr + 0x1de: 0x9202, // td + 0x1df: 0x58b0f, // contenteditable + 0x1e0: 0x27206, // action + 0x1e1: 0x1400b, // playsinline + 0x1e2: 0x43107, // onfocus + 0x1e3: 0x2e008, // hreflang + 0x1e5: 0x5160a, // onmouseout + 0x1e6: 0x5ea07, // onreset + 0x1e7: 0x13c08, // autoplay + 0x1e8: 0x63109, // onseeking + 0x1ea: 0x67506, // scoped + 0x1ec: 0x30a, // radiogroup + 0x1ee: 0x3800b, // contextmenu + 0x1ef: 0x52e09, // onmouseup + 0x1f1: 0x2ca06, // hgroup + 0x1f2: 0x2080f, // allowfullscreen + 0x1f3: 0x4be08, // tabindex + 0x1f6: 0x30f07, // isindex + 0x1f7: 0x1a0e, // accept-charset + 0x1f8: 0x2ae0e, // formnovalidate + 0x1fb: 0x1c90e, // annotation-xml + 0x1fc: 0x6e05, // embed + 0x1fd: 0x21806, // script + 0x1fe: 0xbb06, // dialog + 0x1ff: 0x1d707, // command } -const atomText = "abbradiogrouparamalignmarkbdialogaccept-charsetbodyaccesskey" + - "genavaluealtdetailsampatternobreversedfnoembedirnamediagroup" + - "ingasyncanvasidefaultfooterowspanoframesetitleaudionblurubya" + - "utofocusandboxmplaceholderautoplaybasefontimeupdatebdoncance" + - "labelooptgrouplaintextrackindisabledivarbgsoundlowbrbigblink" + - "blockquotebuttonabortranslatecodefercolgroupostercolorcolspa" + - "nnotation-xmlcommandraggablegendcontrolsmallcoordsortedcross" + - "originsourcefieldsetfigcaptionafterprintfigurequiredforeignO" + - "bjectforeignobjectformactionautocompleteerrorformenctypemust" + - "matchallengeformmethodformnovalidatetimeterformtargetheightm" + - "lhgroupreloadhiddenhigh1hreflanghttp-equivideoncanplaythroug" + - "h2iframeimageimglyph3isindexismappletitemscopeditemtypemarqu" + - "eematheaderspacermaxlength4minlength5mtextareadonlymultiplem" + - "utedonclickoncloseamlesspellcheckedoncontextmenuitemidoncuec" + - "hangeondblclickondragendondragenterondragleaveondragoverondr" + - "agstarticleondropzonemptiedondurationchangeonendedonerroronf" + - "ocusrcdocitempropenoscriptonhashchangeoninputmodeloninvalido" + - "nkeydownloadonkeypressrclangonkeyupublicontenteditableonlang" + - "uagechangeonloadeddatalistingonloadedmetadatabindexonloadsta" + - "rtonmessageonmousedownonmousemoveonmouseoutputonmouseoveronm" + - "ouseuponmousewheelonofflineononlineonpagehidesclassectionbef" + - "oreunloaddresshapeonpageshowidth6onpausemaponplayingonpopsta" + - "teonprogresstrikeytypeonratechangeonresetonresizestrongonscr" + - "ollonseekedonseekingonselectedonshowraponsortableonstalledon" + - "storageonsubmitemrefacenteronsuspendontoggleonunloadonvolume" + - "changeonwaitingoptimumanifestepromptoptionbeforeprintstylesu" + - "mmarysupsvgsystemplate" +const atomText = "abbradiogrouparamainavalueaccept-charsetbodyaccesskeygenobrb" + + "asefontimeupdateviacacheightmlabelooptgroupatternoembedetail" + + "sampictureversedfnoframesetdirnameterowspanomoduleacronymali" + + 
"gnmarkbdialogallowpaymentrequestrikeytypeallowusermediagroup" + + "ingaltfooterubyasyncanvasidefaultitleaudioncancelautofocusan" + + "dboxmplaceholderautoplaysinlinebdoncanplaythrough1bgsoundisa" + + "bledivarbigblinkindraggablegendblockquotebuttonabortcitempro" + + "penoncecolgrouplaintextrackcolorcolspannotation-xmlcommandco" + + "ntrolshapecoordslotranslatecrossoriginsmallowfullscreenoscri" + + "ptfacenterfieldsetfigcaptionafterprintegrityfigurequiredfore" + + "ignObjectforeignobjectformactionautocompleteerrorformenctype" + + "mustmatchallengeformmethodformnovalidatetimeformtargethgroup" + + "osterhiddenhigh2hreflanghttp-equivideonclickiframeimageimgly" + + "ph3isindexismappletitemtypemarqueematheadersortedmaxlength4m" + + "inlength5mtextareadonlymultiplemutedoncloseamlessourceoncont" + + "extmenuitemidoncopyoncuechangeoncutondblclickondragendondrag" + + "enterondragexitemreferrerpolicyondragleaveondragoverondragst" + + "articleondropzonemptiedondurationchangeonendedonerroronfocus" + + "paceronhashchangeoninputmodeloninvalidonkeydownloadonkeypres" + + "spellcheckedonkeyupreloadonlanguagechangeonloadeddatalisting" + + "onloadedmetadatabindexonloadendonloadstartonmessageerroronmo" + + "usedownonmouseenteronmouseleaveonmousemoveonmouseoutputonmou" + + "seoveronmouseupromptonmousewheelonofflineononlineonpagehides" + + "classectionbluronpageshowbronpastepublicontenteditableonpaus" + + "emaponplayingonpopstateonprogressrcdocodeferonratechangeonre" + + "jectionhandledonresetonresizesrclangonscrollonsecuritypolicy" + + "violationauxclickonseekedonseekingonselectedonshowidth6onsor" + + "tableonstalledonstorageonsubmitemscopedonsuspendontoggleonun" + + "handledrejectionbeforeprintonunloadonvolumechangeonwaitingon" + + "wheeloptimumanifestrongoptionbeforeunloaddressrcsetstylesumm" + + "arysupsvgsystemplateworkertypewrap" diff --git a/vendor/golang.org/x/net/html/atom/table_test.go b/vendor/golang.org/x/net/html/atom/table_test.go index 0f2ecce4..8a30762e 100644 --- a/vendor/golang.org/x/net/html/atom/table_test.go +++ b/vendor/golang.org/x/net/html/atom/table_test.go @@ -1,23 +1,29 @@ -// generated by go run gen.go -test; DO NOT EDIT +// Code generated by go generate gen.go; DO NOT EDIT. 
+ +//go:generate go run gen.go -test package atom var testAtomList = []string{ "a", "abbr", - "abbr", "accept", "accept-charset", "accesskey", + "acronym", "action", "address", "align", + "allowfullscreen", + "allowpaymentrequest", + "allowusermedia", "alt", "annotation", "annotation-xml", "applet", "area", "article", + "as", "aside", "async", "audio", @@ -43,7 +49,6 @@ var testAtomList = []string{ "charset", "checked", "cite", - "cite", "class", "code", "col", @@ -52,7 +57,6 @@ var testAtomList = []string{ "cols", "colspan", "command", - "command", "content", "contenteditable", "contextmenu", @@ -60,7 +64,6 @@ var testAtomList = []string{ "coords", "crossorigin", "data", - "data", "datalist", "datetime", "dd", @@ -93,7 +96,6 @@ var testAtomList = []string{ "foreignObject", "foreignobject", "form", - "form", "formaction", "formenctype", "formmethod", @@ -128,6 +130,8 @@ var testAtomList = []string{ "input", "inputmode", "ins", + "integrity", + "is", "isindex", "ismap", "itemid", @@ -140,7 +144,6 @@ var testAtomList = []string{ "keytype", "kind", "label", - "label", "lang", "legend", "li", @@ -149,6 +152,7 @@ var testAtomList = []string{ "listing", "loop", "low", + "main", "malignmark", "manifest", "map", @@ -179,6 +183,8 @@ var testAtomList = []string{ "nobr", "noembed", "noframes", + "nomodule", + "nonce", "noscript", "novalidate", "object", @@ -187,6 +193,7 @@ var testAtomList = []string{ "onafterprint", "onautocomplete", "onautocompleteerror", + "onauxclick", "onbeforeprint", "onbeforeunload", "onblur", @@ -197,11 +204,14 @@ var testAtomList = []string{ "onclick", "onclose", "oncontextmenu", + "oncopy", "oncuechange", + "oncut", "ondblclick", "ondrag", "ondragend", "ondragenter", + "ondragexit", "ondragleave", "ondragover", "ondragstart", @@ -221,9 +231,13 @@ var testAtomList = []string{ "onload", "onloadeddata", "onloadedmetadata", + "onloadend", "onloadstart", "onmessage", + "onmessageerror", "onmousedown", + "onmouseenter", + "onmouseleave", "onmousemove", "onmouseout", "onmouseover", @@ -233,15 +247,18 @@ var testAtomList = []string{ "ononline", "onpagehide", "onpageshow", + "onpaste", "onpause", "onplay", "onplaying", "onpopstate", "onprogress", "onratechange", + "onrejectionhandled", "onreset", "onresize", "onscroll", + "onsecuritypolicyviolation", "onseeked", "onseeking", "onselect", @@ -253,9 +270,11 @@ var testAtomList = []string{ "onsuspend", "ontimeupdate", "ontoggle", + "onunhandledrejection", "onunload", "onvolumechange", "onwaiting", + "onwheel", "open", "optgroup", "optimum", @@ -264,9 +283,11 @@ var testAtomList = []string{ "p", "param", "pattern", + "picture", "ping", "placeholder", "plaintext", + "playsinline", "poster", "pre", "preload", @@ -275,7 +296,9 @@ var testAtomList = []string{ "public", "q", "radiogroup", + "rb", "readonly", + "referrerpolicy", "rel", "required", "reversed", @@ -283,6 +306,7 @@ var testAtomList = []string{ "rowspan", "rp", "rt", + "rtc", "ruby", "s", "samp", @@ -297,23 +321,23 @@ var testAtomList = []string{ "shape", "size", "sizes", + "slot", "small", "sortable", "sorted", "source", "spacer", "span", - "span", "spellcheck", "src", "srcdoc", "srclang", + "srcset", "start", "step", "strike", "strong", "style", - "style", "sub", "summary", "sup", @@ -331,7 +355,6 @@ var testAtomList = []string{ "thead", "time", "title", - "title", "tr", "track", "translate", @@ -340,12 +363,14 @@ var testAtomList = []string{ "typemustmatch", "u", "ul", + "updateviacache", "usemap", "value", "var", "video", "wbr", "width", + "workertype", "wrap", "xmp", } diff 
--git a/vendor/golang.org/x/net/html/const.go b/vendor/golang.org/x/net/html/const.go index 52f651ff..5eb7c5a8 100644 --- a/vendor/golang.org/x/net/html/const.go +++ b/vendor/golang.org/x/net/html/const.go @@ -4,7 +4,7 @@ package html -// Section 12.2.3.2 of the HTML5 specification says "The following elements +// Section 12.2.4.2 of the HTML5 specification says "The following elements // have varying levels of special parsing rules". // https://html.spec.whatwg.org/multipage/syntax.html#the-stack-of-open-elements var isSpecialElementMap = map[string]bool{ @@ -52,10 +52,12 @@ var isSpecialElementMap = map[string]bool{ "iframe": true, "img": true, "input": true, - "isindex": true, + "isindex": true, // The 'isindex' element has been removed, but keep it for backwards compatibility. + "keygen": true, "li": true, "link": true, "listing": true, + "main": true, "marquee": true, "menu": true, "meta": true, diff --git a/vendor/golang.org/x/net/html/doc.go b/vendor/golang.org/x/net/html/doc.go index 94f49687..822ed42a 100644 --- a/vendor/golang.org/x/net/html/doc.go +++ b/vendor/golang.org/x/net/html/doc.go @@ -49,18 +49,18 @@ call to Next. For example, to extract an HTML page's anchor text: for { tt := z.Next() switch tt { - case ErrorToken: + case html.ErrorToken: return z.Err() - case TextToken: + case html.TextToken: if depth > 0 { // emitBytes should copy the []byte it receives, // if it doesn't process it immediately. emitBytes(z.Text()) } - case StartTagToken, EndTagToken: + case html.StartTagToken, html.EndTagToken: tn, _ := z.TagName() if len(tn) == 1 && tn[0] == 'a' { - if tt == StartTagToken { + if tt == html.StartTagToken { depth++ } else { depth-- diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go index d3b38440..01477a96 100644 --- a/vendor/golang.org/x/net/html/foreign.go +++ b/vendor/golang.org/x/net/html/foreign.go @@ -67,7 +67,7 @@ func mathMLTextIntegrationPoint(n *Node) bool { return false } -// Section 12.2.5.5. +// Section 12.2.6.5. var breakout = map[string]bool{ "b": true, "big": true, @@ -115,7 +115,7 @@ var breakout = map[string]bool{ "var": true, } -// Section 12.2.5.5. +// Section 12.2.6.5. var svgTagNameAdjustments = map[string]string{ "altglyph": "altGlyph", "altglyphdef": "altGlyphDef", @@ -155,7 +155,7 @@ var svgTagNameAdjustments = map[string]string{ "textpath": "textPath", } -// Section 12.2.5.1 +// Section 12.2.6.1 var mathMLAttributeAdjustments = map[string]string{ "definitionurl": "definitionURL", } diff --git a/vendor/golang.org/x/net/html/node.go b/vendor/golang.org/x/net/html/node.go index 26b657ae..2c1cade6 100644 --- a/vendor/golang.org/x/net/html/node.go +++ b/vendor/golang.org/x/net/html/node.go @@ -21,9 +21,10 @@ const ( scopeMarkerNode ) -// Section 12.2.3.3 says "scope markers are inserted when entering applet -// elements, buttons, object elements, marquees, table cells, and table -// captions, and are used to prevent formatting from 'leaking'". +// Section 12.2.4.3 says "The markers are inserted when entering applet, +// object, marquee, template, td, th, and caption elements, and are used +// to prevent formatting from "leaking" into applet, object, marquee, +// template, td, th, and caption elements". var scopeMarker = Node{Type: scopeMarkerNode} // A Node consists of a NodeType and some Data (tag name for element nodes, @@ -173,6 +174,16 @@ func (s *nodeStack) index(n *Node) int { return -1 } +// contains returns whether a is within s. 
+func (s *nodeStack) contains(a atom.Atom) bool { + for _, n := range *s { + if n.DataAtom == a { + return true + } + } + return false +} + // insert inserts a node at the given index. func (s *nodeStack) insert(i int, n *Node) { (*s) = append(*s, nil) @@ -191,3 +202,19 @@ func (s *nodeStack) remove(n *Node) { (*s)[j] = nil *s = (*s)[:j] } + +type insertionModeStack []insertionMode + +func (s *insertionModeStack) pop() (im insertionMode) { + i := len(*s) + im = (*s)[i-1] + *s = (*s)[:i-1] + return im +} + +func (s *insertionModeStack) top() insertionMode { + if i := len(*s); i > 0 { + return (*s)[i-1] + } + return nil +} diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go index be4b2bf5..d23e05e0 100644 --- a/vendor/golang.org/x/net/html/parse.go +++ b/vendor/golang.org/x/net/html/parse.go @@ -25,20 +25,22 @@ type parser struct { hasSelfClosingToken bool // doc is the document root element. doc *Node - // The stack of open elements (section 12.2.3.2) and active formatting - // elements (section 12.2.3.3). + // The stack of open elements (section 12.2.4.2) and active formatting + // elements (section 12.2.4.3). oe, afe nodeStack - // Element pointers (section 12.2.3.4). + // Element pointers (section 12.2.4.4). head, form *Node - // Other parsing state flags (section 12.2.3.5). + // Other parsing state flags (section 12.2.4.5). scripting, framesetOK bool + // The stack of template insertion modes + templateStack insertionModeStack // im is the current insertion mode. im insertionMode // originalIM is the insertion mode to go back to after completing a text // or inTableText insertion mode. originalIM insertionMode // fosterParenting is whether new elements should be inserted according to - // the foster parenting rules (section 12.2.5.3). + // the foster parenting rules (section 12.2.6.1). fosterParenting bool // quirks is whether the parser is operating in "quirks mode." quirks bool @@ -56,7 +58,7 @@ func (p *parser) top() *Node { return p.doc } -// Stop tags for use in popUntil. These come from section 12.2.3.2. +// Stop tags for use in popUntil. These come from section 12.2.4.2. var ( defaultScopeStopTags = map[string][]a.Atom{ "": {a.Applet, a.Caption, a.Html, a.Table, a.Td, a.Th, a.Marquee, a.Object, a.Template}, @@ -79,7 +81,7 @@ const ( // popUntil pops the stack of open elements at the highest element whose tag // is in matchTags, provided there is no higher element in the scope's stop -// tags (as defined in section 12.2.3.2). It returns whether or not there was +// tags (as defined in section 12.2.4.2). It returns whether or not there was // such an element. If there was not, popUntil leaves the stack unchanged. // // For example, the set of stop tags for table scope is: "html", "table". 
If @@ -126,7 +128,7 @@ func (p *parser) indexOfElementInScope(s scope, matchTags ...a.Atom) int { return -1 } case tableScope: - if tagAtom == a.Html || tagAtom == a.Table { + if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template { return -1 } case selectScope: @@ -162,17 +164,17 @@ func (p *parser) clearStackToContext(s scope) { tagAtom := p.oe[i].DataAtom switch s { case tableScope: - if tagAtom == a.Html || tagAtom == a.Table { + if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template { p.oe = p.oe[:i+1] return } case tableRowScope: - if tagAtom == a.Html || tagAtom == a.Tr { + if tagAtom == a.Html || tagAtom == a.Tr || tagAtom == a.Template { p.oe = p.oe[:i+1] return } case tableBodyScope: - if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead { + if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead || tagAtom == a.Template { p.oe = p.oe[:i+1] return } @@ -183,7 +185,7 @@ func (p *parser) clearStackToContext(s scope) { } // generateImpliedEndTags pops nodes off the stack of open elements as long as -// the top node has a tag name of dd, dt, li, option, optgroup, p, rp, or rt. +// the top node has a tag name of dd, dt, li, optgroup, option, p, rb, rp, rt or rtc. // If exceptions are specified, nodes with that name will not be popped off. func (p *parser) generateImpliedEndTags(exceptions ...string) { var i int @@ -192,7 +194,7 @@ loop: n := p.oe[i] if n.Type == ElementNode { switch n.DataAtom { - case a.Dd, a.Dt, a.Li, a.Option, a.Optgroup, a.P, a.Rp, a.Rt: + case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc: for _, except := range exceptions { if n.Data == except { break loop @@ -207,6 +209,27 @@ loop: p.oe = p.oe[:i+1] } +// generateAllImpliedEndTags pops nodes off the stack of open elements as long as +// the top node has a tag name of caption, colgroup, dd, div, dt, li, optgroup, option, p, rb, +// rp, rt, rtc, span, tbody, td, tfoot, th, thead or tr. +func (p *parser) generateAllImpliedEndTags() { + var i int + for i = len(p.oe) - 1; i >= 0; i-- { + n := p.oe[i] + if n.Type == ElementNode { + switch n.DataAtom { + // TODO: remove this divergence from the HTML5 spec + case a.Caption, a.Colgroup, a.Dd, a.Div, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, + a.Rp, a.Rt, a.Rtc, a.Span, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr: + continue + } + } + break + } + + p.oe = p.oe[:i+1] +} + // addChild adds a child node n to the top element, and pushes n onto the stack // of open elements if it is an element node. func (p *parser) addChild(n *Node) { @@ -234,9 +257,9 @@ func (p *parser) shouldFosterParent() bool { } // fosterParent adds a child node according to the foster parenting rules. -// Section 12.2.5.3, "foster parenting". +// Section 12.2.6.1, "foster parenting". func (p *parser) fosterParent(n *Node) { - var table, parent, prev *Node + var table, parent, prev, template *Node var i int for i = len(p.oe) - 1; i >= 0; i-- { if p.oe[i].DataAtom == a.Table { @@ -245,6 +268,19 @@ func (p *parser) fosterParent(n *Node) { } } + var j int + for j = len(p.oe) - 1; j >= 0; j-- { + if p.oe[j].DataAtom == a.Template { + template = p.oe[j] + break + } + } + + if template != nil && (table == nil || j < i) { + template.AppendChild(n) + return + } + if table == nil { // The foster parent is the html element. parent = p.oe[0] @@ -304,7 +340,7 @@ func (p *parser) addElement() { }) } -// Section 12.2.3.3. +// Section 12.2.4.3. 
func (p *parser) addFormattingElement() { tagAtom, attr := p.tok.DataAtom, p.tok.Attr p.addElement() @@ -351,7 +387,7 @@ findIdenticalElements: p.afe = append(p.afe, p.top()) } -// Section 12.2.3.3. +// Section 12.2.4.3. func (p *parser) clearActiveFormattingElements() { for { n := p.afe.pop() @@ -361,7 +397,7 @@ func (p *parser) clearActiveFormattingElements() { } } -// Section 12.2.3.3. +// Section 12.2.4.3. func (p *parser) reconstructActiveFormattingElements() { n := p.afe.top() if n == nil { @@ -390,12 +426,12 @@ func (p *parser) reconstructActiveFormattingElements() { } } -// Section 12.2.4. +// Section 12.2.5. func (p *parser) acknowledgeSelfClosingTag() { p.hasSelfClosingToken = false } -// An insertion mode (section 12.2.3.1) is the state transition function from +// An insertion mode (section 12.2.4.1) is the state transition function from // a particular state in the HTML5 parser's state machine. It updates the // parser's fields depending on parser.tok (where ErrorToken means EOF). // It returns whether the token was consumed. @@ -403,7 +439,7 @@ type insertionMode func(*parser) bool // setOriginalIM sets the insertion mode to return to after completing a text or // inTableText insertion mode. -// Section 12.2.3.1, "using the rules for". +// Section 12.2.4.1, "using the rules for". func (p *parser) setOriginalIM() { if p.originalIM != nil { panic("html: bad parser state: originalIM was set twice") @@ -411,18 +447,38 @@ func (p *parser) setOriginalIM() { p.originalIM = p.im } -// Section 12.2.3.1, "reset the insertion mode". +// Section 12.2.4.1, "reset the insertion mode". func (p *parser) resetInsertionMode() { for i := len(p.oe) - 1; i >= 0; i-- { n := p.oe[i] - if i == 0 && p.context != nil { + last := i == 0 + if last && p.context != nil { n = p.context } switch n.DataAtom { case a.Select: + if !last { + for ancestor, first := n, p.oe[0]; ancestor != first; { + if ancestor == first { + break + } + ancestor = p.oe[p.oe.index(ancestor)-1] + switch ancestor.DataAtom { + case a.Template: + p.im = inSelectIM + return + case a.Table: + p.im = inSelectInTableIM + return + } + } + } p.im = inSelectIM case a.Td, a.Th: + // TODO: remove this divergence from the HTML5 spec. + // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 p.im = inCellIM case a.Tr: p.im = inRowIM @@ -434,25 +490,37 @@ func (p *parser) resetInsertionMode() { p.im = inColumnGroupIM case a.Table: p.im = inTableIM + case a.Template: + p.im = p.templateStack.top() case a.Head: - p.im = inBodyIM + // TODO: remove this divergence from the HTML5 spec. + // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 + p.im = inHeadIM case a.Body: p.im = inBodyIM case a.Frameset: p.im = inFramesetIM case a.Html: - p.im = beforeHeadIM + if p.head == nil { + p.im = beforeHeadIM + } else { + p.im = afterHeadIM + } default: + if last { + p.im = inBodyIM + return + } continue } return } - p.im = inBodyIM } const whitespace = " \t\r\n\f" -// Section 12.2.5.4.1. +// Section 12.2.6.4.1. func initialIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -479,7 +547,7 @@ func initialIM(p *parser) bool { return false } -// Section 12.2.5.4.2. +// Section 12.2.6.4.2. func beforeHTMLIM(p *parser) bool { switch p.tok.Type { case DoctypeToken: @@ -517,7 +585,7 @@ func beforeHTMLIM(p *parser) bool { return false } -// Section 12.2.5.4.3. +// Section 12.2.6.4.3. 
func beforeHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -560,7 +628,7 @@ func beforeHeadIM(p *parser) bool { return false } -// Section 12.2.5.4.4. +// Section 12.2.6.4.4. func inHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -590,19 +658,36 @@ func inHeadIM(p *parser) bool { case a.Head: // Ignore the token. return true + case a.Template: + p.addElement() + p.afe = append(p.afe, &scopeMarker) + p.framesetOK = false + p.im = inTemplateIM + p.templateStack = append(p.templateStack, inTemplateIM) + return true } case EndTagToken: switch p.tok.DataAtom { case a.Head: - n := p.oe.pop() - if n.DataAtom != a.Head { - panic("html: bad parser state: element not found, in the in-head insertion mode") - } + p.oe.pop() p.im = afterHeadIM return true case a.Body, a.Html, a.Br: p.parseImpliedToken(EndTagToken, a.Head, a.Head.String()) return false + case a.Template: + if !p.oe.contains(a.Template) { + return true + } + p.generateAllImpliedEndTags() + if n := p.oe.top(); n.DataAtom != a.Template { + return true + } + p.popUntil(defaultScope, a.Template) + p.clearActiveFormattingElements() + p.templateStack.pop() + p.resetInsertionMode() + return true default: // Ignore the token. return true @@ -622,7 +707,7 @@ func inHeadIM(p *parser) bool { return false } -// Section 12.2.5.4.6. +// Section 12.2.6.4.6. func afterHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -648,7 +733,7 @@ func afterHeadIM(p *parser) bool { p.addElement() p.im = inFramesetIM return true - case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title: + case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title: p.oe = append(p.oe, p.head) defer p.oe.remove(p.head) return inHeadIM(p) @@ -660,6 +745,8 @@ func afterHeadIM(p *parser) bool { switch p.tok.DataAtom { case a.Body, a.Html, a.Br: // Drop down to creating an implied tag. + case a.Template: + return inHeadIM(p) default: // Ignore the token. return true @@ -697,7 +784,7 @@ func copyAttributes(dst *Node, src Token) { } } -// Section 12.2.5.4.7. +// Section 12.2.6.4.7. func inBodyIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -727,10 +814,16 @@ func inBodyIM(p *parser) bool { case StartTagToken: switch p.tok.DataAtom { case a.Html: + if p.oe.contains(a.Template) { + return true + } copyAttributes(p.oe[0], p.tok) - case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title: + case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title: return inHeadIM(p) case a.Body: + if p.oe.contains(a.Template) { + return true + } if len(p.oe) >= 2 { body := p.oe[1] if body.Type == ElementNode && body.DataAtom == a.Body { @@ -767,7 +860,7 @@ func inBodyIM(p *parser) bool { // The newline, if any, will be dealt with by the TextToken case. 
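// Editor's illustrative sketch, not part of the patch: a <template> start tag
// seen in the "in head" insertion mode now enters inTemplateIM (see the
// inHeadIM hunk above). This walks a parsed document and reports whether a
// template element is present; findTemplate is an assumed helper.
package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)

func findTemplate(n *html.Node) *html.Node {
	if n.Type == html.ElementNode && n.DataAtom == atom.Template {
		return n
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		if t := findTemplate(c); t != nil {
			return t
		}
	}
	return nil
}

func main() {
	const src = `<!DOCTYPE html><head><template><p>hi</p></template></head><body></body>`
	doc, err := html.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	fmt.Println("template element found:", findTemplate(doc) != nil)
}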
p.framesetOK = false case a.Form: - if p.form == nil { + if p.oe.contains(a.Template) || p.form == nil { p.popUntil(buttonScope, a.P) p.addElement() p.form = p.top() @@ -952,11 +1045,16 @@ func inBodyIM(p *parser) bool { } p.reconstructActiveFormattingElements() p.addElement() - case a.Rp, a.Rt: + case a.Rb, a.Rtc: if p.elementInScope(defaultScope, a.Ruby) { p.generateImpliedEndTags() } p.addElement() + case a.Rp, a.Rt: + if p.elementInScope(defaultScope, a.Ruby) { + p.generateImpliedEndTags("rtc") + } + p.addElement() case a.Math, a.Svg: p.reconstructActiveFormattingElements() if p.tok.DataAtom == a.Math { @@ -972,7 +1070,13 @@ func inBodyIM(p *parser) bool { p.acknowledgeSelfClosingTag() } return true - case a.Caption, a.Col, a.Colgroup, a.Frame, a.Head, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr: + case a.Frame: + // TODO: remove this divergence from the HTML5 spec. + if p.oe.contains(a.Template) { + p.addElement() + return true + } + case a.Caption, a.Col, a.Colgroup, a.Head, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr: // Ignore the token. default: p.reconstructActiveFormattingElements() @@ -993,15 +1097,28 @@ func inBodyIM(p *parser) bool { case a.Address, a.Article, a.Aside, a.Blockquote, a.Button, a.Center, a.Details, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Listing, a.Menu, a.Nav, a.Ol, a.Pre, a.Section, a.Summary, a.Ul: p.popUntil(defaultScope, p.tok.DataAtom) case a.Form: - node := p.form - p.form = nil - i := p.indexOfElementInScope(defaultScope, a.Form) - if node == nil || i == -1 || p.oe[i] != node { - // Ignore the token. - return true + if p.oe.contains(a.Template) { + if !p.oe.contains(a.Form) { + // Ignore the token. + return true + } + p.generateImpliedEndTags() + if p.tok.DataAtom == a.Form { + // Ignore the token. + return true + } + p.popUntil(defaultScope, a.Form) + } else { + node := p.form + p.form = nil + i := p.indexOfElementInScope(defaultScope, a.Form) + if node == nil || i == -1 || p.oe[i] != node { + // Ignore the token. + return true + } + p.generateImpliedEndTags() + p.oe.remove(node) } - p.generateImpliedEndTags() - p.oe.remove(node) case a.P: if !p.elementInScope(buttonScope, a.P) { p.parseImpliedToken(StartTagToken, a.P, a.P.String()) @@ -1022,6 +1139,8 @@ func inBodyIM(p *parser) bool { case a.Br: p.tok.Type = StartTagToken return false + case a.Template: + return inHeadIM(p) default: p.inBodyEndTagOther(p.tok.DataAtom) } @@ -1030,6 +1149,21 @@ func inBodyIM(p *parser) bool { Type: CommentNode, Data: p.tok.Data, }) + case ErrorToken: + // TODO: remove this divergence from the HTML5 spec. + if len(p.templateStack) > 0 { + p.im = inTemplateIM + return false + } else { + for _, e := range p.oe { + switch e.DataAtom { + case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc, a.Tbody, a.Td, a.Tfoot, a.Th, + a.Thead, a.Tr, a.Body, a.Html: + default: + return true + } + } + } } return true @@ -1135,6 +1269,12 @@ func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) { switch commonAncestor.DataAtom { case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr: p.fosterParent(lastNode) + case a.Template: + // TODO: remove namespace checking + if commonAncestor.Namespace == "html" { + commonAncestor = commonAncestor.LastChild + } + fallthrough default: commonAncestor.AppendChild(lastNode) } @@ -1160,7 +1300,7 @@ func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) { } // inBodyEndTagOther performs the "any other end tag" algorithm for inBodyIM. 
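// Editor's illustrative sketch, not part of the patch: the inBodyIM form
// handling above only drops a <form> start tag when no <template> is on the
// stack of open elements. Counting form nodes after a parse is one way to
// observe that; countForms is an assumed helper and no count is asserted.
package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)

func countForms(n *html.Node) int {
	count := 0
	if n.Type == html.ElementNode && n.DataAtom == atom.Form {
		count++
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		count += countForms(c)
	}
	return count
}

func main() {
	const src = `<form><template><form><input name="inner"></form></template></form>`
	doc, err := html.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	fmt.Println("form elements in the tree:", countForms(doc))
}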
-// "Any other end tag" handling from 12.2.5.5 The rules for parsing tokens in foreign content +// "Any other end tag" handling from 12.2.6.5 The rules for parsing tokens in foreign content // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inforeign func (p *parser) inBodyEndTagOther(tagAtom a.Atom) { for i := len(p.oe) - 1; i >= 0; i-- { @@ -1174,7 +1314,7 @@ func (p *parser) inBodyEndTagOther(tagAtom a.Atom) { } } -// Section 12.2.5.4.8. +// Section 12.2.6.4.8. func textIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1203,7 +1343,7 @@ func textIM(p *parser) bool { return p.tok.Type == EndTagToken } -// Section 12.2.5.4.9. +// Section 12.2.6.4.9. func inTableIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1249,7 +1389,7 @@ func inTableIM(p *parser) bool { } // Ignore the token. return true - case a.Style, a.Script: + case a.Style, a.Script, a.Template: return inHeadIM(p) case a.Input: for _, t := range p.tok.Attr { @@ -1261,7 +1401,7 @@ func inTableIM(p *parser) bool { } // Otherwise drop down to the default action. case a.Form: - if p.form != nil { + if p.oe.contains(a.Template) || p.form != nil { // Ignore the token. return true } @@ -1291,6 +1431,8 @@ func inTableIM(p *parser) bool { case a.Body, a.Caption, a.Col, a.Colgroup, a.Html, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr: // Ignore the token. return true + case a.Template: + return inHeadIM(p) } case CommentToken: p.addChild(&Node{ @@ -1309,7 +1451,7 @@ func inTableIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.11. +// Section 12.2.6.4.11. func inCaptionIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1355,7 +1497,7 @@ func inCaptionIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.12. +// Section 12.2.6.4.12. func inColumnGroupIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -1386,11 +1528,13 @@ func inColumnGroupIM(p *parser) bool { p.oe.pop() p.acknowledgeSelfClosingTag() return true + case a.Template: + return inHeadIM(p) } case EndTagToken: switch p.tok.DataAtom { case a.Colgroup: - if p.oe.top().DataAtom != a.Html { + if p.oe.top().DataAtom == a.Colgroup { p.oe.pop() p.im = inTableIM } @@ -1398,17 +1542,19 @@ func inColumnGroupIM(p *parser) bool { case a.Col: // Ignore the token. return true + case a.Template: + return inHeadIM(p) } } - if p.oe.top().DataAtom != a.Html { - p.oe.pop() - p.im = inTableIM - return false + if p.oe.top().DataAtom != a.Colgroup { + return true } - return true + p.oe.pop() + p.im = inTableIM + return false } -// Section 12.2.5.4.13. +// Section 12.2.6.4.13. func inTableBodyIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1460,7 +1606,7 @@ func inTableBodyIM(p *parser) bool { return inTableIM(p) } -// Section 12.2.5.4.14. +// Section 12.2.6.4.14. func inRowIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1511,7 +1657,7 @@ func inRowIM(p *parser) bool { return inTableIM(p) } -// Section 12.2.5.4.15. +// Section 12.2.6.4.15. func inCellIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1560,7 +1706,7 @@ func inCellIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.16. +// Section 12.2.6.4.16. func inSelectIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1597,7 +1743,7 @@ func inSelectIM(p *parser) bool { p.tokenizer.NextIsNotRawText() // Ignore the token. 
return true - case a.Script: + case a.Script, a.Template: return inHeadIM(p) } case EndTagToken: @@ -1618,6 +1764,8 @@ func inSelectIM(p *parser) bool { if p.popUntil(selectScope, a.Select) { p.resetInsertionMode() } + case a.Template: + return inHeadIM(p) } case CommentToken: p.addChild(&Node{ @@ -1632,7 +1780,7 @@ func inSelectIM(p *parser) bool { return true } -// Section 12.2.5.4.17. +// Section 12.2.6.4.17. func inSelectInTableIM(p *parser) bool { switch p.tok.Type { case StartTagToken, EndTagToken: @@ -1650,7 +1798,62 @@ func inSelectInTableIM(p *parser) bool { return inSelectIM(p) } -// Section 12.2.5.4.18. +// Section 12.2.6.4.18. +func inTemplateIM(p *parser) bool { + switch p.tok.Type { + case TextToken, CommentToken, DoctypeToken: + return inBodyIM(p) + case StartTagToken: + switch p.tok.DataAtom { + case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title: + return inHeadIM(p) + case a.Caption, a.Colgroup, a.Tbody, a.Tfoot, a.Thead: + p.templateStack.pop() + p.templateStack = append(p.templateStack, inTableIM) + p.im = inTableIM + return false + case a.Col: + p.templateStack.pop() + p.templateStack = append(p.templateStack, inColumnGroupIM) + p.im = inColumnGroupIM + return false + case a.Tr: + p.templateStack.pop() + p.templateStack = append(p.templateStack, inTableBodyIM) + p.im = inTableBodyIM + return false + case a.Td, a.Th: + p.templateStack.pop() + p.templateStack = append(p.templateStack, inRowIM) + p.im = inRowIM + return false + default: + p.templateStack.pop() + p.templateStack = append(p.templateStack, inBodyIM) + p.im = inBodyIM + return false + } + case EndTagToken: + switch p.tok.DataAtom { + case a.Template: + return inHeadIM(p) + default: + // Ignore the token. + return true + } + } + if !p.oe.contains(a.Template) { + // Ignore the token. + return true + } + p.popUntil(defaultScope, a.Template) + p.clearActiveFormattingElements() + p.templateStack.pop() + p.resetInsertionMode() + return false +} + +// Section 12.2.6.4.19. func afterBodyIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1688,7 +1891,7 @@ func afterBodyIM(p *parser) bool { return false } -// Section 12.2.5.4.19. +// Section 12.2.6.4.20. func inFramesetIM(p *parser) bool { switch p.tok.Type { case CommentToken: @@ -1720,6 +1923,11 @@ func inFramesetIM(p *parser) bool { p.acknowledgeSelfClosingTag() case a.Noframes: return inHeadIM(p) + case a.Template: + // TODO: remove this divergence from the HTML5 spec. + // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 + return inTemplateIM(p) } case EndTagToken: switch p.tok.DataAtom { @@ -1738,7 +1946,7 @@ func inFramesetIM(p *parser) bool { return true } -// Section 12.2.5.4.20. +// Section 12.2.6.4.21. func afterFramesetIM(p *parser) bool { switch p.tok.Type { case CommentToken: @@ -1777,7 +1985,7 @@ func afterFramesetIM(p *parser) bool { return true } -// Section 12.2.5.4.21. +// Section 12.2.6.4.22. func afterAfterBodyIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1806,7 +2014,7 @@ func afterAfterBodyIM(p *parser) bool { return false } -// Section 12.2.5.4.22. +// Section 12.2.6.4.23. func afterAfterFramesetIM(p *parser) bool { switch p.tok.Type { case CommentToken: @@ -1844,7 +2052,7 @@ func afterAfterFramesetIM(p *parser) bool { const whitespaceOrNUL = whitespace + "\x00" -// Section 12.2.5.5. 
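// Editor's illustrative sketch, not part of the patch: inTemplateIM above
// re-dispatches table-related start tags by replacing the top of the
// template stack, so rows written directly inside <template> are parsed
// under the table rules. Nothing specific is asserted about the output;
// the markup is an assumed example.
package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

func main() {
	const src = `<body><template><tr><td>cell</td></tr></template></body>`
	doc, err := html.Parse(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	var sb strings.Builder
	if err := html.Render(&sb, doc); err != nil {
		panic(err)
	}
	fmt.Println(sb.String())
}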
+// Section 12.2.6.5 func parseForeignContent(p *parser) bool { switch p.tok.Type { case TextToken: @@ -1924,7 +2132,7 @@ func parseForeignContent(p *parser) bool { return true } -// Section 12.2.5. +// Section 12.2.6. func (p *parser) inForeignContent() bool { if len(p.oe) == 0 { return false @@ -2064,6 +2272,9 @@ func ParseFragment(r io.Reader, context *Node) ([]*Node, error) { } p.doc.AppendChild(root) p.oe = nodeStack{root} + if context != nil && context.DataAtom == a.Template { + p.templateStack = append(p.templateStack, inTemplateIM) + } p.resetInsertionMode() for n := context; n != nil; n = n.Parent { diff --git a/vendor/golang.org/x/net/html/parse_test.go b/vendor/golang.org/x/net/html/parse_test.go index 7e47d11b..89d96426 100644 --- a/vendor/golang.org/x/net/html/parse_test.go +++ b/vendor/golang.org/x/net/html/parse_test.go @@ -125,6 +125,7 @@ func (a sortedAttributes) Swap(i, j int) { func dumpLevel(w io.Writer, n *Node, level int) error { dumpIndent(w, level) + level++ switch n.Type { case ErrorNode: return errors.New("unexpected ErrorNode") @@ -140,13 +141,19 @@ func dumpLevel(w io.Writer, n *Node, level int) error { sort.Sort(attr) for _, a := range attr { io.WriteString(w, "\n") - dumpIndent(w, level+1) + dumpIndent(w, level) if a.Namespace != "" { fmt.Fprintf(w, `%s %s="%s"`, a.Namespace, a.Key, a.Val) } else { fmt.Fprintf(w, `%s="%s"`, a.Key, a.Val) } } + if n.Namespace == "" && n.DataAtom == atom.Template { + io.WriteString(w, "\n") + dumpIndent(w, level) + level++ + io.WriteString(w, "content") + } case TextNode: fmt.Fprintf(w, `"%s"`, n.Data) case CommentNode: @@ -176,7 +183,7 @@ func dumpLevel(w io.Writer, n *Node, level int) error { } io.WriteString(w, "\n") for c := n.FirstChild; c != nil; c = c.NextSibling { - if err := dumpLevel(w, c, level+1); err != nil { + if err := dumpLevel(w, c, level); err != nil { return err } } @@ -373,6 +380,11 @@ func TestNodeConsistency(t *testing.T) { } } +func TestParseFragmentWithNilContext(t *testing.T) { + // This shouldn't panic. + ParseFragment(strings.NewReader("

<p>hello</p>

"), nil) +} + func BenchmarkParser(b *testing.B) { buf, err := ioutil.ReadFile("testdata/go1.html") if err != nil { diff --git a/vendor/golang.org/x/net/html/testdata/webkit/ruby.dat b/vendor/golang.org/x/net/html/testdata/webkit/ruby.dat new file mode 100644 index 00000000..1ca8016c --- /dev/null +++ b/vendor/golang.org/x/net/html/testdata/webkit/ruby.dat @@ -0,0 +1,298 @@ +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +abcd +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| +| "c" +| +| "d" + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| "a" +| +| "b" +| + +#data +ab +#errors +(1,6): expected-doctype-but-got-start-tag +#document +| +| +| +| +| +| +| "a" +| +| "b" +| diff --git a/vendor/golang.org/x/net/html/testdata/webkit/template.dat b/vendor/golang.org/x/net/html/testdata/webkit/template.dat new file mode 100644 index 00000000..e25f690c --- /dev/null +++ b/vendor/golang.org/x/net/html/testdata/webkit/template.dat @@ -0,0 +1,1117 @@ +#data + +#errors +#document +| +| +| +|